| repo | file | code | file_length | avg_line_length | max_line_length | extension_type |
|---|---|---|---|---|---|---|
parameter-exchange
|
parameter-exchange-master/src/eval/psi_eval.py
|
#!/usr/bin/env python3
"""Evaluate PSI properties.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import contextlib
import logging
import os
import platform
import subprocess
import sys
import time
from lib import config, helpers
from lib.helpers import get_free_port, add_latency, reset_port, \
add_async_bandwidth
from lib.logging import configure_root_loger
from .shared import lb, read_output, check_if_client_hangs
sys.path.append(config.WORKING_DIR + 'cython/psi')
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSISender # noqa
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSIReceiver # noqa
# Constants -------------------------------------------------------------------
SET_SIZES = [1] + list(range(10 ** 6, 2 * 10 ** 7 + 1, 10 ** 6)) + \
[2 ** i for i in range(20, 25)]
# SET_SIZES = range(10 ** 5, 10 ** 6 + 1, 10 ** 5)
RESUME = False
MALSECURE = False
LATENCY = 0 # range(0, 301, 50) # ms
BANDWIDTH = 0 # [0, 6000, 50000, 100000]
HOST = "localhost"
NUMTHREADS = 1
STATSECPARAM = 40
ROUNDS_START = 0
ROUNDS_END = 10
DIRECTORY = config.EVAL_DIR + "psi" + "/"
os.makedirs(DIRECTORY, exist_ok=True)
# -----------------------------------------------------------------------------
log = configure_root_loger(logging.INFO, None)
def write_header(eval_type: str, file_path: str, row_fmt: str):
"""Write eval header into files."""
with open(file_path, 'w') as fd:
fd.write("------------------------HEADER------------------------\n")
fd.write(f"EVAL: {eval_type}\n")
fd.write(f"Set Sizes: {SET_SIZES}\n")
fd.write(f"Rounds: {ROUNDS_END}\n")
fd.write(f"TLS: {TLS}\n")
fd.write(f"SCHEME: {SCHEME}\n")
fd.write(f"Statistical Security Paramters: {STATSECPARAM}\n")
fd.write(f"Threads: {NUMTHREADS}\n")
fd.write(f"Latency: {LATENCY}\n")
fd.write(f"Bandwidth:{BANDWIDTH}\n")
fd.write(f"RAM Measurement Interval: 0.5s\n")
fd.write(f"RAM measurements written to filename_serverram.csv and "
f"filename_receiverram.csv.\n")
fd.write(f"{row_fmt}\n")
fd.write("----------------------END-HEADER----------------------\n")
def psi_time(base_name: str) -> None:
"""
Main method of PSI eval.
:param base_name: Base filename without extension
:return:
"""
server_ram_file = DIRECTORY + base_name + '_serverram.csv'
client_ram_file = DIRECTORY + base_name + '_clientram.csv'
file_path = DIRECTORY + base_name + '.csv'
rs = ROUNDS_START
row_fmt = f"TIMESTAMP;ROUND;SETSIZE;TLS;MALSECURE;STATSECPARAM;THREADS;" \
f"LATENCY[ms];BANDWIDTH[kBit/s];" \
f"SERVERTIME[s];CLIENTTIME[s];" \
f"ClientToServer[Byte];ClientToServer[Packets];" \
f"ServerToClient[Byte];ServerToClient[Packets];" \
f"[ERROR]"
if not RESUME or not os.path.exists(file_path):
# Only write the header for new files
write_header("PSI Time Eval", file_path, row_fmt)
row_fmt = f"TIMESTAMP;ROUND;SETSIZE;TLS;MALSECURE;STATSECPARAM;" \
f"THREADS;LATENCY[ms];BANDWIDTH[kBit/s];json.dumps(mem);" \
f"[ERROR]"
write_header("PSI Time Eval", server_ram_file, row_fmt)
write_header("PSI Time Eval", client_ram_file, row_fmt)
for r in lb(range(rs, ROUNDS_END), "Rounds", leave=True):
for s in lb(SET_SIZES, "Set Sizes", leave=False):
for stat in lb(STATSECPARAM, "StatSecParam", leave=False):
for latency in lb(LATENCY, "Latency", leave=False):
for bw in lb(BANDWIDTH, "Rate", leave=False):
success = False
while not success:
port = get_free_port()
error = ""
# Add latency/bw-limit to port
if platform.system() != "Darwin":
# Latency does not work on Mac
reset_port()
if latency != 0:
add_latency(latency)
if bw != 0:
add_async_bandwidth(bw, port)
elif LATENCY != 0 or BANDWIDTH != 0:
raise RuntimeError(
"Mac does not support latencies and"
"bandwidths.")
stc, stc_file = \
helpers.start_trans_measurement(
port, direction="src", sleep=False)
cts, cts_file = \
helpers.start_trans_measurement(
port, direction="dst", sleep=False)
time.sleep(0.5) # Wait for start
sp, cp = None, None
try:
sp = start(False, port, s, stat, MALSECURE,
TLS, server_ram_file, r, latency,
HOST, False)
cp = start(True, port, s, stat, MALSECURE,
TLS, client_ram_file, r, latency,
HOST, False)
# Check that connection was successful.
check_if_client_hangs(cp)
sp.wait()
cp.wait()
# Read server output
output, err = sp.communicate()
if err != "":
log.error(err)
error += err.strip().replace('\n', '\\n')
server_time, server_mem, err = read_output(
output)
error += err
# Read client output
output, err = cp.communicate()
err = err.replace(
"client socket connect error (hangs).",
"").strip()
if err != "":
log.error(err)
error += err.strip().replace('\n', '\\n')
client_time, client_mem, err = read_output(
output)
error += err
# Stop transmission measurement
# Kill TCPDUMP
helpers.kill_tcpdump()
stc.wait(30)
cts.wait(5)
server_sent, server_pkts = \
helpers.read_tcpstat_from_file(
stc_file)
client_sent, client_pkts = \
helpers.read_tcpstat_from_file(
cts_file)
timestamp = time.strftime('%Y-%m-%d %H:%M:%S')
with open(file_path, "a") as fd:
fd.write(';'.join((
timestamp,
str(r),
str(s),
str(TLS),
str(MALSECURE),
str(STATSECPARAM),
str(NUMTHREADS),
str(latency),
str(bw),
str(server_time),
str(client_time),
str(client_sent),
str(client_pkts),
str(server_sent),
str(server_pkts),
error
)) + '\n')
# Client RAM
with open(client_ram_file, "a") as fd:
fd.write(';'.join((
timestamp,
str(r),
str(s),
str(TLS),
str(MALSECURE),
str(STATSECPARAM),
str(NUMTHREADS),
str(latency),
str(bw),
str(client_mem),
error
)) + '\n')
# Server RAM
with open(server_ram_file, "a") as fd:
fd.write(';'.join((
timestamp,
str(r),
str(s),
str(TLS),
str(MALSECURE),
str(STATSECPARAM),
str(NUMTHREADS),
str(latency),
str(bw),
str(server_mem),
error
)) + '\n')
success = True
except Exception as e:
log.exception("Main Loop not successful.")
success = False
finally:
# Clean Up
# Kill TCPDUMP
helpers.kill_tcpdump()
# Remove tempfiles
with contextlib.suppress(FileNotFoundError):
os.remove(stc_file)
os.remove(cts_file)
# Remove latency
if latency != 0 or bw != 0:
reset_port()
if sp is not None:
sp.kill()
if cp is not None:
cp.kill()
def start(isClient: bool, port: int, set_size: int,
statsecparam: int, malicious: bool, tls: bool,
ram_file: str, rnd: int, latency: int,
host: str = 'localhost', debug: bool = False,
rr17: bool = False) -> subprocess.Popen:
"""Start PSI receiver/sender in own process.
:return: Popen object of process
"""
if isClient:
cmd = 'python3 -m eval.psi.receiver'
else:
cmd = 'python3 -m eval.psi.sender'
cmd += f" -p {port} -s {set_size} --statsecparam {statsecparam} " \
f"--host {host} -r {rnd} -l {latency} -o {ram_file}"
if tls:
cmd += ' -t'
if malicious:
cmd += ' -m'
if rr17:
cmd += ' --rr17'
log.debug('Execute: {}'.format(cmd))
cmd = cmd.split(" ")
p = subprocess.Popen(cmd, universal_newlines=True,
stderr=subprocess.PIPE, stdout=subprocess.PIPE)
return p
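# Usage sketch (illustrative only; concrete values are hypothetical): launch
# a sender/receiver pair on a free port and wait for both to finish.
# port = get_free_port()
# sp = start(False, port, 2 ** 20, 40, False, False, 'server_ram.csv', 0, 0)
# cp = start(True, port, 2 ** 20, 40, False, False, 'client_ram.csv', 0, 0)
# sp.wait()
# cp.wait()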
if __name__ == '__main__':
p = argparse.ArgumentParser("PSI Eval")
p.add_argument('--resume', help="Resume Eval", action="store_true")
p.add_argument('-r', '--reps', help="Rounds", action='store', default=0,
type=int)
p.add_argument('-o', '--out', type=str, action='store',
help="Base filename WITHOUT file-ending!", required=True)
p.add_argument('-t', '--tls', type=int, action='store', required=True,
help="TLS Activated? [1 or 0]", choices=[0, 1])
p.add_argument('-m', '--malicious', action='store_true',
help="Use RR16?")
p.add_argument('--rr17', action='store_true',
help="Use RR17")
p.add_argument('-b', '--bandwidth', action='store_true',
help="Limit bandwidth?")
p.add_argument('-s', '--setsize',
help="Setsize: Either constant or 3 values.",
metavar=('CONSTANT/MIN', 'MAX STEP'),
nargs='+', action='store', type=int)
p.add_argument('--statsecparam',
help="Statistical Sec. Parameters: "
"Either constant or 3 values.",
metavar=('CONSTANT/MIN', 'MAX STEP'),
nargs='+', action='store', type=int)
p.add_argument('-l', '--latency',
help="Latency: Either constant or 3 values.",
metavar=('CONSTANT/MIN', 'MAX STEP'),
nargs='+', action='store', type=int)
args = p.parse_args()
if args.resume:
RESUME = True
if args.malicious:
MALSECURE = True
SCHEME = "RR16"
else:
MALSECURE = False
SCHEME = "KKRT16"
if args.rr17:
MALSECURE = True
SCHEME = "RR17"
if args.bandwidth:
BANDWIDTH = [0, 6000, 50000, 100000]
SET_SIZES = [1] + list(range(10 ** 5, 10 ** 6 + 1, 10 ** 5))
else:
pass  # keep the default BANDWIDTH
if args.tls > 0:
TLS = True
else:
TLS = False
if args.reps > 0:
ROUNDS_END = args.reps
filename = args.out
if args.setsize is not None:
if len(args.setsize) == 1:
SET_SIZES = [args.setsize[0]]
elif len(args.setsize) == 3:
SET_SIZES = range(args.setsize[0],
args.setsize[1] + args.setsize[2],
args.setsize[2])
else:
raise ValueError("Either 1 or 3 setsize parameters!")
if args.statsecparam is not None:
if len(args.statsecparam) == 1:
STATSECPARAM = [args.statsecparam[0]]
elif len(args.statsecparam) == 3:
STATSECPARAM = range(args.statsecparam[0],
args.statsecparam[1] + args.statsecparam[2],
args.statsecparam[2])
else:
raise ValueError("Either 1 or 3 statSecParam parameters!")
if args.latency is not None:
if len(args.latency) == 1:
LATENCY = [args.latency[0]]
elif len(args.latency) == 3:
LATENCY = range(args.latency[0],
args.latency[1] + args.latency[2],
args.latency[2])
else:
raise ValueError("Either 1 or 3 latency parameters!")
psi_time(filename)
| 15,572
| 43.367521
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/eval/shared.py
|
#!/usr/bin/env python3
"""Shared eval methods.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import os
import select
import subprocess
from collections.abc import Iterable
from typing import TextIO, Any, List, Tuple
from tqdm import tqdm
from lib.config import WORKING_DIR
log = logging.getLogger(__name__)
def reset_config() -> None:
"""Reset the config file."""
subprocess.run(['git', 'checkout', '-f', 'lib/config.py'])
def set_config(variable: str, v: Any) -> None:
"""Set a certain variable in the config file to the given value."""
with open(WORKING_DIR + "src/lib/config.py", "r") as f:
lines = f.readlines()
for i, line in enumerate(lines):
if f"{variable} =" in line:
lines[i] = f"{variable} = {str(v)}\n"
# Overwrite
with open(WORKING_DIR + "src/lib/config.py", "w") as f:
f.writelines(lines)
def set_total_rec_len(v: int) -> None:
"""Set the total record length."""
set_config("RECORD_LENGTH", v)
def set_rec_id_len(v: int) -> None:
"""Set the record id length."""
set_config("RECORD_ID_LENGTH", v)
def set_rounding(v: List[int]) -> None:
"""Set the rounding vector."""
set_config("ROUNDING_VEC", v)
def get_last_line(f: TextIO) -> str:
"""Return last line of file descriptor."""
lines = f.readlines()
return lines[-1]
def lb(o, *args, **kwargs):
"""Return a tqdm object if there is more than one element"""
if isinstance(o, (list, tuple)) and len(o) == 1:
return o
elif isinstance(o, Iterable):
return tqdm(o, *args, **kwargs)
else:
return [o]
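# Behaviour sketch (illustrative only):
# lb([42]) -> [42] (single-element list passes through without a bar)
# lb(7) -> [7] (a scalar is wrapped into a one-element list)
# lb(range(3), "Demo") -> tqdm-wrapped iterable with a progress bar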
def read_output(output: str) -> Tuple[str, str, str]:
"""Parse the output of the OT and PSI processes.
Returns the values of the last line of the form 'runtime:mem:error';
lines starting with '[' or 'C' are skipped.
"""
runtime, mem, error = '0', '0', '0'
for line in output.split(os.linesep):
if not line.startswith('[') and not line.startswith('C'):
if line.split(':') != ['']:
runtime, mem, error = line.split(':')
return runtime, mem, error
def check_if_client_hangs(cp: subprocess.Popen) -> None:
"""Raise error if client process hangs."""
counter2 = 0
try:
cp.wait(30)
except subprocess.TimeoutExpired:
# Not terminated
# Count current outputs
counter = 0
poll_obj = select.poll()
poll_obj.register(cp.stderr,
select.POLLIN)
while poll_obj.poll(0):
line = cp.stderr.readline()
if "client socket connect error" in line:
counter += 1
log.debug(f"Counted after first timeout: "
f"{counter}")
# Wait again
try:
cp.wait(30)
except subprocess.TimeoutExpired:
# Still unfinished
counter2 = 0
while poll_obj.poll(0):
line = cp.stderr.readline()
if "client socket connect error" in \
line:
counter2 += 1
log.debug(f"Counted after 2nd timeout: "
f"{counter2}")
if counter2 > 0:
# Connection still hangs, restart
raise RuntimeError("Conection hangs.")
| 3,289
| 27.119658
| 71
|
py
|
parameter-exchange
|
parameter-exchange-master/src/eval/client.py
|
#!/usr/bin/env python3
"""Evaluates Client App.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
# Constants -------------------------------------------------------------------
import argparse
import atexit
import contextlib
import json
import logging
import os
import pickle
import random
import select
import shutil
import subprocess
import sys
import time
from typing import List
from unittest.mock import patch, Mock
import numpy
from data_provider import DataProvider
from eval import shared as shd
from eval.shared import lb
from lib import config, helpers, logging as logg
from lib import db_cli as db
from lib.base_client import UserType, ServerType
from lib.key_server_backend import KeyServer
from lib.record import Record
from lib.similarity_metrics import map_metric
DEBUG = False
LOGLVL = logging.INFO
SLEEP_TIME = 4 if DEBUG else 10
TIME_PER_MATCH = 10 # s (Observed on Preserver)
EXEC_TIME_OFFSET = 2 * 3600 # s Time for matching mainly
DIRECTORY = config.EVAL_DIR + "client" + "/"
os.makedirs(DIRECTORY, exist_ok=True)
ROUNDS = 10
NUM_MATCHES = range(0, 1001, 100)
# Default values
REC_LEN = 100
REC_ID_LEN = 10
REC_ROUND = [3 for _ in range(REC_ID_LEN)]
PSI = True
METRIC = "relOffset-0.3" # 0.3 with PSI, 1 for Bloom Only
TLS = config.OT_TLS
MODE = "RANDOM"
if config.OT_TLS or config.PSI_TLS:
raise RuntimeError("TLs should be disabled.")
log = logg.configure_root_loger(LOGLVL,
config.WORKING_DIR + 'data/client_eval.log')
atexit.register(shutil.rmtree, config.TEMP_DIR, True)
# -----------------------------------------------------------------------------
def write_header(file_path: str, row_fmt: str):
"""Write header into csv File."""
with open(file_path, 'w') as fd:
fd.write("------------------------HEADER------------------------\n")
fd.write(f"MODE: {MODE}\n")
fd.write(f"TLS: {TLS}\n")
fd.write(f"Metric: {METRIC}\n")
fd.write(f"Num Matches: {NUM_MATCHES}\n")
fd.write(f"\n")
fd.write(f"Bloom Capacity: {config.BLOOM_CAPACITY:,}\n")
fd.write(f"Bloom Error Rate: {config.BLOOM_ERROR_RATE}\n")
fd.write(f"Parallel Matching: {config.PARALLEL}\n")
fd.write(f"\n")
fd.write(f"OT Malicious Secure: {config.OT_MAL_SECURE}\n")
fd.write(f"OT TLS: {config.OT_TLS}\n")
fd.write(f"OT Setsize/Number of keys: {config.OT_SETSIZE:,}\n")
fd.write(f"Hash Key Length: {config.HASHKEY_LEN}\n")
fd.write(f"Encryption Key Length: {config.ENCKEY_LEN}\n")
fd.write(f"\n")
fd.write(f"PSI Mode: {PSI}\n")
if PSI:
fd.write(f"PSI Index Length: {config.PSI_INDEX_LEN}\n")
fd.write(f"PSI Setsize: {config.PSI_SETSIZE:,}\n")
fd.write(f"PSI Scheme: {config.PSI_SCHEME}\n")
fd.write(f"PSI TLS: {config.PSI_TLS}\n")
fd.write(f"\n")
fd.write(f"Record Length: {REC_LEN}\n")
fd.write(f"Record ID Length: {REC_ID_LEN}\n")
fd.write(f"Record Rounding: {REC_ROUND}\n")
fd.write(f"\n")
fd.write(f"Rounds: {ROUNDS}\n")
fd.write(f"Interval of RAM measurements: {config.RAM_INTERVAL}s\n")
fd.write(f"All times in Seconds! Timer start with 'StartTime' and is "
f"monotonic clock. Only differences are meaningful.\n")
fd.write(f"{row_fmt}\n")
fd.write("----------------------END-HEADER----------------------\n")
def compute_matches(t: List[float], num: int) -> List[Record]:
"""
Compute matching vectors around the target.
:param num: # of matches
:param t: Target record
:return: List of matching records
"""
metric, ars = map_metric(METRIC)
m = metric(t, *ars)
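# Grow the relative offset in 0.1 steps until the metric yields more than
# 2 * num candidates, so that a random subset of exactly num records can
# be drawn below.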
for i in numpy.arange(0.1, ars[0], 0.1):
m = metric(t, float(i))
if len(m) > 2 * num:
break
candidates = list(m)
random.shuffle(candidates)
candidates = candidates[:num]
if len(candidates) < num:
raise RuntimeError("Not enough candidates!")
log.info(f"Generated {len(candidates)} candidates.")
return [Record(v) for v in candidates]
def kill_bg_servers():
"""Kill old processes if running"""
subprocess.run(["tmux", "kill-session", "-t", "eval"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.run(["pkill", "-9", "celery"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
atexit.register(kill_bg_servers)
def preparation(num_matches, target, id_len, rounding):
"""Prepare databases and start background tasks."""
# Kill old processes if running
kill_bg_servers()
time.sleep(SLEEP_TIME)
data_dir = config.DATA_DIR
log.info("Removing Databases.")
with contextlib.suppress(FileNotFoundError):
# Remove Bloom Filter
os.remove(data_dir + config.BLOOM_FILE)
# Remove Databases
os.remove(data_dir + config.KEYSERVER_DB)
os.remove(data_dir + config.STORAGE_DB)
# Add User
log.info("Prepare User DB.")
db.main(UserType.CLIENT, ['testuser', 'password', '-a'], no_print=True)
db.main(UserType.OWNER, ['testprovider', 'password', '-a'], no_print=True)
log.info("Starting Background Servers.")
subprocess.run([f"{config.WORKING_DIR}src/allStart.sh", "eval"])
time.sleep(SLEEP_TIME)
# Create data provider client
d = DataProvider('testprovider')
d.set_password('password')
# Check that servers are really online
tries = 0
done = False
while not done:
try:
if tries >= 1:
# Try to start servers again.
kill_bg_servers()
time.sleep(SLEEP_TIME)
subprocess.run(
[f"{config.WORKING_DIR}src/allStart.sh", "eval"])
time.sleep(SLEEP_TIME)
tries = 0
# Check Key Server
d.get_token(ServerType.KeyServer)
# Check celery
r = d.get(d.KEYSERVER.replace('provider', 'celery'))
if r.content != b"True":
raise RuntimeError("Celery of keyserver not started.")
# Check Storage Server
d.get_token(ServerType.StorageServer)
# Check celery
r = d.get(d.STORAGESERVER.replace('provider', 'celery'))
if r.content != b"True":
raise RuntimeError("Celery of storage-server not started.")
# Success
done = True
except Exception as e:
log.error(f"Server not up, yet. Try: {tries}. Error: {str(e)}")
tries += 1
time.sleep(5)
k = KeyServer()
def get_enc_keys(indices: List[int]):
"""Mock OT for performance."""
keys = k._enc_keys
return [keys[i] for i in indices]
m1 = Mock(return_value=k.get_hash_key())
with patch.object(d, "_get_enc_keys", get_enc_keys), \
patch.object(d, "get_hash_key", m1), \
patch("lib.config.RECORD_ID_LENGTH", id_len), \
patch("lib.config.RECORD_LENGTH", len(target)), \
patch("lib.config.ROUNDING_VEC", rounding):
# Add Records
if MODE == "IKV":
d.store_from_file(f"{config.WORKING_DIR}data/ikv_data.txt")
elif MODE == "WZL":
d.store_from_file(f"{config.WORKING_DIR}data/wzl_data.txt")
elif num_matches == 0:
# No need to store anything
pass
else:
start = time.monotonic()
matches = compute_matches(target, num_matches)
log.info(f"Dummy Record Computation took:"
f"{str(time.monotonic() - start)}")
d.store_records(matches)
def start(com_file: str, target: List[float]) -> subprocess.Popen:
"""Start a client App process and return it."""
cmd = ["python3", "client.py", "testuser", "password", "-r", str(target),
"-m", METRIC, "-e", com_file]
if DEBUG:
cmd.append('-vv')
if PSI:
cmd.append("--psi") # Measure PSI, too.
proc = subprocess.Popen(cmd, universal_newlines=True,
stderr=subprocess.PIPE)
# , stdout=subprocess.PIPE)
return proc
def main(base_filename: str, resume: bool = False):
"""Execute evaluaiton."""
file_path = DIRECTORY + base_filename + ".csv"
ram_path = DIRECTORY + base_filename + '_ram.csv'
row_fmt = (f"TIMESTAMP;"
f"ROUND;"
f"#BloomMatches;"
f"#PSIMatches;"
f"#Matches;"
f"#Results;"
f"StartTime[s];"
f"CandidateComputationTime[s];"
f"HashKeyTime[s];"
f"PSIPreparationTime[s];"
f"PSIExecutionTime[s];"
f"PSISetConstructionTime[s];"
f"BloomFilterRetrievalTime[s];"
f"MatchingTime[s];"
f"KeyRetrievalTime(OT)[s];"
f"RecordRetrievalTime[s];"
f"DecryptionTime[s];"
f"FromKS[Byte];FromKS[Pkt];"
f"ToKS[Byte];ToKs[Pkt];"
f"FromSS[Byte];FromSS[Pkt];"
f"ToSS[Byte];ToSS[Pkt];"
f"ToOTSvr[Byte];ToOTSvr[Pkt];"
f"FromOTSvr[Byte];FromOTSvr[Pkt];"
f"ToPSISvr[Byte];ToPSISvr[Pkt];"
f"FromPSISvr[Byte];FromPSISvr[Pkt];"
f"Error")
if not resume or not os.path.exists(file_path):
write_header(file_path, row_fmt)
row_fmt = "TIMESTAMP;ROUND;MATCHES;json.dumps(ram_usage)"
write_header(ram_path, row_fmt)
for r in lb(range(ROUNDS), "Rounds", leave=True):
failed = True
for m in lb(NUM_MATCHES, "Matches", leave=True):
success = False
while not success:
# Reset config file
shd.reset_config()
# Configuration
shd.set_rounding(REC_ROUND)
shd.set_rec_id_len(REC_ID_LEN)
shd.set_total_rec_len(REC_LEN)
log.info("Doing Preparation.")
# comp Target
if MODE == "WZL":
target = [1, 2.2, 60.0, 20.0, 60.0, 20.0, 60.0, 20.0,
1, 1, 2, 22.5, 23.6, 30.2, 1, 1, 40.0, 165.0, 0.08]
elif MODE == "IKV":
target = [12732.0, 12496.0, 0.20453525295926794, 12496.949, 1.962849116, 2.0, 2.0, 56.4, 1.019, 28.73374196, 1.0, 1.072714416,
0.257196124, 0.628290524, 4.359, 4.359, 2.435, 1.0, 1.0, 23.0, 36.0, 101.0567, 173.0, 226.0, 26.0, 4.9, 18.3, 10.782]
else:
target = [float(i + 2) for i in range(REC_LEN)]
preparation(m, target, REC_ID_LEN, REC_ROUND)
failed = False
tempfiles = []
process = None
com_file = helpers.get_temp_file() + '_comfile.pyc'
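# The client App reports its results by pickling a dict into com_file;
# it is unpickled after the process terminates (see below).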
e = None
# May be deleted by clean-up of prev. round
os.makedirs(config.TEMP_DIR, exist_ok=True)
try:
error = ""
# Start data measurements
tks, tks_file = helpers.start_trans_measurement(
config.KEY_API_PORT, direction="dst", sleep=False
)
fks, fks_file = helpers.start_trans_measurement(
config.KEY_API_PORT, direction="src", sleep=False
)
tss, tss_file = helpers.start_trans_measurement(
config.STORAGE_API_PORT, direction="dst", sleep=False
)
fss, fss_file = helpers.start_trans_measurement(
config.STORAGE_API_PORT, direction="src", sleep=False
)
tempfiles = [tks_file, fks_file, tss_file, fss_file]
measurements = [tks, fks, tss, fss]
time.sleep(0.5)
longest_exec_time = TIME_PER_MATCH * m + \
EXEC_TIME_OFFSET # s
start_runtime = time.monotonic()
process = start(com_file, target)
# Check if client socket hangs
poll_err = select.poll()
poll_err.register(process.stderr,
select.POLLIN)
hangs = False
ctr = 0
while True:
try:
process.wait(30)
# Terminated
break
except subprocess.TimeoutExpired:
ctr += 1
total_wait_time = time.monotonic() - start_runtime
# if total_wait_time > longest_exec_time:
#     raise RuntimeError("Execution time too long.")
# check if there is something on stderr
hang_detected = False
while poll_err.poll(0):
line = process.stderr.readline().strip()
if line != "":
print(f"Read Line: '{line}'")
if "client socket connect error" in line:
hang_detected = True
if hangs and hang_detected:
# Still hangs after 30s. Restart.
raise RuntimeError("Client hangs.")
hangs = hang_detected
# Load com file
with open(com_file, "rb") as fd:
e = pickle.load(fd)
if e['result'] is None:
# full_retrieve did not terminate
raise RuntimeError(e['error'])
result = e['result']
ram_usage = e['ram_usage']
ot_files_sent = e['ot_tcpdump_sent']
ot_files_recv = e['ot_tcpdump_recv']
psi_files_sent = e['psi_tcpdump_sent']
psi_files_recv = e['psi_tcpdump_recv']
# Kill TCPDUMP
helpers.kill_tcpdump()
for proc in measurements:
# Wait for termination
proc.wait(30)
# Get Data Amount results
fks_byte, fks_pkt = helpers.read_tcpstat_from_file(
fks_file)
tks_byte, tks_pkt = helpers.read_tcpstat_from_file(
tks_file)
fss_byte, fss_pkt = helpers.read_tcpstat_from_file(
fss_file)
tss_byte, tss_pkt = helpers.read_tcpstat_from_file(
tss_file)
# Check files exist
if e['num_matches'] > 0 and (len(ot_files_sent) == 0 or
len(ot_files_recv) == 0):
raise RuntimeError(
'OT executed but no pcap files available.')
if (len(psi_files_recv) == 0 or
len(psi_files_sent) == 0) and PSI:
raise RuntimeError(
'PSI executed but no pcap files available.')
if (MODE == "RANDOM" and e['num_matches'] != m) or (
MODE == "IKV" and len(result) != m) or (
MODE == "WZL" and len(result) != m):
s = (f"Mismatch of number matches. Expected: {m}"
f" Got: {e['num_matches']}")
error += s
log.error(s)
# Get OT Transmission info
ot_byte_sen = 0
ot_pkt_sen = 0
ot_byte_rec = 0
ot_pkt_rec = 0
for file in ot_files_sent:
b, pkt = helpers.read_tcpstat_from_file(file)
ot_byte_sen += b
ot_pkt_sen += pkt
os.remove(file)
for file in ot_files_recv:
b, pkt = helpers.read_tcpstat_from_file(file)
ot_byte_rec += b
ot_pkt_rec += pkt
os.remove(file)
# Get PSI Transmission info
psi_byte_sen = 0
psi_pkt_sen = 0
psi_byte_rec = 0
psi_pkt_rec = 0
for file in psi_files_sent:
b, pkt = helpers.read_tcpstat_from_file(file)
psi_byte_sen += b
psi_pkt_sen += pkt
os.remove(file)
for file in psi_files_recv:
b, pkt = helpers.read_tcpstat_from_file(file)
psi_byte_rec += b
psi_pkt_rec += pkt
os.remove(file)
with open(file_path, "a") as fd:
row = ";".join((
time.strftime('%Y-%m-%d %H:%M:%S'),
str(r),
str(e['bloom_matches']),
str(e['psi_matches']),
str(e['num_matches']),
str(len(result)),
str(e['start_time']),
str(e['compute_candidates_time']),
str(e['hash_key_time']),
str(e['psi_preparation_time']),
str(e['psi_execution_time']),
str(e['psi_set_construction_time']),
str(e['bloom_filter_retrieve_time']),
str(e['bloom_matching_time']),
str(e['key_retrieve_time']),
str(e['record_retrieve_time']),
str(e['decryption_time']),
str(fks_byte),
str(fks_pkt),
str(tks_byte),
str(tks_pkt),
str(fss_byte),
str(fss_pkt),
str(tss_byte),
str(tss_pkt),
str(ot_byte_sen),
str(ot_pkt_sen),
str(ot_byte_rec),
str(ot_pkt_rec),
str(psi_byte_sen),
str(psi_pkt_sen),
str(psi_byte_rec),
str(psi_pkt_rec),
error
))
fd.write(f"{row}\n")
with open(ram_path, 'a') as fd:
fd.write(
';'.join(
(
time.strftime('%Y-%m-%d %H:%M:%S'),
str(r),
str(m),
json.dumps(ram_usage)
)
) + '\n'
)
success = True
except Exception as e:
log.exception(str(e))
success = False
failed = True # Restart server
finally:
# Reset config file
shd.reset_config()
# Clean Up
if process is not None:
process.terminate()
try:
process.wait(5)
except subprocess.TimeoutExpired:
# Terminate was not enough
process.kill()
# Kill TCPDUMP
helpers.kill_tcpdump()
# Remove Tempfiles
shutil.rmtree(config.TEMP_DIR, ignore_errors=True)
def get_client_parser() -> argparse.ArgumentParser:
"""Return an argparser for the client eval."""
parser = argparse.ArgumentParser(description="Client Eval")
parser.add_argument('-m', "--metric", help="Name of similarity metric",
type=str, action="store", default=METRIC)
parser.add_argument('-r', '--reps', help="Rounds", action='store',
default=ROUNDS, type=int)
parser.add_argument('-n', '--num', help="# Matches", action='store',
default=NUM_MATCHES,
type=int)
parser.add_argument('-o', '--out', type=str, action='store',
help="Base filename WITHOUT file-ending!",
required=True)
parser.add_argument('--resume', action="store_true",
help="Append to file.",
default=False)
parser.add_argument('-p', "--psi", help="Also evaluate PSI.",
action="store_true", default=False)
action_group = parser.add_mutually_exclusive_group(required=False)
action_group.add_argument('--wzl1', action='store_true',
help='Use WZL Data.')
action_group.add_argument('--wzl2', action='store_true',
help='Use WZL Data.')
action_group.add_argument('--ikv', action='store_true',
help='Use IKV Data.')
action_group.add_argument('--random', action='store_true',
help='Use Random Data.')
return parser
if __name__ == '__main__':
if not config.EVAL:
log.error("config.EVAL has to be True.")
sys.exit(-1)
log.setLevel(LOGLVL)
p = get_client_parser()
args = p.parse_args()
ROUNDS = args.reps
METRIC = args.metric
NUM_MATCHES = args.num
PSI = args.psi
if args.wzl1 or args.wzl2:
MODE = "WZL"
PSI = True
REC_LEN = 19
REC_ID_LEN = 17
REC_ROUND = [0, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 0, 0, 3]
if args.wzl1:
METRIC = "wzl1"
NUM_MATCHES = 10
elif args.wzl2:
METRIC = "wzl2"
NUM_MATCHES = 6
elif args.ikv:
MODE = "IKV"
PSI = False
REC_LEN = 28
REC_ID_LEN = 21
REC_ROUND = [2 for _ in range(REC_ID_LEN)]
METRIC = "relOffset-3"
NUM_MATCHES = 77
else:
MODE = "RANDOM"
PSI = True
# Use above default values
REC_LEN = 100
REC_ID_LEN = 10
REC_ROUND = [3 for _ in range(REC_ID_LEN)]
main(args.out, args.resume)
| 23,282
| 39.421875
| 147
|
py
|
parameter-exchange
|
parameter-exchange-master/src/eval/metric_eval.py
|
#!/usr/bin/env python3
"""Evaluate the number of candidated produced by the offset metric.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import logging
import os
import numpy as np
from lib import config
from lib.logging import configure_root_loger
# Constants -------------------------------------------------------------------
from lib.similarity_metrics import comp_offset_num, RelativeOffsetIterator
from .shared import lb
METRIC = RelativeOffsetIterator
ARGS = [(10,)] # [(i,) for i in range(1, 1001, 1)]
ROUNDS = 10
RECORD_ID_LENGTH = range(1, 100, 1) # Value of Lego data.
RECORD_ROUNDING = [[3 for i in range(100)]]
RECORD_TOTAL_LENGTH = 100
POSITIVE_ONLY = False
RESUME = False
DIRECTORY = config.EVAL_DIR + "metric" + "/"
os.makedirs(DIRECTORY, exist_ok=True)
# -----------------------------------------------------------------------------
log = configure_root_loger(logging.INFO, None)
def write_header(file_path: str, row_format: str) -> None:
"""Write eval header to file"""
with open(file_path, 'w') as fd:
fd.write("------------------------HEADER------------------------\n")
fd.write(f"EVAL: Metric Eval\n")
fd.write(f"Metric: {get_metric_name()}\n")
fd.write(f"Metric Args: {str(ARGS)}\n")
fd.write(f"Positive Only: {str(POSITIVE_ONLY)}\n")
fd.write(f"Data source: Random Data\n")
fd.write(f"Record Rounding: {RECORD_ROUNDING}\n")
fd.write(f"Record ID Length: {RECORD_ID_LENGTH}\n")
fd.write(f"Record Total Length: {RECORD_TOTAL_LENGTH}\n")
fd.write(f"Rounds: {ROUNDS}\n")
fd.write(f"{row_format}\n")
fd.write("----------------------END-HEADER----------------------\n")
def get_metric_name() -> str:
"""Return the name of the configured metric class."""
return getattr(METRIC, '__name__', 'Unknown')
def fake_data_eval(base_name: str):
"""Evaluate the metric with fake data."""
# Read Lego data
file_path = DIRECTORY + base_name + '.csv'
row_fmt = ("ROUND;"
"METRIC;"
"OFFSET;"
"POS_ONLY;"
"REC_LEN;"
"REC_ID_LEN;"
"REC_ROUNDING"
";#CANDIDATES")
if not RESUME or not os.path.exists(file_path):
write_header(file_path, row_fmt)
# Start loop
for round in lb(range(0, ROUNDS), "Rounds", position=0):
for rounding in lb(RECORD_ROUNDING, "Rounding", leave=False):
for id_len in lb(RECORD_ID_LENGTH, "ID Length", leave=False):
for total_len in lb(RECORD_TOTAL_LENGTH, "Total Length",
leave=False):
for offset in lb(ARGS, "Arguments", leave=False):
# target = [random.random() for _ in range(total_len)]
rounding_vec = [rounding[i] for i in range(id_len)]
target = [float(100) for i in range(total_len)]
it = METRIC(target,
*offset,
rounding_vec=rounding_vec,
record_id_length=id_len)
total = comp_offset_num(it)
if len(offset) == 1:
offset = offset[0]
with open(file_path, 'a') as f:
f.write(';'.join((
str(round),
str(get_metric_name()),
str(offset),
str(POSITIVE_ONLY),
str(total_len),
str(id_len),
str(rounding_vec[0]),
str(total),
)) + '\n')
if __name__ == '__main__':
p = argparse.ArgumentParser("Metric Eval")
p.add_argument('--resume', help="Resume Eval", action="store_true")
p.add_argument('-o', '--out', type=str, action='store',
help="Filename", required=True)
p.add_argument('-r', '--rounds', help="How many rounds to perform?",
default=ROUNDS, type=int)
cases = p.add_mutually_exclusive_group(required=False)
cases.add_argument('--id1', action='store_true')
cases.add_argument('--id2', action='store_true')
cases.add_argument('--id3', action='store_true')
cases.add_argument('--id4', action='store_true')
cases.add_argument('--id5', action='store_true')
cases.add_argument('--id6', action='store_true')
cases.add_argument('--id7', action='store_true')
cases.add_argument('--id8', action='store_true')
cases.add_argument('--id9', action='store_true')
cases.add_argument('--id10', action='store_true')
cases.add_argument('--id11', action='store_true')
cases.add_argument('--id12', action='store_true')
args = p.parse_args()
filename = args.out
ROUNDS = args.rounds
if args.id1:
METRIC = RelativeOffsetIterator
ARGS = [(i,) for i in np.arange(1, 50, 0.01)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 100
RECORD_ID_LENGTH = 10
RECORD_ROUNDING = [[3 for _ in range(RECORD_ID_LENGTH)]]
elif args.id2:
METRIC = RelativeOffsetIterator
ARGS = [(i,) for i in np.arange(1, 50, 0.01)]
POSITIVE_ONLY = True
RECORD_TOTAL_LENGTH = 100
RECORD_ID_LENGTH = 10
RECORD_ROUNDING = [[3 for _ in range(RECORD_ID_LENGTH)]]
elif args.id3:
METRIC = RelativeOffsetIterator
ARGS = [(10,)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = range(10, 100)
RECORD_ID_LENGTH = 10
RECORD_ROUNDING = [[3 for _ in range(RECORD_ID_LENGTH)]]
elif args.id4:
METRIC = RelativeOffsetIterator
ARGS = [(10,)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 100
RECORD_ID_LENGTH = range(1, 100)
RECORD_ROUNDING = [[3 for _ in range(100)]]
elif args.id5:
METRIC = RelativeOffsetIterator
ARGS = [(10,)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 100
RECORD_ID_LENGTH = 10
RECORD_ROUNDING = [[i for _ in range(RECORD_ID_LENGTH)] for i in
range(1, 10)]
elif args.id6:
METRIC = RelativeOffsetIterator
ARGS = [(i,) for i in np.arange(1, 20, 0.01)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 28
RECORD_ID_LENGTH = 21
RECORD_ROUNDING = [[2 for _ in range(RECORD_ID_LENGTH)]]
elif args.id7:
METRIC = RelativeOffsetIterator
ARGS = [(10,)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 28
RECORD_ID_LENGTH = 21
RECORD_ROUNDING = [[i for _ in range(RECORD_ID_LENGTH)] for i in
range(1, 10)]
elif args.id8:
METRIC = RelativeOffsetIterator
ARGS = [(10,)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 28
RECORD_ID_LENGTH = range(1, 21)
RECORD_ROUNDING = [[2 for _ in range(21)]]
elif args.id9:
METRIC = RelativeOffsetIterator
ARGS = [(i,) for i in np.arange(1, 20, 0.01)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 19
RECORD_ID_LENGTH = 17
RECORD_ROUNDING = [[3 for _ in range(RECORD_ID_LENGTH)]]
elif args.id10:
METRIC = RelativeOffsetIterator
ARGS = [(10,)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 19
RECORD_ID_LENGTH = 17
RECORD_ROUNDING = [[i for _ in range(RECORD_ID_LENGTH)] for i in
range(1, 10)]
elif args.id11:
METRIC = RelativeOffsetIterator
ARGS = [(10,)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 19
RECORD_ID_LENGTH = range(1, 17)
RECORD_ROUNDING = [[3 for _ in range(17)]]
elif args.id12:
METRIC = RelativeOffsetIterator
ARGS = [(10,)]
POSITIVE_ONLY = False
RECORD_TOTAL_LENGTH = 100
RECORD_ID_LENGTH = 10
RECORD_ROUNDING = [[j] + [3 for i in range(9)] for j in range(1, 10)]
fake_data_eval(filename)
| 8,233
| 37.657277
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/eval/__init__.py
| 0
| 0
| 0
|
py
|
|
parameter-exchange
|
parameter-exchange-master/src/eval/data_provider.py
|
#!/usr/bin/env python3
"""Evaluates Data Provider App.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import atexit
import contextlib
import json
import logging
import os
import pickle
import random
import select
import shutil
import subprocess
import sys
import time
from typing import List, Iterable
from data_provider import DataProvider
from eval import shared as shd
from eval.shared import lb
from lib import config, helpers, logging as logg
from lib import db_cli as db
from lib.base_client import UserType, ServerType
# Constants -------------------------------------------------------------------
LOGLVL = logging.INFO
TIME_PER_UPLOAD = 10 # s
TIME_OFFSET = 100
DIRECTORY = config.EVAL_DIR + "provider" + "/"
os.makedirs(DIRECTORY, exist_ok=True)
DIFF = 100 # Maximal difference each entry of the records may have from target
ROUNDS = 10
NUM_UPLOADS = [1] + list(range(100, 1001, 100))
REC_LEN = 100
REC_ID_LEN = 10
REC_ROUND = [3 for _ in range(REC_ID_LEN)]
TLS = config.OT_TLS
MODE = "RANDOM"
log = logg.configure_root_loger(
LOGLVL, config.WORKING_DIR + 'data/dp_eval.log')
atexit.register(shutil.rmtree, config.TEMP_DIR, True)
if config.OT_TLS or config.PSI_TLS:
raise RuntimeError("TLs should be disabled.")
# -----------------------------------------------------------------------------
def write_header(file_path: str, row_fmt: str):
"""Write header into csv File."""
with open(file_path, 'w') as fd:
fd.write("------------------------HEADER------------------------\n")
fd.write(f"MODE: {MODE}\n")
fd.write(f"TLS: {TLS}\n")
# fd.write(f"Target: {TARGET}\n")
fd.write(f"Num Uploads (% for Scenario Evals): {NUM_UPLOADS}\n")
if MODE == "RANDOM":
fd.write(f"Diffence of each element: {DIFF}\n")
fd.write(f"\n")
fd.write(f"Bloom Capacity: {config.BLOOM_CAPACITY:,}\n")
fd.write(f"Bloom Error Rate: {config.BLOOM_ERROR_RATE}\n")
fd.write(f"Parallel Matching: {config.PARALLEL}\n")
fd.write(f"\n")
fd.write(f"OT Malicious Secure: {config.OT_MAL_SECURE}\n")
fd.write(f"OT TLS: {config.OT_TLS}\n")
fd.write(f"OT Setsize/Number of keys: {config.OT_SETSIZE:,}\n")
fd.write(f"Hash Key Length: {config.HASHKEY_LEN}\n")
fd.write(f"Encryption Key Length: {config.ENCKEY_LEN}\n")
fd.write(f"\n")
fd.write(f"Record Length: {REC_LEN}\n")
fd.write(f"Record ID Length: {REC_ID_LEN}\n")
fd.write(f"Record Rounding: {REC_ROUND}\n")
fd.write(f"\n")
fd.write(f"Rounds: {ROUNDS}\n")
fd.write(f"Interval of RAM measurements: {config.RAM_INTERVAL}s\n")
fd.write(f"All times in Seconds! Timer start with 'StartTime' and is "
f"monotonic clock. Only differences are meaningful.\n")
fd.write(f"{row_fmt}\n")
fd.write("----------------------END-HEADER----------------------\n")
def generate_records(t: List[float], num: int) -> Iterable[List[float]]:
"""
Generate 'num' many vectors based on the target 't'.
:param num: # of records to generate
:param t: Target record
:return: List of matching records
"""
candidates = (
[i + random.uniform(0, DIFF) for i in t]
for _ in range(num)
)
log.info(f"Generated {num} records.")
return candidates
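# Usage sketch (illustrative only): three records scattered around a
# four-element target, materialized for upload.
# recs = list(generate_records([1.0, 2.0, 3.0, 4.0], 3))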
def kill_bg_servers():
"""Kill old processes if running"""
subprocess.run(["tmux", "kill-session", "-t", "eval"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.run(["pkill", "-9", "celery"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
atexit.register(kill_bg_servers)
def preparation():
"""Prepare databases and start background tasks."""
# Kill old processes if running
kill_bg_servers()
time.sleep(10)
data_dir = config.DATA_DIR
log.info("Removing Databases.")
with contextlib.suppress(FileNotFoundError):
# Remove Bloom Filter
os.remove(data_dir + config.BLOOM_FILE)
# Remove Databases
os.remove(data_dir + config.KEYSERVER_DB)
os.remove(data_dir + config.STORAGE_DB)
# Add User
log.info("Prepare User DB.")
db.main(UserType.CLIENT, ['testuser', 'password', '-a'], no_print=True)
db.main(UserType.OWNER, ['testprovider', 'password', '-a'], no_print=True)
log.info("Starting Background Servers.")
subprocess.run([f"{config.WORKING_DIR}src/allStart.sh", "eval"])
time.sleep(10)
# Create data provider client
d = DataProvider('testprovider')
d.set_password('password')
# Check that servers are really online
tries = 0
done = False
while not done:
try:
if tries >= 1:
# Try to start servers again.
kill_bg_servers()
time.sleep(10)
subprocess.run(
[f"{config.WORKING_DIR}src/allStart.sh", "eval"])
time.sleep(10)
tries = 0
# Check Key Server
d.get_token(ServerType.KeyServer)
# Check celery
r = d.get(d.KEYSERVER.replace('provider', 'celery'))
if r.content != b"True":
raise RuntimeError("Celery of keyserver not started.")
# Check Storage Server
d.get_token(ServerType.StorageServer)
# Check celery
r = d.get(d.STORAGESERVER.replace('provider', 'celery'))
if r.content != b"True":
raise RuntimeError("Celery of storage-server not started.")
# Success
done = True
except Exception as e:
log.error(f"Server not up, yet. Try: {tries}. Error: {str(e)}")
tries += 1
time.sleep(5)
def start(filename: str, com_file: str) -> subprocess.Popen:
"""Start a data provider App process and return it.
:param filename: The file containing the records to upload.
:param com_file: Communication file.
:return: The created process
"""
cmd = ["python3", "data_provider.py", "testprovider", "password", "-f",
filename, "-e", com_file]
proc = subprocess.Popen(cmd, universal_newlines=True,
stderr=subprocess.PIPE)
# , stdout=subprocess.PIPE)
return proc
def main(base_filename: str, resume: bool = False):
"""Execute evaluaiton."""
file_path = DIRECTORY + base_filename + ".csv"
ram_path = DIRECTORY + base_filename + '_ram.csv'
row_fmt = (f"TIMESTAMP;"
f"ROUND;"
f"#Uploads;"
f"TotalRecLen;"
f"StartTime[s];"
f"ParseListTime[s];"
f"HashKeyTime[s];"
f"HashSetTime[s];"
f"OTIndexTime[s];"
f"KeyRetrievalTime(OT)[s];"
f"SetKeyTime[s];"
f"EncryptionTime[s];"
f"SendTime[s];"
f"FromKS[Byte];"
f"FromKS[Pkt];"
f"ToKS[Byte];"
f"ToKs[Pkt];"
f"FromSS[Byte];"
f"FromSS[Pkt];"
f"ToSS[Byte];"
f"ToSS[Pkt];"
f"ToOTSvr[Byte];"
f"ToOTSvr[Pkt];"
f"FromOTSvr[Byte];"
f"FromOTSvr[Pkt];"
f"SentJSONSize[Length];"
f"Error")
if not resume or not os.path.exists(file_path):
write_header(file_path, row_fmt)
row_fmt = "TIMESTAMP;ROUND;UPLOADS;TotalRecLen;json.dumps(ram_usage)"
write_header(ram_path, row_fmt)
for r in lb(range(ROUNDS), "Rounds", leave=True):
failed = True
for m in lb(NUM_UPLOADS, "Uploads", leave=True):
for rec_len in lb(REC_LEN, "Total Length", leave=False):
success = False
# On preserver, the time per upload is approx 1s
while not success:
# Reset config
shd.reset_config()
# Configuration
shd.set_rounding(REC_ROUND)
shd.set_rec_id_len(REC_ID_LEN)
shd.set_total_rec_len(rec_len)
log.info("Doing Preparation.")
preparation()
failed = False
tempfiles = []
process = None
com_file = helpers.get_temp_file() + '_comfile.pyc'
e = None
# May be deleted by clean-up of prev. round
os.makedirs(config.TEMP_DIR, exist_ok=True)
try:
error = ""
# Start data measurements
tks, tks_file = helpers.start_trans_measurement(
config.KEY_API_PORT, direction="dst", sleep=False
)
fks, fks_file = helpers.start_trans_measurement(
config.KEY_API_PORT, direction="src", sleep=False
)
tss, tss_file = helpers.start_trans_measurement(
config.STORAGE_API_PORT, direction="dst", sleep=False
)
fss, fss_file = helpers.start_trans_measurement(
config.STORAGE_API_PORT, direction="src", sleep=False
)
tempfiles = [tks_file, fks_file, tss_file, fss_file]
measurements = [tks, fks, tss, fss]
time.sleep(0.5)
upload_file = helpers.get_temp_file() + "_upload.txt"
if MODE == "IKV" or MODE == "WZL" or MODE == "IKV2":
if "IKV" in MODE:
source_file = f"{config.WORKING_DIR}data/" \
f"ikv_data.txt"
else:
source_file = f"{config.WORKING_DIR}data/" \
f"wzl_data.txt"
# Only upload m%
with open(source_file, "r") as f:
lines = f.readlines()
num = int(len(lines) / 100 * m)
if MODE == "IKV2":
# non-random
uploads = lines[:num]
else:
uploads = random.choices(lines, k=num)
with open(upload_file, "w") as f:
f.writelines(uploads)
# Update Value
m = num
else:
recs = generate_records(
[float(i + 1) for i in range(rec_len)],
m)
with open(upload_file, "w") as f:
for rec in recs:
f.write(str(rec) + '\n')
longest_exec_time = TIME_PER_UPLOAD * m + TIME_OFFSET
start_runtime = time.monotonic()
process = start(upload_file, com_file)
# Check if client socket hangs
poll_err = select.poll()
poll_err.register(process.stderr,
select.POLLIN)
hangs = False
ctr = 0
while True:
try:
process.wait(30)
# Terminated
break
except subprocess.TimeoutExpired:
ctr += 1
total_wait_time = time.monotonic() - start_runtime
if total_wait_time > longest_exec_time:
raise RuntimeError(
"Execution time too long.")
# check if there is something on stderr
hang_detected = False
while poll_err.poll(0):
line = process.stderr.readline().strip()
if line != "":
print(f"Read Line: '{line}'")
if "client socket connect error" in line:
hang_detected = True
if hangs and hang_detected:
# Still hangs after 30s. Restart.
raise RuntimeError("Client hangs.")
hangs = hang_detected
# Load com file
with open(com_file, "rb") as fd:
e = pickle.load(fd)
if e['error'] is not None:
raise RuntimeError(e['error'])
result = e['result']
ram_usage = e['ram_usage']
ot_files_sent = e['ot_tcpdump_sent']
ot_files_recv = e['ot_tcpdump_recv']
# Kill TCPDUMP
helpers.kill_tcpdump()
for proc in measurements:
# Wait for termination
proc.wait(30)
# Check files exist
if m > 0 and (len(ot_files_sent) == 0 or
len(ot_files_recv) == 0):
raise RuntimeError(
'OT executed but no pcap files available.')
# Get Data Amount results
fks_byte, fks_pkt = helpers.read_tcpstat_from_file(
fks_file)
tks_byte, tks_pkt = helpers.read_tcpstat_from_file(
tks_file)
fss_byte, fss_pkt = helpers.read_tcpstat_from_file(
fss_file)
tss_byte, tss_pkt = helpers.read_tcpstat_from_file(
tss_file)
# Get OT Transmission info
ot_byte_sen = 0
ot_pkt_sen = 0
ot_byte_rec = 0
ot_pkt_rec = 0
for file in ot_files_sent:
b, pkt = helpers.read_tcpstat_from_file(file)
ot_byte_sen += b
ot_pkt_sen += pkt
os.remove(file)
for file in ot_files_recv:
b, pkt = helpers.read_tcpstat_from_file(file)
ot_byte_rec += b
ot_pkt_rec += pkt
os.remove(file)
with open(file_path, "a") as fd:
row = ';'.join((
time.strftime('%Y-%m-%d %H:%M:%S'),
str(r),
str(m),
str(rec_len),
str(e['start_time']),
str(e['parsed_list_time']),
str(e['hash_key_time']),
str(e['hash_set_time']),
str(e['ot_index_time']),
str(e['key_retrieve_time']),
str(e['set_key_time']),
str(e['encryption_time']),
str(e['send_time']),
str(fks_byte),
str(fks_pkt),
str(tks_byte),
str(tks_pkt),
str(fss_byte),
str(fss_pkt),
str(tss_byte),
str(tss_pkt),
str(ot_byte_sen),
str(ot_pkt_sen),
str(ot_byte_rec),
str(ot_pkt_rec),
str(e['json_length']),
error
))
fd.write(
f"{row}\n")
with open(ram_path, 'a') as fd:
fd.write(
';'.join(
(
time.strftime('%Y-%m-%d %H:%M:%S'),
str(r),
str(m),
str(rec_len),
json.dumps(ram_usage)
)
) + '\n'
)
success = True
except Exception as e:
log.exception(str(e))
success = False
failed = True # Restart server
finally:
# Clean Up
if process is not None:
process.terminate()
try:
process.wait(5)
except subprocess.TimeoutExpired:
# Terminate was not enough
process.kill()
# Kill TCPDUMP
helpers.kill_tcpdump()
# Remove Tempfiles
shutil.rmtree(config.TEMP_DIR, ignore_errors=True)
# Reset config file
shd.reset_config()
def get_client_parser() -> argparse.ArgumentParser:
"""Return an argparser for the client eval."""
parser = argparse.ArgumentParser(description="Data Provider Eval")
parser.add_argument('-r', '--reps', help="Rounds", action='store',
default=ROUNDS, type=int)
# parser.add_argument('-n', '--num', help="# records to upload",
# action='store',
# default=NUM_UPLOADS, type=int)
parser.add_argument('-o', '--out', type=str, action='store',
help="Base filename WITHOUT file-ending!",
required=True)
# parser.add_argument('-l', '--length', type=int,
# default=config.RECORD_LENGTH,
# help="Length of randomrecords")
parser.add_argument('--resume', action="store_true",
help="Append to file.",
default=False)
action_group = parser.add_mutually_exclusive_group(required=False)
action_group.add_argument('--wzl', action='store_true',
help='Use WZL Data.')
action_group.add_argument('--ikv', action='store_true',
help='Use IKV Data.')
action_group.add_argument('--ikv2', action='store_true',
help='Use non-random IKV Data.')
action_group.add_argument('--uploads', action='store_true',
help='Use Random Data and vary num uploads.')
action_group.add_argument('--rec_len', action='store_true',
help='Use Random Data and vary record length.')
action_group.add_argument('--debug', action='store_true',
help='Debug Mode.')
return parser
if __name__ == '__main__':
if not config.EVAL:
log.error("config.EVAL has to be True.")
sys.exit(-1)
p = get_client_parser()
args = p.parse_args()
ROUNDS = args.reps
# NUM_UPLOADS = args.num
if args.wzl:
MODE = "WZL"
REC_LEN = 19
REC_ID_LEN = 17
REC_ROUND = [0, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 0, 0, 3]
# Upload in Percent
NUM_UPLOADS = range(10, 101, 10)
elif args.ikv:
MODE = "IKV"
REC_LEN = 28
REC_ID_LEN = 21
REC_ROUND = [2 for _ in range(REC_ID_LEN)]
# Upload in Percent
NUM_UPLOADS = range(10, 101, 10)
elif args.ikv2:
MODE = "IKV2"
REC_LEN = 28
REC_ID_LEN = 21
REC_ROUND = [2 for _ in range(REC_ID_LEN)]
# Upload in Percent
NUM_UPLOADS = range(10, 101, 10)
elif args.uploads:
MODE = "RANDOM"
REC_LEN = 100
REC_ID_LEN = 10
REC_ROUND = [3 for _ in range(REC_ID_LEN)]
NUM_UPLOADS = [1] + list(range(100, 1001, 100))
elif args.rec_len:
MODE = "RANDOM"
REC_LEN = list(range(100, 1001, 100))
REC_ID_LEN = 10
REC_ROUND = [3 for _ in range(REC_ID_LEN)]
NUM_UPLOADS = 100
elif args.debug:
# Debug
MODE = "RANDOM"
REC_LEN = 100
REC_ID_LEN = 10
REC_ROUND = [3 for _ in range(REC_ID_LEN)]
NUM_UPLOADS = list(range(500, 501, 100))
ROUNDS = 5
main(args.out, args.resume)
| 21,695
| 40.563218
| 82
|
py
|
parameter-exchange
|
parameter-exchange-master/src/eval/ot_eval.py
|
#!/usr/bin/env python3
"""Evaluate OT properties.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import contextlib
import logging
import os
import platform
import subprocess
import sys
import time
from lib import config, helpers
from lib.helpers import get_free_port, add_latency, reset_port, \
add_async_bandwidth
from lib.logging import configure_root_loger
from .shared import lb, read_output, check_if_client_hangs
sys.path.append(config.WORKING_DIR + 'cython/ot')
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTSender # noqa
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTReceiver # noqa
# Constants -------------------------------------------------------------------
SET_SIZES = [1] + list(range(1000000, 1300001, 1000000)) + \
[2 ** i for i in range(20, 24)] # 2 ** 20
NUM_OTS = [1] + list(range(20, 101, 20))
LATENCY = 0 # range(0, 301, 50) # ms
BANDWIDTH = 0 # [0, 6000, 50000, 100000]
RESUME = False
HOST = "localhost"
NUMTHREADS = 1
STATSECPARAM = 40
MALSECURE = False
INPUTBITCOUNT = 128
TLS = False
ROUNDS_START = 0
ROUNDS_END = 10
DIRECTORY = config.EVAL_DIR + "ot" + "/"
os.makedirs(DIRECTORY, exist_ok=True)
CREATE_TCPDUMP = False
# -----------------------------------------------------------------------------
log = configure_root_loger(logging.INFO, None)
def write_header(e_type: str, file_path: str, row_fmt: str):
"""Write header of eval files."""
with open(file_path, 'w') as fd:
fd.write("------------------------HEADER------------------------\n")
fd.write(f"EVAL: {e_type}\n")
fd.write(f"OT Setsize: {SET_SIZES}\n")
fd.write(f"Performed OTs: {NUM_OTS}\n")
fd.write(f"Rounds: {ROUNDS_END}\n")
fd.write(f"TLS: {TLS}\n")
fd.write(f"Malicious Secure: {MALSECURE}\n")
fd.write(f"Latency: {LATENCY}\n")
fd.write(f"Statistical Security Paramters: {STATSECPARAM}\n")
fd.write(f"Input Bit Count: {INPUTBITCOUNT}\n")
fd.write(f"Threads: {NUMTHREADS}\n")
fd.write(f"RAM Measurement Interval: 0.5s\n")
fd.write(f"RAM measurements written to filename_serverram.csv and "
f"filename_receiverram.csv.\n")
fd.write(f"{row_fmt}\n")
fd.write("----------------------END-HEADER----------------------\n")
def create_tcpdump(setsize: int, num_ots: int, rep: int,
port: int) -> subprocess.Popen:
"""Create a full pcap of the communication."""
out_dir = DIRECTORY + 'tcpdumps/'
os.makedirs(out_dir, exist_ok=True)
filepath = out_dir + f'dump_{setsize}_{num_ots}_{rep}.pcap'
cmd = ['sudo', 'tcpdump', '-i', 'lo', '-w', filepath, 'port', str(port)]
s = subprocess.Popen(cmd)
return s
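# Usage sketch (illustrative only): capture all loopback traffic of one run.
# dump = create_tcpdump(2 ** 20, 100, 0, 5000)
# ...run the OT...
# helpers.kill_tcpdump()  # also terminates the capture started above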
def ot_time(base_name: str) -> None:
"""
Main method of OT eval.
:param base_name:
:return:
"""
server_ram_file = DIRECTORY + base_name + '_serverram.csv'
client_ram_file = DIRECTORY + base_name + '_clientram.csv'
file_path = DIRECTORY + base_name + '.csv'
rs = ROUNDS_START
row_fmt = f"TIMESTAMP;ROUND;SETSIZE;NUMOTS;TLS;MALSECURE;LATENCY;" \
f"BANDWIDTH[kBit/s];STATSECPARAM;INPUTBITCOUNT;THREADS;" \
f"SENDERTIME[s];" \
f"RECEIVERTIME[s];" \
f"ClientToServer[Byte];ClientToServer[Packets];" \
f"ServerToClient[Byte];ServerToClient[Packets];" \
f"[ERROR]"
if not RESUME or not os.path.exists(file_path):
# Only write the header for new files
write_header("PSI Time Eval", file_path, row_fmt)
row_fmt = "TIMESTAMP;ROUND;SETSIZE;NUMOTS;TLS;MALICIOUS;LATENCY;" \
"BANDWIDTH[kBit/s];STATSECPARAM;INPUTBITCOUNT;THREADS;" \
"json.dumps(mem);ERROR"
write_header("PSI Time Eval", server_ram_file, row_fmt)
write_header("PSI Time Eval", client_ram_file, row_fmt)
for r in lb(range(rs, ROUNDS_END), "Rounds", leave=True):
for s in lb(SET_SIZES, "Set Sizes", leave=False):
for o in lb(NUM_OTS, "Total OTs", leave=False):
for stat in lb(STATSECPARAM, "StatSecParam", leave=False):
for latency in lb(LATENCY, "Latency", leave=False):
for bw in lb(BANDWIDTH, "Rate", leave=False):
success = False
while not success:
port = get_free_port()
error = ""
# Add latency/bw-limit to port
if platform.system() != "Darwin":
# Latency does not work on Mac
reset_port()
if latency != 0:
add_latency(latency)
if bw != 0:
add_async_bandwidth(bw, port)
elif LATENCY != 0 or BANDWIDTH != 0:
raise RuntimeError(
"Mac does not support latencies and"
"bandwidths.")
stc, stc_file = \
helpers.start_trans_measurement(
port, direction="src", sleep=False)
cts, cts_file = \
helpers.start_trans_measurement(
port, direction="dst", sleep=False)
if CREATE_TCPDUMP:
tcpdump_p = create_tcpdump(s, o, r, port)
time.sleep(0.5) # Wait for start
sp, cp = None, None
try:
sp = start(False, port, s, o, stat,
MALSECURE,
TLS, server_ram_file, r,
latency,
HOST, False, )
cp = start(True, port, s, o, stat,
MALSECURE,
TLS, client_ram_file, r,
latency,
HOST, False)
# Check that connection was successful.
check_if_client_hangs(cp)
sp.wait()
cp.wait()
# Read server output
output, err = sp.communicate()
if err != "":
log.error(err)
error += err.strip().replace('\n', '\\n')
server_time, server_mem, err = read_output(
output)
error += err
# Read client output
output, err = cp.communicate()
err = err.replace(
"client socket connect error (hangs).",
"").strip()
if err != "":
log.error(err)
error += err.strip().replace('\n', '\\n')
client_time, client_mem, err = read_output(
output)
error += err
# Stop transmission measurement
# Kill TCPDUMP
helpers.kill_tcpdump()
stc.wait(30)
cts.wait(5)
server_sent, server_pkts = \
helpers.read_tcpstat_from_file(
stc_file)
client_sent, client_pkts = \
helpers.read_tcpstat_from_file(
cts_file)
timestamp = time.strftime(
'%Y-%m-%d %H:%M:%S')
# Standard File
with open(file_path, "a") as fd:
fd.write(';'.join((
timestamp,
str(r),
str(s),
str(o),
str(TLS),
str(MALSECURE),
str(latency),
str(bw),
str(STATSECPARAM),
str(INPUTBITCOUNT),
str(NUMTHREADS),
str(server_time),
str(client_time),
str(client_sent),
str(client_pkts),
str(server_sent),
str(server_pkts),
error
)) + '\n')
# Client RAM
with open(client_ram_file, 'a') as fd:
fd.write(
';'.join((
timestamp,
str(r),
str(s),
str(o),
str(TLS),
str(MALSECURE),
str(latency),
str(bw),
str(STATSECPARAM),
str(INPUTBITCOUNT),
str(NUMTHREADS),
str(client_mem),
error
)) + '\n'
)
# Server RAM
with open(server_ram_file, 'a') as fd:
fd.write(
';'.join((
timestamp,
str(r),
str(s),
str(o),
str(TLS),
str(MALSECURE),
str(latency),
str(bw),
str(STATSECPARAM),
str(INPUTBITCOUNT),
str(NUMTHREADS),
str(server_mem),
error
)) + '\n'
)
success = True
except Exception as e:
log.exception(str(e))
success = False
finally:
# Clean Up
# Kill TCPDUMP
helpers.kill_tcpdump()
# Remove tempfiles
with contextlib.suppress(
FileNotFoundError):
os.remove(stc_file)
os.remove(cts_file)
if sp is not None:
sp.kill()
if cp is not None:
cp.kill()
# Remove latency
if latency != 0 or bw != 0:
reset_port()
def start(isClient: bool, port: int, set_size: int, num_ots: int,
statsecparam: int, malicious: bool, tls: bool,
ram_file: str, rnd: int, latency: int,
host: str = 'localhost', debug: bool = False) -> subprocess.Popen:
"""
Start OT sender/receiver in new process.
:return: Create Popen object
"""
# measure = '/usr/bin/time -f "%e:%M"'
# environment = 'OMP_NUM_THREADS=' + str(C_cur)
if isClient:
cmd = 'python3 -m eval.ot.receiver'
else:
cmd = 'python3 -m eval.ot.sender'
cmd += f" -p {port} -s {set_size} --statsecparam {statsecparam} -n " \
f"{num_ots} --host {host} -r {rnd} -l {latency} -o {ram_file}"
if tls:
cmd += ' -t'
if malicious:
cmd += ' -m'
log.debug('Execute: {}'.format(cmd))
cmd = cmd.split(" ")
proc = subprocess.Popen(cmd, universal_newlines=True,
stderr=subprocess.PIPE, stdout=subprocess.PIPE)
return proc
if __name__ == '__main__':
p = argparse.ArgumentParser("OT Eval")
p.add_argument('--resume', help="Resume Eval", action="store_true")
p.add_argument('-r', '--reps', help="Rounds", action='store', default=0,
type=int)
p.add_argument('-o', '--out', type=str, action='store',
help="Base filename WITHOUT file-ending!", required=True)
p.add_argument('-t', '--tls', type=int, action='store',
help="TLS Activated? [1 or 0]", choices=[0, 1],
required=True)
p.add_argument('-m', '--malicious', action='store_true',
help="Use OOS16?")
p.add_argument('-s', '--setsize',
help="Setsize: Either constant or 3 values.",
metavar=('CONSTANT/MIN', 'MAX STEP'),
nargs='+', action='store', type=int)
p.add_argument('-n', '--numOTs',
help="# OTs to perform: Either constant or 3 values.",
metavar=('CONSTANT/MIN', 'MAX STEP'),
nargs='+', action='store', type=int)
p.add_argument('--statsecparam',
help="Statistical Sec. Parameters: "
"Either constant or 3 values.",
metavar=('CONSTANT/MIN', 'MAX STEP'),
nargs='+', action='store', type=int)
p.add_argument('-l', '--latency',
help="Latency: Either constant or 3 values.",
metavar=('CONSTANT/MIN', 'MAX STEP'),
nargs='+', action='store', type=int)
p.add_argument('-b', '--bandwidth', action='store_true',
help="Limit bandwidth?")
    p.add_argument('--baseline', action='store_true',
                   help="Use the 76-bit input baseline?")
args = p.parse_args()
if args.resume:
RESUME = True
if args.malicious:
MALSECURE = True
INPUTBITCOUNT = 76
if args.tls > 0:
TLS = True
else:
TLS = False
if args.reps > 0:
ROUNDS_END = args.reps
filename = args.out
if args.setsize is not None:
if len(args.setsize) == 1:
SET_SIZES = [args.setsize[0]]
elif len(args.setsize) == 3:
SET_SIZES = range(args.setsize[0],
args.setsize[1] + args.setsize[2],
args.setsize[2])
else:
raise ValueError("Either 1 or 3 setsize parameters!")
if args.numOTs is not None:
if len(args.numOTs) == 1:
NUM_OTS = [args.numOTs[0]]
elif len(args.numOTs) == 3:
NUM_OTS = range(args.numOTs[0],
args.numOTs[1] + args.numOTs[2],
args.numOTs[2])
else:
raise ValueError("Either 1 or 3 # OTs parameters!")
if args.statsecparam is not None:
if len(args.statsecparam) == 1:
STATSECPARAM = [args.statsecparam[0]]
elif len(args.statsecparam) == 3:
STATSECPARAM = range(args.statsecparam[0],
args.statsecparam[1] + args.statsecparam[2],
args.statsecparam[2])
else:
raise ValueError("Either 1 or 3 statSecParam parameters!")
if args.latency is not None:
if len(args.latency) == 1:
LATENCY = [args.latency[0]]
elif len(args.latency) == 3:
LATENCY = range(args.latency[0],
args.latency[1] + args.latency[2],
args.latency[2])
else:
raise ValueError("Either 1 or 3 latency parameters!")
    if args.bandwidth:
        BANDWIDTH = [0, 6000, 50000, 100000]
if args.baseline:
INPUTBITCOUNT = 76
    psi_time(filename)
| 18,180 | 44.339152 | 79 | py |
| parameter-exchange | parameter-exchange-master/src/eval/psi/sender.py |
#!/usr/bin/env python3
"""PSI Sender script
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import json
import sys
import time
from memory_profiler import memory_usage
from lib import config
sys.path.append(config.WORKING_DIR + 'cython/psi')
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSISender # noqa
SERVERCERT = config.KEY_TLS_CERT
SERVERKEY = config.KEY_TLS_KEY
RAM_INTERVAL = 0.5
def get_sender(port, setSize: int, stat: int,
tls: bool, host: str = 'localhost') -> PyPSISender:
"""Return a configured PSISender."""
sender = PyPSISender()
sender.setSize = setSize
sender.hostName = host
sender.port = port
sender.numThreads = 1
sender.tls = tls
sender.statSecParam = stat
sender.serverCert = SERVERCERT
sender.serverKey = SERVERKEY
return sender
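# A minimal usage sketch (hypothetical values; assumes the compiled
# cPSIInterface extension is available and a matching receiver connects
# to the same port):
#
#   sender = get_sender(1213, 1000, 40, False)
#   sender.execute("KKRT16", list(range(1000)))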
if __name__ == "__main__":
error = ""
    p = argparse.ArgumentParser("PSI Sender")
p.add_argument('-t', '--tls', action='store_true',
help="TLS", default=False)
p.add_argument('-m', '--malicious', action='store_true',
help="Use RR16?", default=False)
p.add_argument('-s', '--set_size', required=True,
help="Setsize.", action='store', type=int)
p.add_argument('--statsecparam', dest='stat',
help="Statistical Sec. Parameters",
action='store', type=int, default=40)
p.add_argument('--host', type=str, help="Host", default="localhost")
p.add_argument('-p', '--port', type=int, help="Port", required=True)
p.add_argument('-r', '--round', type=int, help="Round")
p.add_argument('-l', '--latency', type=int, help="Latency")
p.add_argument('-o', '--out', type=str, action='store',
help="ram_path!")
a = p.parse_args()
sender = get_sender(a.port, a.set_size, a.stat, a.tls, a.host)
if a.malicious:
protocol = "RR16"
else:
protocol = "KKRT16"
server_set = list(range(a.set_size))
start = time.monotonic()
mem = memory_usage((sender.execute, (protocol, server_set)),
interval=RAM_INTERVAL,
include_children=True,
max_iterations=1)
runtime = time.monotonic() - start
print(f"{runtime}:{json.dumps(mem)}:{error}")
| 2,402 | 31.04 | 72 | py |
| parameter-exchange | parameter-exchange-master/src/eval/psi/receiver.py |
#!/usr/bin/env python3
"""PSI receiver script.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import json
import logging
import sys
import time
from memory_profiler import memory_usage
from lib import config
sys.path.append(config.WORKING_DIR + 'cython/psi')
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSIReceiver # noqa
ROOTCA = config.TLS_ROOT_CA
RAM_INTERVAL = 0.5
log = logging.getLogger()
def get_receiver(port, setSize: int, stat: int,
tls: bool, host: str = 'localhost') -> PyPSIReceiver:
"""Return a configured PSIReceiver."""
recv = PyPSIReceiver()
recv.setSize = setSize
recv.hostName = host
recv.port = port
recv.numThreads = 1
recv.statSecParam = stat
recv.tls = tls
recv.rootCA = ROOTCA
return recv
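# A minimal usage sketch (hypothetical values; assumes a matching sender
# is already listening on the same port):
#
#   recv = get_receiver(1213, 1000, 40, False)
#   intersection = recv.execute("KKRT16", list(range(1000)))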
if __name__ == "__main__":
error = ""
p = argparse.ArgumentParser("PSI Receiver")
p.add_argument('-t', '--tls', action='store_true',
help="TLS", default=False)
p.add_argument('-m', '--malicious', action='store_true',
help="Use RR16?", default=False)
p.add_argument('-s', '--set_size', required=True,
help="Setsize.", action='store', type=int)
p.add_argument('--statsecparam', dest='stat',
help="Statistical Sec. Parameters",
action='store', type=int, default=40)
p.add_argument('--host', type=str, help="Host", default="localhost")
p.add_argument('-p', '--port', type=int, help="Port", required=True)
p.add_argument('-r', '--round', type=int, help="Round")
p.add_argument('-l', '--latency', type=int, help="Latency")
p.add_argument('-o', '--out', type=str, action='store',
help="ram_path!")
a = p.parse_args()
if a.malicious:
protocol = "RR16"
else:
protocol = "KKRT16"
recv = get_receiver(a.port, a.set_size, a.stat, a.tls, a.host)
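    # Even i map to themselves (contained in the sender's set 0..set_size-1),
    # odd i are shifted out of that range, so the expected intersection is
    # exactly the even numbers.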
client_set = [i + a.set_size * (i % 2) for i in range(a.set_size)]
start = time.monotonic()
mem, res = memory_usage((recv.execute, (protocol, client_set)),
interval=RAM_INTERVAL,
include_children=True,
retval=True,
max_iterations=1)
runtime = time.monotonic() - start
# Check result
if set(res) != set([i for i in range(a.set_size) if i % 2 == 0]):
error = "Result incorrect!"
log.error("Bad result.")
print(f"{runtime}:{json.dumps(mem)}:{error}")
| 2,612 | 30.865854 | 72 | py |
| parameter-exchange | parameter-exchange-master/src/eval/psi/__init__.py |
| 0 | 0 | 0 | py |
| parameter-exchange | parameter-exchange-master/src/eval/ot/sender.py |
#!/usr/bin/env python3
"""OT Sender script.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import json
import random
import sys
import time
from memory_profiler import memory_usage
from lib import config
sys.path.append(config.WORKING_DIR + 'cython/ot')
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTSender # noqa
SERVERCERT = config.KEY_TLS_CERT
SERVERKEY = config.KEY_TLS_KEY
MAX_VALUE = 2 ** 100
RAM_INTERVAL = 0.5
NUM_THREADS = 1
def get_sender(port: int, setsize: int, num_ots: int,
stat: int, num_threads: int, host: str,
mal_secure: bool
) -> PyOTSender:
"""Return configured sender."""
sender = PyOTSender()
sender.totalOTs = num_ots
sender.numThreads = num_threads
sender.hostName = host
sender.port = port
sender.serverKey = SERVERKEY
sender.serverCert = SERVERCERT
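    # The maliciously secure protocol (OOS16) works on 76-bit inputs here,
    # the semi-honest variant on 128-bit inputs.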
if mal_secure:
sender.maliciousSecure = True
sender.inputBitCount = 76
else:
sender.maliciousSecure = False
sender.inputBitCount = 128
sender.statSecParam = stat
sender.numChosenMsgs = setsize
return sender
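# A minimal usage sketch (hypothetical values; assumes the compiled
# cOTInterface extension is available and a matching receiver connects):
#
#   sender = get_sender(1213, 1000, 100, 40, 1, "localhost", False)
#   sender.executeSame([random.randint(0, MAX_VALUE) for _ in range(1000)],
#                      False)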
if __name__ == "__main__":
error = ""
p = argparse.ArgumentParser("OT Sender")
p.add_argument('-t', '--tls', action='store_true',
help="TLS")
p.add_argument('-m', '--malicious', action='store_true',
help="Use OOS16?", default=False)
p.add_argument('-s', '--set_size', required=True,
help="Setsize.", action='store', type=int)
p.add_argument('-n', '--num_ots', required=True,
help="# OTs to perform.", action='store', type=int)
p.add_argument('--statsecparam', dest='stat',
help="Statistical Sec. Parameters",
action='store', type=int, default=40)
p.add_argument('--host', type=str, help="Host", default="localhost")
p.add_argument('-p', '--port', type=int, help="Port", required=True)
p.add_argument('-r', '--round', type=int, help="Round")
p.add_argument('-l', '--latency', type=int, help="Latency")
p.add_argument('-o', '--out', type=str, action='store',
help="ram_path!")
a = p.parse_args()
sender = get_sender(a.port, a.set_size, a.num_ots, a.stat, NUM_THREADS,
a.host, a.malicious)
server_set = [
random.randint(0, MAX_VALUE) for _ in range(a.set_size)]
start = time.monotonic()
mem = memory_usage((sender.executeSame, (server_set, a.tls)),
interval=RAM_INTERVAL,
include_children=True,
max_iterations=1
)
runtime = time.monotonic() - start
print(f"{runtime}:{json.dumps(mem)}:{error}")
| 2,852 | 32.174419 | 75 | py |
| parameter-exchange | parameter-exchange-master/src/eval/ot/receiver.py |
#!/usr/bin/env python3
"""OT receiver script.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import json
import random
import sys
import time
from memory_profiler import memory_usage
from lib import config
sys.path.append(config.WORKING_DIR + 'cython/ot')
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTReceiver # noqa
ROOTCA = config.TLS_ROOT_CA
RAM_INTERVAL = 0.5
NUM_THREADS = 1
def get_receiver(port: int, setsize: int, num_ots: int,
stat: int, num_threads: int, host: str,
mal_secure: bool
) -> PyOTReceiver:
"""Return configured receiver."""
recv = PyOTReceiver()
recv.totalOTs = num_ots
recv.numThreads = num_threads
recv.hostName = host
recv.port = port
recv.rootCA = ROOTCA
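    # Mirror the sender's configuration: 76-bit inputs for the maliciously
    # secure protocol (OOS16), 128-bit inputs otherwise.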
if mal_secure:
recv.maliciousSecure = True
recv.inputBitCount = 76
else:
recv.maliciousSecure = False
recv.inputBitCount = 128
recv.statSecParam = stat
recv.numChosenMsgs = setsize
return recv
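# A minimal usage sketch (hypothetical values; assumes a matching sender
# is already listening on the same port):
#
#   recv = get_receiver(1213, 1000, 100, 40, 1, "localhost", False)
#   messages = recv.execute([0] * 100, False)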
if __name__ == "__main__":
error = ""
p = argparse.ArgumentParser("OT Receiver")
p.add_argument('-t', '--tls', action='store_true',
help="TLS")
p.add_argument('-m', '--malicious', action='store_true',
help="Use OOS16?", default=False)
p.add_argument('-s', '--set_size', required=True,
help="Setsize.", action='store', type=int)
p.add_argument('-n', '--num_ots', required=True,
help="# OTs to perform.", action='store', type=int)
p.add_argument('--statsecparam', dest='stat',
help="Statistical Sec. Parameters",
action='store', type=int, default=40)
p.add_argument('--host', type=str, help="Host", default="localhost")
p.add_argument('-p', '--port', type=int, help="Port", required=True)
p.add_argument('-r', '--round', type=int, help="Round")
p.add_argument('-l', '--latency', type=int, help="Latency")
p.add_argument('-o', '--out', type=str, action='store',
help="ram_path!")
a = p.parse_args()
recv = get_receiver(a.port, a.set_size, a.num_ots, a.stat, NUM_THREADS,
a.host,
a.malicious)
choices = [random.randint(0, a.set_size - 1) for _ in range(a.num_ots)]
start = time.monotonic()
mem, res = memory_usage((recv.execute, (choices, a.tls)),
interval=RAM_INTERVAL,
include_children=True,
retval=True,
max_iterations=1
)
runtime = time.monotonic() - start
print(f"{runtime}:{json.dumps(mem)}:{error}")
| 2,818 | 32.164706 | 75 | py |
| parameter-exchange | parameter-exchange-master/src/eval/ot/__init__.py |
| 0 | 0 | 0 | py |
| parameter-exchange | parameter-exchange-master/src/key_server/main.py |
#!/usr/bin/env python3
"""Blueprint for main pages.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import os
from typing import Any
from flask import (Blueprint, render_template, g, current_app,
send_from_directory)
from lib.base_server import is_redis_online
log: logging.Logger = logging.getLogger(__name__)
bp = Blueprint('main', __name__)
@bp.route('/favicon.ico')
def favicon() -> Any:
"""Return key server favicon."""
return send_from_directory(os.path.join(current_app.root_path, 'static'),
'favicon.ico', mimetype='image/vnd.microsoft'
'.icon')
def is_celery_online() -> bool:
"""Return True if celery is reachable, False otherwise."""
from key_server import celery_app
    return celery_app.control.inspect().ping() is not None
@bp.route('/')
def main() -> str:
"""State whether server is running including redis and celery."""
g.redis_online = is_redis_online(current_app.config['REDIS_PORT'])
g.celery_online = is_celery_online()
return render_template('index.html')
@bp.route('/celery')
def celery_status() -> str:
"""Return True if celery is started correctly and false otherwise."""
return str(is_celery_online())
| 1,420 | 25.314815 | 77 | py |
| parameter-exchange | parameter-exchange-master/src/key_server/connector.py |
#!/usr/bin/env python3
"""Connector to backend and celery.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import secrets
from typing import Dict
from celery import Task
from flask import g, current_app as app, request, url_for, render_template
from key_server.key_database import KeyRetrieval, HashKeyRetrieval
from lib import helpers
from lib.base_client import UserType
from lib.base_server import client_pw, provider_pw
from lib.database import db
from lib.helpers import to_base64
from lib.key_server_backend import KeyServer
from key_server import celery_app, database
from lib.user_database import get_user
log: logging.Logger = logging.getLogger(__name__)
class TaskType:
"""Allows key server tasks."""
OT = "OT"
def get_keyserver_backend() -> KeyServer:
"""Return the request's backend KeyServer object and create one if it
does not exists yet."""
if 'keyserver' not in g:
g.keyserver = KeyServer(app.config['DATA_DIR'])
return g.keyserver
def get_hash_key(user_type: str, username: str) -> dict:
"""Return the hash key in Base64 encoding.
:param username: Username
:param user_type: Type of user
:return on success looks like this:
{
'success': True,
'hash_key': 'AAAAAAAAAAAAAAAAAAAAAQ=='
}
"""
app.logger.debug('Hash Key requested.')
_add_to_hash_key_db(user_type, username)
key = to_base64(get_keyserver_backend().get_hash_key())
return {'success': True,
'hash_key': key
}
def _add_to_hash_key_db(user_type: str, username: str) -> HashKeyRetrieval:
"""
Track access to get_hash_key in database.
:param user_type: Type of user accessing API
:param username: Username
:return: Created DB object
"""
u = get_user(user_type, username)
if user_type == UserType.CLIENT:
t = HashKeyRetrieval(
client=u,
)
elif user_type == UserType.OWNER:
t = HashKeyRetrieval(
provider=u,
)
else: # pragma no cover
raise ValueError("Bad user type.")
db.session.add(t)
db.session.commit()
return t
def _add_to_key_retrieval_db(user_type: str,
username: str,
num_ots: int) -> KeyRetrieval:
"""
Track access to OT in database.
:param user_type: Type of user accessing API
:param username: Username
:param num_ots: # OTs performed == # retrieved keys
:return: Created DB object
"""
u = get_user(user_type, username)
if user_type == UserType.CLIENT:
t = KeyRetrieval(
client=u,
retrieved_keys=num_ots
)
elif user_type == UserType.OWNER:
t = KeyRetrieval(
provider=u,
retrieved_keys=num_ots
)
else: # pragma no cover
raise ValueError("Bad user type.")
db.session.add(t)
db.session.commit()
return t
def retrieve_keys(user_type: str, username: str) -> dict:
"""
Start an OT Server for encryption key retrieval.
:param username: Username of User
:param user_type: client or provider
:return: Dict containing connection information:
{
'success': True,
'port': "1213",
'host': "127.0.0.1",
'totalOTs': 10,
'tls': True
}
"""
# Get Parameters
total_ots = request.args.get('totalOTs', 0, type=int)
try:
if total_ots == 0:
raise ValueError("No total OTs defined.")
except ValueError as e:
app.logger.warning(f"Key retrieval failed: {str(e)}")
return {'success': False,
'msg': str(e)}
# Checks okay, start OT server
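    # Pick a random port in [1024, 65536) and retry until a free one is found.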
port = secrets.randbelow(65536 - 1024) + 1024
while not helpers.port_free(port):
port = secrets.randbelow(65536 - 1024) + 1024 # pragma no cover
if not app.config['KEY_RANDOMIZE_PORTS']:
if helpers.port_free(1213):
port = 1213
else:
app.logger.warning(f"Port 1213 already in use! Using {port} "
f"instead.")
app.logger.info(f"Starting OT Sending instance on port {port}.")
_add_to_key_retrieval_db(user_type, username, total_ots)
task = execute_ot.delay(total_ots, port)
if user_type == UserType.OWNER:
from key_server.provider import provider_auth
database.add_task(provider_auth.username(),
UserType.OWNER, task.id,
TaskType.OT)
else:
from key_server.client import client_auth
database.add_task(client_auth.username(),
UserType.CLIENT, task.id,
TaskType.OT)
app.logger.debug(f"OT Server Thread started on port {port}.")
return {
'success': True,
'port': port,
'host': app.config['OT_HOST'],
'totalOTs': total_ots,
'tls': app.config['OT_TLS']
}
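# The caller is expected to connect an OT receiver to the returned
# host/port. A hedged client-side sketch (PyOTReceiver as used in
# eval.ot.receiver; 'url' and the auth handling are hypothetical):
#
#   info = requests.get(url + "/client/key_retrieval?totalOTs=10",
#                       auth=(user, token)).json()
#   # ... configure a PyOTReceiver with info['host'], info['port'] and
#   # info['totalOTs'], then run execute() with the desired choice indices.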
@celery_app.task(bind=True)
def execute_ot(self: Task, total_ots: int,
port: int) -> None: # pragma no cover
"""
:param self: Celery task object
:param total_ots: Number of OTs to perform
:param port: Port to open the OT server on
:return: None
"""
log.info(f"Celery offering {total_ots} OTs on port {port}.")
    # Define a time limit to kill the task if it runs for too long, because
    # the Python code has no handle to stop it otherwise.
self.time_limit = 3600
self.update_state(state='STARTED')
get_keyserver_backend().offer_ot(total_ots, port)
self.update_state(state='SUCCESS')
def status_overview(user_type: str):
"""
Display status of all background tasks of this user.
:return: Page containing background task status.
"""
if user_type == UserType.CLIENT:
db_tasks = database.get_tasks(user_type,
client_pw.username()
)
elif user_type == UserType.OWNER:
db_tasks = database.get_tasks(
user_type, provider_pw.username())
else:
raise ValueError("Unknown User Type.")
tasks = []
for db_task in db_tasks:
if db_task.task_type in Tasks:
task = Tasks[db_task.task_type].AsyncResult(db_task.id)
else:
raise ValueError("Unknown Task Type.")
d = {
'id': db_task.id,
'type': db_task.task_type,
'status': task.state,
'time': db_task.timestamp,
'error': "None",
'task_url': url_for(f'/{user_type}.taskstatus', task_id=task.id,
task_type=db_task.task_type),
'kill_url': url_for(f'/{user_type}.killtask', task_id=task.id,
task_type=db_task.task_type)
}
if task.state == 'FAILURE':
d['error'] = str(task.info)
tasks.append(d)
g.tasks = sorted(tasks, key=lambda k: k['time'],
reverse=True)
return render_template('status.html')
def task_status(task_type: str, task_id: str) -> Dict or str:
"""
Return status of the defined background celery task.
:param task_type: Type of task that shall be checked
:param task_id: ID of the task
:return: Status of the task as JSON
"""
if task_type in Tasks:
task = Tasks[task_type].AsyncResult(task_id)
else:
return """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<title>404 Not Found</title>
<h1>Not Found</h1>
<p>No such task type exists.</p>
""", 404
if task.state == 'PENDING':
# job did not start yet
response = {
'state': task.state
}
elif task.state != 'FAILURE':
response = {
'state': task.state
}
else:
# something went wrong in the background job
response = {
'state': task.state,
'status': str(task.info), # this is the exception raised
}
return response
def kill_task(task_type: str, task_id: str) -> Dict or str:
"""
    Kill the specified background celery task.
    :param task_type: Type of the task to kill
    :param task_id: ID of the task to kill
    :return: Success status as JSON
"""
if task_type in Tasks:
task = Tasks[task_type].AsyncResult(task_id)
else:
return """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<title>404 Not Found</title>
<h1>Not Found</h1>
<p>No such task type exists.</p>
""", 404
if task.state != 'STARTED':
# job did not start yet
response = {
'success': False,
'msg': 'Task not running.'
}
else:
log.warning(f"Killing Task '{task_id}'.")
task.revoke(terminate=True, signal='SIGKILL')
response = {
'success': True,
'msg': None
}
return response
Tasks = {
TaskType.OT: execute_ot
}
| 9,315 | 30.261745 | 79 | py |
| parameter-exchange | parameter-exchange-master/src/key_server/celery.py |
#!/usr/bin/env python
"""Startup script for celery worker.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
# noinspection PyUnresolvedReferences
from key_server import celery_app, create_app
app = create_app()
app.app_context().push()
| 300 | 20.5 | 45 | py |
| parameter-exchange | parameter-exchange-master/src/key_server/client.py |
#!/usr/bin/env python3
"""Client Pages of key server.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
from flask import (Blueprint, jsonify)
from flask_httpauth import HTTPBasicAuth
from key_server.connector import get_hash_key, retrieve_keys, \
kill_task, task_status, status_overview
from lib.base_client import UserType
from lib.base_server import verify_token, client_pw, gen_token
log: logging.Logger = logging.getLogger(__name__)
bp = Blueprint('/client', __name__, url_prefix='/client')
client_auth = HTTPBasicAuth()
@client_auth.verify_password
def client_verify_token(user: str, token: str) -> bool:
"""
Verify whether the token is valid for this client.
:param user: Username of client.
:param token: Token to verify.
:return: True if token is valid for user, false otherwise.
"""
return verify_token(UserType.CLIENT, user, token)
@bp.route('/gen_token')
@client_pw.login_required
def client_gen_token() -> str:
"""
Generate a new token for the logged-in user.
:return: A JSON containing an error message on failure or the token on
success.
"""
return gen_token(UserType.CLIENT, client_pw.username())
@bp.route('/hash_key')
@client_auth.login_required
def client_get_hash_key() -> str:
"""
Return the hash key for the logged-in user.
:return: JSON containing the hash key or an error message.
"""
return jsonify(get_hash_key(UserType.CLIENT, client_auth.username()))
@bp.route('/key_retrieval')
@client_auth.login_required
def client_retrieve_keys() -> str:
"""Start an OT Server for encryption key retrieval.
:return: Connection information for OT server.
"""
return jsonify(retrieve_keys(UserType.CLIENT, client_auth.username()))
@bp.route('/status')
@client_pw.login_required
def status():
"""
Display status of all background tasks of this user.
:return: Page containing background task status.
"""
return status_overview(UserType.CLIENT)
@bp.route('/<task_type>/status/<task_id>')
@client_pw.login_required
def taskstatus(task_type: str, task_id: str):
"""
Return status of the defined background celery task.
"""
return jsonify(task_status(task_type, task_id))
@bp.route('/<task_type>/kill/<task_id>')
@client_pw.login_required
def killtask(task_type: str, task_id: str):
"""Kill specified task."""
return jsonify(kill_task(task_type, task_id))
| 2,496 | 26.141304 | 74 | py |
| parameter-exchange | parameter-exchange-master/src/key_server/__init__.py |
#!/usr/bin/env python3
"""Application factory for keyserver app.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import os
from celery import Celery
from flask import Flask
from lib import config, database
from lib.key_server_backend import KeyServer
from lib.logging import configure_root_loger
# Configure logging
# configure_root_loger(config.LOGLEVEL)
celery_app = Celery(__name__, broker=config.KEY_CELERY_BROKER_URL,
backend=config.KEY_CELERY_BROKER_URL)
def create_app(test_config=None, logging_level=config.LOGLEVEL,
data_dir=config.DATA_DIR) -> Flask:
"""Factory function for flask app. Return a configured flask app object."""
# Configure App
app = Flask(__name__, instance_relative_config=True)
redis_port = config.KEY_REDIS_PORT
if test_config is not None and 'DATA_DIR' in test_config:
data_dir = test_config['DATA_DIR']
log_dir = data_dir + 'logs/'
app.config.from_mapping(
REDIS_PORT=redis_port,
CELERY_BROKER_URL=config.KEY_CELERY_BROKER_URL,
CELERY_RESULT_BACKEND=config.KEY_CELERY_BROKER_URL,
HASHKEY_LEN=config.HASHKEY_LEN,
KEY_RANDOMIZE_PORTS=config.RANDOMIZE_PORTS,
OT_HOST=config.OT_HOST,
OT_TLS=config.OT_TLS,
DATA_DIR=data_dir,
SQLALCHEMY_DATABASE_URI=f"sqlite:///{data_dir}/{config.KEYSERVER_DB}",
SQLALCHEMY_TRACK_MODIFICATIONS=False,
)
if test_config is not None:
# load the test config if passed in
app.config.from_mapping(test_config)
# ensure the instance folder exists
os.makedirs(app.instance_path, exist_ok=True)
os.makedirs(data_dir, exist_ok=True)
# Update Logging with new values
configure_root_loger(logging_level, log_dir + config.KEY_LOGFILE)
# Update Celery
celery_app.conf.update(app.config)
# Update SQL Alchemy
import key_server.key_database
# noinspection PyUnresolvedReferences
import lib.user_database
# Needs to be imported so that table is created, too
database.db.init_app(app)
with app.app_context():
database.db.create_all()
# Include pages
from key_server import main
app.register_blueprint(main.bp)
from key_server import client
app.register_blueprint(client.bp)
from key_server import provider
app.register_blueprint(provider.bp)
# Generate keys
KeyServer(app.config['DATA_DIR'])
if config.EVAL:
print("************************************************************")
print("Starting in Eval Mode!")
print("************************************************************")
return app
| 2,721 | 29.58427 | 79 | py |
| parameter-exchange | parameter-exchange-master/src/key_server/provider.py |
#!/usr/bin/env python3
"""Provider Pages of key server.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
from flask import (Blueprint, jsonify)
from flask_httpauth import HTTPBasicAuth
from key_server.connector import get_hash_key, retrieve_keys, \
kill_task, task_status, status_overview
from lib.base_client import UserType
from lib.base_server import verify_token, provider_pw, gen_token
log: logging.Logger = logging.getLogger(__name__)
bp = Blueprint('/provider', __name__, url_prefix='/provider')
provider_auth = HTTPBasicAuth()
@provider_auth.verify_password
def provider_verify_token(user: str, token: str) -> bool:
"""
Verify whether the token is valid for this data provider.
:param user: Username of data provider.
:param token: Token to verify.
:return: True if token is valid for user, false otherwise.
"""
return verify_token(UserType.OWNER, user, token)
@bp.route('/gen_token')
@provider_pw.login_required
def provider_gen_token() -> str:
"""
Generate a new token for the logged-in user.
:return: A JSON containing an error message on failure or the token on
success.
"""
return gen_token(UserType.OWNER, provider_pw.username())
@bp.route('/hash_key')
@provider_auth.login_required
def provider_get_hash_key() -> str:
"""
Return the hash key for the logged-in user.
:return: JSON containing the hash key or an error message.
"""
return jsonify(get_hash_key(UserType.OWNER, provider_auth.username()))
@bp.route('/key_retrieval')
@provider_auth.login_required
def provider_retrieve_keys() -> str:
"""Start an OT Server for encryption key retrieval.
:return: Connection information for OT server.
"""
log.debug("Provider key_retrieval accessed.")
return jsonify(retrieve_keys(UserType.OWNER, provider_auth.username()))
@bp.route('/status')
@provider_pw.login_required
def status():
"""
Display status of all background tasks of this user.
:return: Page containing background task status.
"""
return status_overview("provider")
@bp.route('/<task_type>/status/<task_id>')
@provider_pw.login_required
def taskstatus(task_type: str, task_id: str):
"""
Return status of the defined background celery task.
"""
return jsonify(task_status(task_type, task_id))
@bp.route('/<task_type>/kill/<task_id>')
@provider_pw.login_required
def killtask(task_type: str, task_id: str):
"""Kill specified task."""
return jsonify(kill_task(task_type, task_id))
| 2,588 | 27.141304 | 75 | py |
| parameter-exchange | parameter-exchange-master/src/key_server/key_database.py |
#!/usr/bin/env python3
"""Database Models for key server only.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
from datetime import datetime
from lib.database import db
# noinspection PyUnresolvedReferences
from lib.user_database import Client, Owner
# Need to be imported for the foreign key to work
class KeyRetrieval(db.Model):
"""
SQLAlchemy class representing one key retrieval operation on the
key server only
"""
__tablename__ = 'key_retrievals'
id = db.Column(db.Integer,
nullable=False,
primary_key=True) # Auto
client_id = db.Column(db.Integer,
db.ForeignKey("client.id"))
client = db.relationship("Client",
uselist=False,
foreign_keys=[client_id])
provider_id = db.Column(db.Integer,
db.ForeignKey("owner.id"))
provider = db.relationship("Owner",
uselist=False,
foreign_keys=[provider_id])
retrieved_keys = db.Column(db.Integer, nullable=False)
    # Pass the callable (not its result) so the timestamp is evaluated per
    # row instead of once at import time.
    timestamp = db.Column(db.DateTime,
                          default=datetime.now,
                          nullable=False)
class HashKeyRetrieval(db.Model):
"""
SQLAlchemy class storing all hash key retrieval operations.
"""
__tablename__ = 'hash_key_retrievals'
id = db.Column(db.Integer,
nullable=False,
primary_key=True) # Auto
client_id = db.Column(db.Integer,
db.ForeignKey("client.id"))
client = db.relationship("Client",
uselist=False,
foreign_keys=[client_id])
provider_id = db.Column(db.Integer,
db.ForeignKey("owner.id"))
provider = db.relationship("Owner",
uselist=False,
foreign_keys=[provider_id])
    timestamp = db.Column(db.DateTime,
                          default=datetime.now,
                          nullable=False)
| 2,187 | 32.661538 | 68 | py |
| parameter-exchange | parameter-exchange-master/src/storage_server/main.py |
#!/usr/bin/env python3
"""Blueprint for main pages.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import os
from typing import Any
from flask import (Blueprint, render_template, g, current_app,
send_from_directory)
from lib.base_server import is_redis_online
log: logging.Logger = logging.getLogger(__name__)
bp = Blueprint('main', __name__)
@bp.route('/favicon.ico')
def favicon() -> Any:
"""Return storage server favicon."""
return send_from_directory(os.path.join(current_app.root_path, 'static'),
'favicon.ico', mimetype='image/vnd.microsoft'
'.icon')
def is_celery_online() -> bool:
"""Return True if celery is reachable, False otherwise."""
from storage_server import celery_app
    return celery_app.control.inspect().ping() is not None
@bp.route('/')
def main() -> str:
"""State whether server is running for testing."""
g.redis_online = is_redis_online(current_app.config['REDIS_PORT'])
g.celery_online = is_celery_online()
return render_template('index.html')
@bp.route('/celery')
def celery_status() -> str:
"""Return True if celery is started correctly and false otherwise."""
return str(is_celery_online())
| 1,413 | 25.185185 | 77 | py |
| parameter-exchange | parameter-exchange-master/src/storage_server/connector.py |
#!/usr/bin/env python3
"""Connector to backend and celery.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
from typing import Dict, List
from celery import Task
from flask import g, current_app as app, render_template, url_for
from lib import database
from lib.base_client import UserType
from lib.base_server import client_pw, provider_pw
from lib.storage_server_backend import StorageServer
from . import celery_app
log: logging.Logger = logging.getLogger(__name__)
class TaskType:
"""Allows storage server tasks."""
PSI = "PSI"
BLOOM_INSERT = "BLOOM_INSERT"
def get_storageserver_backend() -> StorageServer:
"""Return the flask request's backend StorageServer object and create
one if none exists."""
if 'storageserver' not in g:
g.storageserver = StorageServer(app.config['DATA_DIR'])
return g.storageserver
def _batch_store_records(record_list: List[List[str]], username: str) -> None:
"""
Batch storage of records.
:param record_list: List of records to store of the following form:
[
('hash1', 'record1', 'owner1'),
('hash2', 'record2', 'owner2')
]
:return: None
"""
StorageServer.batch_store_records_db(record_list)
task = insert_bloom.delay(record_list)
database.add_task(username,
UserType.OWNER,
task.id, TaskType.BLOOM_INSERT)
@celery_app.task(bind=True) # pragma no cover
def execute_psi(self: Task, port: int) -> None:
"""Execute PSI with celery."""
log.info(f"Celery offering PSI on Port {port}.")
self.time_limit = 3600
self.update_state(state='STARTED')
StorageServer.offer_psi(port=port)
self.update_state(state='SUCCESS')
@celery_app.task(bind=True) # pragma no cover
def insert_bloom(self: Task, record_list: List[List[str]]) -> None:
"""Bloom Filter insertion done via celery becasue of high fluctuation
in eval."""
log.info(f"Celery Inserting values into Bloom Filter.")
self.time_limit = 3600
self.update_state(state='STARTED')
get_storageserver_backend().batch_store_records_bloom(record_list)
self.update_state(state='SUCCESS')
def status_overview(user_type: str):
"""
Display status of all background tasks of this user.
:return: Page containing background task status.
"""
if user_type == UserType.CLIENT:
db_tasks = database.get_tasks(user_type,
client_pw.username())
elif user_type == UserType.OWNER:
db_tasks = database.get_tasks(
user_type, provider_pw.username())
else:
raise ValueError("Unknown User Type.")
tasks = []
for db_task in db_tasks:
if db_task.task_type in Tasks:
task = Tasks[db_task.task_type].AsyncResult(db_task.id)
else:
raise ValueError("Unknown Task Type.")
d = {
'id': db_task.id,
'type': db_task.task_type,
'status': task.state,
'time': db_task.timestamp,
'error': "None",
'task_url': url_for(f'/{user_type}.taskstatus', task_id=task.id,
task_type=db_task.task_type),
'kill_url': url_for(f'/{user_type}.killtask', task_id=task.id,
task_type=db_task.task_type)
}
if task.state == 'FAILURE':
d['error'] = str(task.info)
tasks.append(d)
g.tasks = sorted(tasks, key=lambda k: k['time'],
reverse=True)
return render_template('status.html')
def task_status(task_type: str, task_id: str) -> Dict or str:
"""
Return status of the defined background celery task.
:param task_type: Type of task that shall be checked
:param task_id: ID of the task
:return: Status of the task as JSON
"""
if task_type in Tasks:
task = Tasks[task_type].AsyncResult(task_id)
else:
return """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2
Final//EN">
<title>404 Not Found</title>
<h1>Not Found</h1>
<p>No such task type exists.</p>
""", 404
if task.state == 'PENDING':
# job did not start yet
response = {
'state': task.state
}
elif task.state != 'FAILURE':
response = {
'state': task.state
}
else:
# something went wrong in the background job
response = {
'state': task.state,
'status': str(task.info), # this is the exception raised
}
return response
def kill_task(task_type: str, task_id: str) -> Dict or str:
"""
    Kill the specified background celery task.
    :param task_type: Type of the task to kill
    :param task_id: ID of the task to kill
    :return: Success status as JSON
"""
if task_type in Tasks:
task = Tasks[task_type].AsyncResult(task_id)
else:
return """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<title>404 Not Found</title>
<h1>Not Found</h1>
<p>No such task type exists.</p>
""", 404
if task.state != 'STARTED':
# job did not start yet
response = {
'success': False,
'msg': 'Task not running.'
}
else:
log.warning(f"Killing Task '{task_id}'.")
task.revoke(terminate=True, signal='SIGKILL')
response = {
'success': True,
'msg': None
}
return response
Tasks = {
TaskType.PSI: execute_psi,
TaskType.BLOOM_INSERT: insert_bloom,
}
| 5,899 | 30.550802 | 79 | py |
| parameter-exchange | parameter-exchange-master/src/storage_server/celery.py |
#!/usr/bin/env python
"""Startup script for celery worker.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
# noinspection PyUnresolvedReferences
from storage_server import celery_app, create_app
app = create_app()
app.app_context().push()
| 304 | 20.785714 | 49 | py |
| parameter-exchange | parameter-exchange-master/src/storage_server/storage_database.py |
#!/usr/bin/env python3
"""Database Models for storage server only.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
from datetime import datetime
from lib.database import db
# noinspection PyUnresolvedReferences
from lib.user_database import Client, Owner
# Need to be imported for the foreign key to work
class StoredRecord(db.Model):
"""
SQLAlchemy class representing one record
"""
hash = db.Column(db.Text, nullable=False, primary_key=True)
ciphertext = db.Column(db.Text, nullable=False, primary_key=True)
owner = db.Column(db.Text, nullable=False, primary_key=True)
class RecordRetrieval(db.Model):
"""
SQLAlchemy class representing one data retrieval operation on the
storage server only
"""
__tablename__ = 'record_retrieval_accesses'
id = db.Column(db.Integer,
nullable=False,
primary_key=True) # Auto
client_id = db.Column(db.Integer,
db.ForeignKey("client.id"),
nullable=False)
client = db.relationship("Client",
uselist=False,
foreign_keys=[client_id])
enc_keys_by_hash = db.Column(db.Integer, nullable=False)
# Number of encryption keys for all requested hashes
enc_keys_by_records = db.Column(db.Integer, nullable=False)
    # Number of encryption keys for all returned records
    # Pass the callable (not its result) so the timestamp is evaluated per
    # row instead of once at import time.
    timestamp = db.Column(db.DateTime,
                          default=datetime.now,
                          nullable=False)
class BillingInfo(db.Model):
"""
SQLAlchemy class representing storage server billing information,
i.e. the retrieval of a record by a data owner
"""
__tablename__ = 'billing_information'
id = db.Column(db.Integer,
nullable=False,
primary_key=True) # Auto
client_id = db.Column(db.Integer,
db.ForeignKey("client.id"),
nullable=False)
client = db.relationship("Client",
uselist=False,
foreign_keys=[client_id])
provider_id = db.Column(db.Integer,
db.ForeignKey("owner.id"),
nullable=False)
provider = db.relationship("Owner",
uselist=False,
foreign_keys=[provider_id])
count = db.Column(db.Integer, nullable=False) # Num. of retr. items
transaction_id = db.Column(db.Integer,
db.ForeignKey("record_retrieval_accesses.id"),
nullable=False) # ID of transaction
transaction = db.relationship("RecordRetrieval",
uselist=False,
foreign_keys=[transaction_id])
    timestamp = db.Column(db.DateTime, default=datetime.now, nullable=False)
class BloomAccess(db.Model):
"""
SQLAlchemy class representing one access to the Bloom API
"""
__tablename__ = 'bloom_accesses'
id = db.Column(db.Integer,
nullable=False,
primary_key=True) # Auto
client_id = db.Column(db.Integer,
db.ForeignKey("client.id"),
nullable=False)
client = db.relationship("Client",
uselist=False,
foreign_keys=[client_id])
    timestamp = db.Column(db.DateTime,
                          default=datetime.now,
                          nullable=False)
class PSIAccess(db.Model):
"""
SQLAlchemy class representing one access to the PSI API
"""
__tablename__ = 'psi_accesses'
id = db.Column(db.Integer,
nullable=False,
primary_key=True) # Auto
client_id = db.Column(db.Integer,
db.ForeignKey("client.id"),
nullable=False)
client = db.relationship("Client",
uselist=False,
foreign_keys=[client_id])
    timestamp = db.Column(db.DateTime,
                          default=datetime.now,
                          nullable=False)
| 4,311 | 34.933333 | 78 | py |
| parameter-exchange | parameter-exchange-master/src/storage_server/client.py |
#!/usr/bin/env python3
"""Client Pages of storage server.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import secrets
from flask import Blueprint, jsonify, request, current_app as app
from flask_httpauth import HTTPBasicAuth
from lib import helpers, config, database
from lib.base_client import UserType
from lib.base_server import verify_token, gen_token, client_pw
from lib.database import db
from lib.storage_server_backend import StorageServer
from lib.user_database import get_user
from storage_server.connector import get_storageserver_backend, execute_psi, \
status_overview, kill_task, task_status, TaskType
from storage_server.storage_database import PSIAccess, BloomAccess
log: logging.Logger = logging.getLogger(__name__)
bp = Blueprint('/client', __name__, url_prefix='/client')
client_auth = HTTPBasicAuth()
@client_auth.verify_password
def client_verify_token(user: str, token: str) -> bool:
"""
Verify whether the token is valid for this client.
:param user: Username of client.
:param token: Token to verify.
:return: True if token is valid for user, false otherwise.
"""
return verify_token(UserType.CLIENT, user, token)
@bp.route('/gen_token')
@client_pw.login_required
def client_gen_token() -> str:
"""
Generate a new token for the logged-in user.
:return: A JSON containing an error message on failure or the token on
success.
"""
return gen_token(UserType.CLIENT, client_pw.username())
def _track_bloom_access(user_type: str, username: str) -> BloomAccess:
"""
Track an access to the Bloom API.
:param user_type: Client or Owner
:param username: Name of user
:return: The created DB Object
"""
u = get_user(user_type, username)
if user_type == UserType.CLIENT:
t = BloomAccess(
client=u
)
else: # pragma no cover
raise ValueError("Bad user type.")
db.session.add(t)
db.session.commit()
return t
@bp.route('/bloom')
@client_auth.login_required
def client_get_bloom() -> str:
"""
    Return a base64 encoding of the bloom filter that represents the
    storage server's record set.
    :return: Dict containing the base64-encoded bloom filter
"""
try:
_track_bloom_access(UserType.CLIENT, client_auth.username())
        b = get_storageserver_backend().get_bloom_filter().decode()
except ValueError as e:
return jsonify(
{
"success": False,
"msg": str(e)
})
return jsonify(
{
"success": True,
"bloom": b
})
@bp.route('/retrieve_record', methods=['POST'])
@client_auth.login_required
def retrieve_record() -> str:
"""
Retrieve record for the authenticated user.
Requires JSON as POST data:
{
'hash': Base64(Hash) [str]
}
    :return: Dict containing a list of records matching the hash or error msg
'records':
[
['Base64(HASH)', 'json.dumps(CIPHERTEXT-1)'],
['Base64(HASH)', 'json.dumps(CIPHERTEXT-2)']
]
"""
try:
if request.json is None or 'hash' not in request.json:
raise ValueError("Missing POST value 'hash'.")
h = request.json['hash']
r = StorageServer.get_record(h, client_auth.username())
except ValueError as e:
return jsonify({'success': False,
'msg': str(e)})
return jsonify({'success': True,
'records': r})
@bp.route('/batch_retrieve_records', methods=['POST'])
@client_auth.login_required
def batch_retrieve_records() -> str:
"""
Retrieve multiple records for the authenticated user.
Requires JSON as POST data:
{
'hashes': [
Base64(Hash-1) [str],
Base64(Hash-2) [str],
...
]
}
:return: jsonified dict containing records on success and an error
message otherwise. On success:
'records':
[
('Base64(HASH-1)', 'json.dumps(CIPHERTEXT-1)'),
('Base64(HASH-1)', 'json.dumps(CIPHERTEXT-2)'),
('Base64(HASH-2)', 'json.dumps(CIPHERTEXT-3)')
]
"""
try:
if request.json is None:
raise KeyError("Missing POST value 'hashes'.")
hash_list = request.json['hashes']
r = StorageServer.batch_get_records(hash_list, client_auth.username())
except KeyError:
return jsonify({'success': False,
'msg': "Missing POST value 'hashes'."})
return jsonify({'success': True,
'records': r})
def _track_PSI_access(user_type: str, username: str) -> PSIAccess:
"""
Track an access to the PSI API.
:param user_type: Client or Owner
:param username: Name of user
:return: The created DB Object
"""
u = get_user(user_type, username)
if user_type == UserType.CLIENT:
t = PSIAccess(
client=u
)
else: # pragma no cover
raise ValueError("Bad user type.")
db.session.add(t)
db.session.commit()
return t
@bp.route('/psi')
@client_auth.login_required
def psi():
"""
Start a PSI Server and return connection information.
:return: Dict containing PSI Server access information.
{
'success': bool,
'port': int,
'host': str,
'tls': bool,
'setSize': int,
'msg': str (On failure only)
}
"""
# Get free port
port = secrets.randbelow(65536 - 1024) + 1024
while not helpers.port_free(port):
port = secrets.randbelow(65536 - 1024) + 1024 # pragma no cover
if not app.config['RANDOMIZE_PORTS']: # pragma no cover
if helpers.port_free(config.PSI_PORT):
port = config.PSI_PORT
else: # pragma no cover
app.logger.warning(f"Port {config.PSI_PORT} already in use!"
f"Using {port} instead.")
app.logger.info(f"Starting PSI Sending instance on port {port}.")
_track_PSI_access(UserType.CLIENT, client_auth.username())
task = execute_psi.delay(port)
database.add_task(client_auth.username(),
UserType.CLIENT,
task.id, TaskType.PSI)
app.logger.debug(f"PSI Server Thread started on port {port}.")
return jsonify({
'success': True,
'port': port,
'host': app.config['PSI_HOST'],
'tls': app.config['PSI_TLS'],
'setSize': config.PSI_SETSIZE
})
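# A hedged client-side sketch for this endpoint (PyPSIReceiver as used in
# eval.psi.receiver; 'url' and the auth handling are hypothetical):
#
#   info = requests.get(url + "/client/psi", auth=(user, token)).json()
#   # ... configure a PyPSIReceiver with info['host'], info['port'],
#   # info['setSize'] and info['tls'], then run the PSI protocol.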
@bp.route('/status')
@client_pw.login_required
def status():
"""
Display status of all background tasks of this user.
:return: Page containing background task status.
"""
return status_overview(UserType.CLIENT)
@bp.route('/<task_type>/status/<task_id>')
@client_pw.login_required
def taskstatus(task_type: str, task_id: str):
"""
Return status of the defined background celery task.
:param task_type: Type of celery task
:param task_id: ID of celery task
:return: None
"""
return jsonify(task_status(task_type, task_id))
@bp.route('/<task_type>/kill/<task_id>')
@client_pw.login_required
def killtask(task_type: str, task_id: str):
"""Kill specified task.
:param task_type: Type of celery task
:param task_id: ID of celery task
:return: None"""
return jsonify(kill_task(task_type, task_id))
| 7,578 | 29.316 | 78 | py |
| parameter-exchange | parameter-exchange-master/src/storage_server/__init__.py |
#!/usr/bin/env python3
"""Application factory for storageserver app.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import os
from celery import Celery
from flask import Flask
from lib import config, database
from lib.logging import configure_root_loger
# Configure logging
# configure_root_loger(config.LOGLEVEL)
celery_app = Celery(__name__, broker=config.STORAGE_CELERY_BROKER_URL,
backend=config.STORAGE_CELERY_BROKER_URL)
def create_app(test_config=None, logging_level=config.LOGLEVEL,
data_dir=config.DATA_DIR) -> Flask:
"""Factory function for flask app. Return a configured flask app object."""
app = Flask(__name__, instance_relative_config=True)
redis_port = config.STORAGE_REDIS_PORT
if test_config is not None and 'DATA_DIR' in test_config:
data_dir = test_config['DATA_DIR']
log_dir = data_dir + 'logs/'
app.config.from_mapping(
SECRET_KEY='dev', # TODO: Exchange
REDIS_PORT=redis_port,
CELERY_BROKER_URL=config.STORAGE_CELERY_BROKER_URL,
CELERY_RESULT_BACKEND=config.STORAGE_CELERY_BROKER_URL,
DATA_DIR=data_dir,
PSI_HOST=config.PSI_HOST,
PSI_TLS=config.PSI_TLS,
RANDOMIZE_PORTS=config.RANDOMIZE_PORTS,
SQLALCHEMY_DATABASE_URI=f"sqlite:///{data_dir}/{config.STORAGE_DB}",
SQLALCHEMY_TRACK_MODIFICATIONS=False,
)
if test_config is not None:
# load the test config if passed in
app.config.from_mapping(test_config)
os.makedirs(app.instance_path, exist_ok=True)
os.makedirs(data_dir, exist_ok=True)
# Update Logging with new values
configure_root_loger(logging_level, log_dir + config.STORAGE_LOGFILE)
# Update celery
celery_app.conf.update(app.config)
# Update SQL Alchemy
import storage_server.storage_database
# noinspection PyUnresolvedReferences
import lib.user_database
# Needs to be imported so that table is created, too
database.db.init_app(app)
# For bloom filter
from storage_server.connector import get_storageserver_backend
with app.app_context():
database.db.create_all()
# Initialize Bloom Filter
get_storageserver_backend()._initialize_bloom_filter()
# Include pages
from storage_server import main
app.register_blueprint(main.bp)
from storage_server import client
app.register_blueprint(client.bp)
from storage_server import provider
app.register_blueprint(provider.bp)
if config.EVAL:
print("************************************************************")
print("Starting in Eval Mode!")
print("************************************************************")
return app
| 2,793 | 31.488372 | 79 | py |
| parameter-exchange | parameter-exchange-master/src/storage_server/provider.py |
#!/usr/bin/env python3
"""Provider Pages of storage server.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
from flask import Blueprint, request, jsonify, render_template
from flask_httpauth import HTTPBasicAuth
from lib.base_client import UserType
from lib.base_server import verify_token, gen_token, provider_pw
from storage_server.connector import (_batch_store_records,
get_storageserver_backend,
status_overview, task_status, kill_task)
log: logging.Logger = logging.getLogger(__name__)
bp = Blueprint('/provider', __name__, url_prefix='/provider')
provider_auth = HTTPBasicAuth()
@provider_auth.verify_password
def provider_verify_token(user: str, token: str) -> bool:
"""
Verify whether the token is valid for this data provider.
:param user: Username of data provider.
:param token: Token to verify.
:return: True if token is valid for user, false otherwise.
"""
return verify_token(UserType.OWNER, user, token)
@bp.route('/gen_token')
@provider_pw.login_required
def provider_gen_token() -> str:
"""
Generate a new token for the logged-in user.
:return: A JSON containing an error message on failure or the token on
success.
"""
return gen_token(UserType.OWNER, provider_pw.username())
@bp.route('/store_record', methods=['POST'])
@provider_auth.login_required
def store_record() -> str:
"""
Store a record into the database.
Requires request JSON as HTTP POST data:
{
'hash': Base64(Hash)[str]
'ciphertext': json.dumps(ciphertext)[str]
'owner': 'ownername'[str]
}
    :return: JSON indicating success or an error message
"""
try:
if request.json is None:
raise ValueError("Empty POST JSON.")
try:
hash_val: str = request.json['hash']
ciphertext: str = request.json['ciphertext']
owner: str = request.json['owner']
except KeyError:
raise ValueError("Require 'hash', 'ciphertext' and 'owner'.")
if owner != provider_auth.username():
raise ValueError("Owner in JSON not authenticated owner.")
except ValueError as e:
return jsonify({
'success': False,
'msg': f"Invalid POST data: {str(e)}"
})
log.info(f"Store record: {hash_val} - {ciphertext} of {owner}")
try:
get_storageserver_backend().store_record(hash_val, ciphertext, owner)
except Exception as e:
log.exception(f"Failed to store record: {str(e)}")
return jsonify({
'success': False,
'msg': str(e)
})
return jsonify({
'success': True,
'msg': None
})
@bp.route('/store_interface')
@provider_auth.login_required
def store_interface() -> str:
    """Render the record store interface."""
    return render_template('store.html')
@bp.route('/batch_store_records', methods=['POST'])
@provider_auth.login_required
def batch_store_records() -> str:
"""
Store many records into the database.
Requires a JSON as HTTP POST data:
[
[Base64(Hash-1)[str], json.dumps(ciphertext-1)[str], 'owner'[str]],
[Base64(Hash-2)[str], json.dumps(ciphertext-2)[str], 'owner'[str]],
[Base64(Hash-2)[str], json.dumps(ciphertext-3)[str], 'owner'[str]],
]
    :return: JSON indicating success or an error message
"""
log.info(f"Batch Store Records")
record_list = request.json
owner = provider_auth.username()
try:
if request.json is None:
raise ValueError("Missing POST values.")
if not isinstance(record_list, list):
raise ValueError(f"batch_store_records received non list"
f": {record_list}")
for item in record_list:
            if not isinstance(item, list) or len(item) != 3:
raise ValueError(f"Record list contained bad item: {item}")
for i in item:
if not isinstance(i, str):
raise ValueError(f"Record list contained bad item: {item}")
# Verify that owner is correct
if item[2] != owner:
                raise ValueError(
                    "Different owner in record than authenticated owner!")
except ValueError as e:
log.warning(str(e))
return jsonify({
'success': False,
'msg': str(e)
})
_batch_store_records(record_list, owner)
return jsonify({
'success': True,
'msg': None
})
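# A hedged sketch of a matching request ('url', 'token' and the payload
# values are hypothetical; the list shape follows the docstring above):
#
#   requests.post(url + "/provider/batch_store_records",
#                 auth=("ownername", token),
#                 json=[["aGFzaDE=", '{"nonce": "..."}', "ownername"]])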
@bp.route('/status')
@provider_pw.login_required
def status():
"""
Display status of all background tasks of this user.
:return: Page containing background task status.
"""
return status_overview(UserType.OWNER)
@bp.route('/<task_type>/status/<task_id>')
@provider_pw.login_required
def taskstatus(task_type: str, task_id: str):
"""
Return status of the defined background celery task.
:param task_type: Type of celery task
:param task_id: ID of celery task
:return: None
"""
return jsonify(task_status(task_type, task_id))
@bp.route('/<task_type>/kill/<task_id>')
@provider_pw.login_required
def killtask(task_type: str, task_id: str):
"""Kill specified task.
:param task_type: Type of celery task
:param task_id: ID of celery task
:return: None"""
return jsonify(kill_task(task_type, task_id))
| 5,405 | 30.068966 | 79 | py |
| parameter-exchange | parameter-exchange-master/src/test/test_record.py |
#!/usr/bin/env python3
"""Test for record class.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
from unittest import TestCase
from unittest.mock import patch
from lib.helpers import from_base64
from lib.record import Record, hash_to_index, round_s, get_power
rounding = 3
id_len = 3
@patch("lib.config.RECORD_ID_LENGTH", id_len)
@patch("lib.config.RECORD_LENGTH", 5)
class TestRecord(TestCase):
record = [1.1, 22.22, 333.333, 4444.4444, 55555.55555]
owner = "Testowner"
hash_key = b"abcde"
encryption_key = b"stEinAESPasswort"
longhash_hex = (
"0c449889397b9499e55f53911b21772e119a8c9128c47329f64888b0e42be"
"3acaa82747aa5c328efbce2255b39057bd090d37e500a3e2af19009f983e9dbf293"
)
longhash = bytes.fromhex(longhash_hex)
ciphertext = {
'ciphertext': '/eHeyXRfvvtRrm2kQs+i6+g/x6OuChngHdoPb1ZSv29jHn+GbwR'
'+EQ==',
'hash':
'DESYiTl7lJnlX1ORGyF3LhGajJEoxHMp9kiIsOQr46yqgnR6pcMo77ziJVs5B'
'XvQkNN+UAo+KvGQCfmD6dvykw==',
'length': 'BQ==',
'mac': 'r07173RxSg4fX0b3v4sb6A==',
'nonce': 'g2xlxbCW2c77STMpf5wNpQ=='}
@patch("lib.config.RECORD_LENGTH", 5)
@patch("lib.config.RECORD_ID_LENGTH", id_len)
@patch("lib.config.ROUNDING_VEC", [rounding for _ in range(id_len)])
def setUp(self) -> None:
"""Create dummy record."""
self.r = Record(self.record)
def test_hash_to_index(self):
h1 = bytes.fromhex("ffffffffffffffff")
h2 = bytes.fromhex("c4dff7abcdef")
self.assertEqual(
hash_to_index(h1, 32),
2 ** 32 - 1
)
self.assertEqual(
hash_to_index(h1, 11),
2 ** 11 - 1
)
self.assertEqual(
hash_to_index(h2, 21),
1564612
)
def test_init(self):
r = Record(self.record, self.owner)
self.assertEqual(r.record, self.record)
self.assertEqual(r.owner, self.owner)
self.assertIsNone(r._long_hash)
with self.assertRaises(TypeError):
Record(["test"])
with self.assertRaises(ValueError):
# Bad record length
Record([1, 2, 3])
def test_get_long_hash(self):
with self.assertRaises(ValueError):
self.r.get_long_hash()
self.r._hash_key = self.hash_key
h = self.r.get_long_hash()
self.assertEqual(len(h), 64)
self.assertEqual(self.longhash_hex, h.hex())
@patch("lib.config.PSI_INDEX_LEN", 128)
def test_get_psi_index(self):
with self.assertRaises(ValueError):
self.r.get_psi_index()
self.r._hash_key = self.hash_key
h = self.r.get_psi_index()
self.assertEqual(61763042635925203158941279474836587532, h)
def test_get_owner(self):
with self.assertRaises(RuntimeError):
self.r.get_owner()
self.r.owner = "erik"
self.assertEqual("erik", self.r.get_owner())
def test_get_ot_index(self):
with self.assertRaises(ValueError):
self.r.get_ot_index()
self.r._hash_key = self.hash_key
h = self.r.get_ot_index()
self.assertEqual(541708, h)
def test__get_rounded_record(self):
res = self.r._get_rounded_record()
rounded = [1.1, 22.2, 333.0]
self.assertEqual(rounded, res)
self.r._rounding_vector = [0, 0, 0]
self.assertEqual(self.record[:3],
self.r._get_rounded_record())
def test_set_hash_key(self):
self.r.set_hash_key(self.hash_key)
self.assertEqual(self.hash_key, self.r._hash_key)
def test_set_encryption_key(self):
self.r.set_encryption_key(self.encryption_key)
self.assertEqual(self.encryption_key, self.r._encryption_key)
def test__get_identifier(self):
self.assertEqual(self.r._get_identifier(),
str([1.1, 22.2, 333.0]).encode('utf-8'))
def test_get_encrypted_record(self):
self.r._hash_key = self.hash_key
with self.assertRaises(ValueError):
self.r.get_encrypted_record()
res = self.r.get_encrypted_record(
self.encryption_key,
nonce=from_base64(self.ciphertext['nonce']))
self.assertEqual(self.ciphertext, res)
def test_from_ciphertext(self):
r = Record.from_ciphertext(self.ciphertext, self.encryption_key)
self.assertEqual(r.record, self.record)
def test_str(self):
self.assertEqual(
str(self.r),
f"({str(self.record)},)"
)
self.r.owner = self.owner
self.assertEqual(
str(self.r),
f"({str(self.record)}, '{self.owner}')"
)
self.r.owner = None
self.r._hash_key = self.hash_key
self.assertEqual(
str(self.r),
f"('0x{self.longhash_hex}', {str(self.record)})"
)
self.r.owner = self.owner
self.assertEqual(
str(self.r),
f"('0x{self.longhash_hex}', {str(self.record)}, '{self.owner}')"
)
def test_to_hash_rec_tuple(self):
self.r._hash_key = self.hash_key
self.assertEqual(self.r.to_hash_rec_tuple(),
("0x" + self.longhash_hex, self.record))
def test_to_full_tuple(self):
self.r._hash_key = self.hash_key
with self.assertRaises(ValueError):
self.r.to_full_tuple()
self.r.owner = self.owner
self.assertEqual(self.r.to_full_tuple(),
("0x" + self.longhash_hex, self.record, self.owner))
def test_equality(self):
r2 = Record(self.record, "Has-Owner")
r3 = Record([1, 2, 3, 4, 5], "Has-Owner")
self.assertTrue(self.r == self.r)
# different type
self.assertTrue(self.r != "Record")
# no owner on one side
self.assertTrue(self.r == r2)
self.assertTrue(r2 == self.r)
# both have owner
self.assertTrue(r2 == r2)
self.assertTrue(r2 != r3)
def test_round_s(self):
with self.assertRaises(ValueError):
round_s(1, -1)
for sign in [-1, 1]:
self.assertEqual(
sign * 0.00112,
round_s(sign * 0.00111999, 3)
)
self.assertEqual(
sign * 0.0111,
round_s(sign * 0.01111111, 3)
)
self.assertEqual(
sign * 0.111,
round_s(sign * 0.11111, 3)
)
self.assertEqual(
sign * 1.11,
round_s(sign * 1.11, 3)
)
self.assertEqual(
sign * 11.1,
round_s(sign * 11.11, 3)
)
self.assertEqual(
sign * 111,
round_s(sign * 111.11, 3)
)
self.assertEqual(
sign * 1110,
round_s(sign * 1111.11, 3)
)
self.assertEqual(
sign * 11100,
round_s(sign * 11111.11, 3)
)
self.assertEqual(
sign * 1.11,
round_s(sign * 1.11, 0)
)
self.assertEqual(
sign * 11.111111111,
round_s(sign * 11.111111111, 0)
)
def test_get_power(self):
with self.assertRaises(ValueError):
get_power(0)
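# A minimal sketch of the significant-figure rounding exercised in
# test_round_s above, inferred from its assertions (digits == 0 disables
# rounding, negative digits raise); the real lib.record.round_s may be
# implemented differently.
import math
def round_s_sketch(value: float, digits: int) -> float:
    """Round value to `digits` significant figures (sketch only)."""
    if digits < 0:
        raise ValueError("digits must be non-negative")
    if digits == 0 or value == 0:
        return value
    return round(value, digits - 1 - math.floor(math.log10(abs(value))))
assert round_s_sketch(1111.11, 3) == 1110  # matches test_round_s above
assert round_s_sketch(0.00111999, 3) == 0.00112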
| 7,512
| 31.244635
| 77
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_db_cli.py
|
#!/usr/bin/env python3
"""Test of User Database CLI.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import io
import logging
import os
import sys
from unittest import TestCase
from unittest.mock import patch
from lib import config
from lib.base_client import UserType
from lib.helpers import captured_output
with patch("lib.user_database.db"), patch("flask.Flask"):
from lib.db_cli import main
class DBCLITestWithoutDB(TestCase):
@classmethod
def setUpClass(cls) -> None:
"""Disable Logging"""
logging.getLogger().setLevel(logging.FATAL)
def test_main(self):
# no args
with self.assertRaises(SystemExit):
with captured_output():
main(UserType.CLIENT, [])
def test_wrong_action(self):
with self.assertRaises(SystemExit):
with captured_output():
main(UserType.CLIENT, ['-o'])
class DBCLITest(TestCase):
@classmethod
def setUpClass(cls) -> None:
"""Disable Logging"""
logging.getLogger().setLevel(logging.FATAL)
def setUp(self) -> None:
"""
Block output of argparse.
"""
text_trap = io.StringIO() # Block print of argparse
sys.stderr = text_trap
sys.stdout = text_trap
self.test_dir = config.DATA_DIR + "test/"
os.makedirs(self.test_dir, exist_ok=True)
@patch("lib.db_cli.user_db")
def test_list(self, d):
d.get_all_users.return_value = ['userA', 'userB', 'userC']
# list
with captured_output() as (out, err):
main("User", ['-l'], self.test_dir)
output = out.getvalue().strip()
self.assertIn("0: userA\n1: userB\n2: userC", output)
with captured_output() as (out, err):
main("User", ['--list'], self.test_dir)
output = out.getvalue().strip()
self.assertIn("0: userA\n1: userB\n2: userC", output)
@patch("lib.db_cli.user_db")
def test_log(self, d):
d.get_all_users.return_value = ['userA', 'userB', 'userC']
with captured_output() as (out, err):
main("User", ['-l'], self.test_dir, no_print=True)
output = out.getvalue().strip()
self.assertEqual("", output)
@patch("lib.db_cli.user_db")
def test_add(self, d):
# Add User - Fail
with captured_output() as (out, err):
main(UserType.CLIENT, ['-a', 'userD'], self.test_dir)
self.assertIn("User ID and Password have to defined.",
out.getvalue().strip())
d.add_user.assert_not_called()
# Add User - Fail
d.add_user.side_effect = ValueError("Test Error")
with captured_output() as (out, err):
main(UserType.CLIENT, [
'--add', 'userD', 'short'], self.test_dir)
self.assertIn("Test Error", out.getvalue().strip())
# Add User - Success
d.add_user.side_effect = None
with captured_output():
main(UserType.CLIENT, ['-a', 'userD',
'passwordD'], self.test_dir)
d.add_user.assert_called_with(
UserType.CLIENT, 'userD', 'passwordD')
@patch("lib.db_cli.user_db")
def test_get_token(self, u):
# Token - Fail
with self.assertLogs(level=logging.WARNING) as m:
main(UserType.CLIENT, ['-t', 'userA'], self.test_dir)
self.assertIn('User ID and Password have to defined.',
str(m.output))
# Token - Fail due to wrong password
u.verify_password.return_value = False
with captured_output() as (out, err):
main(UserType.CLIENT, ['--get_token', 'userA', 'wrong'],
self.test_dir)
self.assertIn('Incorrect password',
out.getvalue().strip())
# Token - Success
u.verify_password.return_value = True
with captured_output():
main(UserType.CLIENT, ['-t', 'userA',
'passwordA'], self.test_dir)
u.generate_token.assert_called_with(UserType.CLIENT, 'userA')
@patch("lib.db_cli.user_db")
def test_new_pw(self, d):
# New Password - Fail
with self.assertLogs(level=logging.WARNING) as m:
main(UserType.CLIENT, [
'-n', 'newPassword', 'userA'], self.test_dir)
self.assertIn('User ID and Password have to defined.',
str(m.output))
# New Password - Fail
d.update_password.side_effect = ValueError(
"Password needs to have at least 8 characters")
with self.assertLogs(level=logging.WARNING) as m:
main(UserType.CLIENT, ['--new', 'short', 'userA', 'passwordA'],
self.test_dir)
self.assertIn('Password needs to have at least 8 characters',
str(m.output))
# New Password - Success
d.update_password.side_effect = None
with captured_output() as (out, err):
main(UserType.CLIENT, ['-n', 'newPassword', 'userA', 'passwordA'],
self.test_dir)
self.assertTrue(d.verify_password(UserType.CLIENT, "userA",
"newPassword"))
@patch("lib.db_cli.user_db")
def test_verify_password(self, d):
# Verify Password - Fail
with self.assertLogs(level=logging.WARNING) as m:
main(UserType.CLIENT, ['--verify', 'userA'], self.test_dir)
self.assertIn('User ID and Password have to defined.',
str(m.output))
# Verify Password - Fail
d.verify_password.return_value = False
with captured_output() as (out, err):
main(UserType.CLIENT, ['--verify',
'userA', 'wrong'], self.test_dir)
self.assertIn('Password is not correct.',
out.getvalue().strip())
# Verify Password - Success
d.verify_password.return_value = True
with captured_output() as (out, err):
main(UserType.CLIENT, ['--verify',
'userA', 'passwordA'], self.test_dir)
self.assertIn('Credentials are correct.',
out.getvalue().strip())
@patch("lib.db_cli.user_db")
def test_verify_token(self, d):
# Verify Token - Fail
with self.assertLogs(level=logging.INFO) as m:
main(UserType.CLIENT, ['-s', "token"], self.test_dir)
self.assertIn('User ID has to be defined.',
str(m.output))
# Verify Token - Fail
d.verify_token.return_value = False
with captured_output() as (out, err):
main(UserType.CLIENT, ['--verify-token', 'wrong', 'userA', 'passwordA'],
self.test_dir)
self.assertIn('Bad Token.',
out.getvalue().strip())
# Verify Token - Success
d.verify_token.return_value = True
with captured_output() as (out, err):
main(UserType.CLIENT, ['-s', 'token', 'userA', 'passwordA'],
self.test_dir)
self.assertIn('Token correct. Token destroyed.',
out.getvalue().strip())
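# captured_output (lib.helpers) is used throughout this module to capture
# stdout/stderr; a minimal sketch of such a context manager, assuming the
# real helper may differ in detail:
import contextlib
@contextlib.contextmanager
def captured_output_sketch():
    new_out, new_err = io.StringIO(), io.StringIO()
    old_out, old_err = sys.stdout, sys.stderr
    try:
        sys.stdout, sys.stderr = new_out, new_err
        yield new_out, new_err  # tests then read out.getvalue()
    finally:
        sys.stdout, sys.stderr = old_out, old_err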
| 7,982
| 41.238095
| 88
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_similarity_metrics.py
|
#!/usr/bin/env python3
"""Test the similarity metrics.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import copy
import unittest
from unittest import TestCase
from unittest.mock import patch
import lib.similarity_metrics as sm
from lib.record import Record
class SimilarityMetricsTest(unittest.TestCase):
def test_map_metric(self):
with self.assertRaises(ValueError):
sm.map_metric("UNKNOWN")
func, args = sm.map_metric("absOffset-1")
self.assertEqual(sm.AbsoluteOffsetIterator, func)
self.assertEqual(
(1,), args)
func, args = sm.map_metric("absOffset-0.5")
self.assertEqual(sm.AbsoluteOffsetIterator, func)
self.assertEqual(
(0.5,), args)
func, args = sm.map_metric("relOffset-1")
self.assertEqual(sm.RelativeOffsetIterator, func)
self.assertEqual(
(1,), args)
func, args = sm.map_metric("relOffset-0.5")
self.assertEqual(sm.RelativeOffsetIterator, func)
self.assertEqual(
(0.5,), args)
func, args = sm.map_metric("wzl1")
self.assertEqual(sm.VariableOffsetIterator, func)
offsets = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000, 0, 0]
self.assertEqual(
(offsets, True), args)
func, args = sm.map_metric("wzl2")
self.assertEqual(sm.VariableOffsetIterator, func)
offsets = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 400]
self.assertEqual(
(offsets, True), args)
def test_offset(self):
r = [2.0, 2.0, 3.0, 4.0]
self.assertEqual(1,
len(list(sm.AbsoluteOffsetIterator(r, 0, [3, 3], 2))))
self.assertEqual(441,
len(list(
sm.AbsoluteOffsetIterator(r, 0.1, [3, 3], 2))))
r2 = [2000.0, 20000.0, 3.0, 4.0]
self.assertEqual(3,
len(list(
sm.AbsoluteOffsetIterator(r2, 10, [3, 3], 2))))
r = [200.0, 200.0, 3.0, 4.0]
it = sm.RelativeOffsetIterator(r, 0.1, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual([(200.0, 200.0, 3.0, 4.0)], [i for i in it])
def test_abs_offset_iterator(self):
r = [1.0, 2.0, 3.0, 4.0]
it = sm.AbsoluteOffsetIterator(r, 0.1, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(len(it), len([i for i in it]))
def test_rel_offset_iterator(self):
r = [2.0, 2.0, 3.0, 4.0]
it = sm.RelativeOffsetIterator(r, 5, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(441, len([i for i in it]))
r = [20.0, 20.0, 3.0, 4.0]
it = sm.RelativeOffsetIterator(r, 0.5, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(9, len([i for i in it]))
r = [200.0, 2.0, 3.0, 4.0]
it = sm.RelativeOffsetIterator(r, 0.05, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual([(200.0, 2.0, 3.0, 4.0)], [i for i in it])
def test_split(self):
r = [200.0, 2.0, 3.0, 4.0]
it = sm.RelativeOffsetIterator(r, 2, rounding_vec=[3, 3],
record_id_length=2)
l1 = [str(i) for i in it]
l = [str(i) for i in it]
with self.assertRaises(RuntimeError):
it.split(5)
it = sm.RelativeOffsetIterator(r, 2, rounding_vec=[3, 3],
record_id_length=2)
its = it.split(5)
self.assertGreaterEqual(len(its), 5)
l2 = []
for it in its:
l2.extend([str(i) for i in it])
self.assertEqual(sorted(l1), sorted(l2))
it = sm.RelativeOffsetIterator(r, 2, rounding_vec=[3, 3],
record_id_length=2)
its = it.split(5)
for i in its:
length = len(list(i))
self.assertIn(length, [9, 18])
# split on second
it = sm.AbsoluteOffsetIterator(r, 0.5, rounding_vec=[2, 2],
record_id_length=2)
it2 = copy.deepcopy(it)
l1 = [i for i in it2]
its = it.split(5)
self.assertEqual(4,
len(its))
l2 = []
for it in its:
l2.extend([i for i in it])
self.assertEqual(l1,
l2)
def test_repr(self):
r = [100.0, 1.0, 3.0, 4.0]
t = sm.AbsoluteOffsetIterator(r, 1)
self.assertEqual(
"<OffsetIterator from [99.0, 0.0, 2.0, 3.0] to [101.0, 2.0, 4.0, "
"5.0]>",
str(t)
)
@patch("lib.config.RECORD_LENGTH", 4)
def test_record_iterator(self):
r = [100.0, 1.0, 3.0, 4.0]
it = sm.RecordIterator([r], b"key")
res = [i for i in it]
self.assertEqual(
[Record(r, hash_key=b"key")],
res
)
@patch("lib.config.RECORD_LENGTH", 4)
def test_record_iterator_len(self):
r = [100.0, 1.0, 3.0, 4.0]
it = sm.RecordIterator([r], b"key")
self.assertEqual(1, len(it))
m = sm.RelativeOffsetIterator(r, 5, rounding_vec=[3, 3],
record_id_length=2)
it = sm.RecordIterator(m, b"key")
self.assertEqual(3136, len(it))
it = sm.RecordIterator(iter([1]), b"key")
self.assertEqual(0, len(it))
def test_comp_offset_num(self):
# Relative
r = [2.0, 2.0, 3.0, 4.0]
it = sm.RelativeOffsetIterator(r, 5, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(441, sm.comp_offset_num(it))
r = [20.0, 20.0, 3.0, 4.0]
it = sm.RelativeOffsetIterator(r, 0.5, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(9, sm.comp_offset_num(it))
# Absolute
r = [2.0, 2.0, 3.0, 4.0]
it = sm.AbsoluteOffsetIterator(r, 0.1, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(441, sm.comp_offset_num(it))
def test_len(self):
r = [2.0, 2.0, 3.0, 4.0]
offset = 0.5
it = sm.RelativeOffsetIterator(r, offset, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(
len([i for i in copy.deepcopy(it)]),
len(it)
)
self.assertEqual(
9, # 3 * 3
len(it)
)
offset = 5
it = sm.RelativeOffsetIterator(r, offset, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(
len([i for i in copy.deepcopy(it)]),
len(it)
)
offset = 7
it = sm.RelativeOffsetIterator(r, offset, rounding_vec=[3, 3],
record_id_length=2)
self.assertEqual(
len([i for i in copy.deepcopy(it)]),
len(it)
)
# Test len with power hop
# Example of WZL data
r = [1, 2.2, 60.0, 20.0, 60.0, 20.0, 60.0, 20.0,
1, 1, 2, 22.5, 23.6, 30.2, 1, 1, 40.0, 165.0, 0.08]
rounding = [0, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 0, 0, 3]
id_len = 17
offset = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 400]
it = sm.VariableOffsetIterator(r, offset, True, rounding, id_len)
self.assertEqual(
701,
len(it)
)
def test_compute_increment(self):
self.assertEqual(
0.1,
sm.compute_increment(44, 3)
)
self.assertEqual(
0.01,
sm.compute_increment(1, 3)
)
self.assertEqual(
10,
sm.compute_increment(1111, 3)
)
self.assertEqual(
0.1,
sm.compute_increment(-99, 3)
)
self.assertEqual(
1,
sm.compute_increment(-100, 3)
)
self.assertEqual(
0.0001,
sm.compute_increment(-0.03, 3)
)
self.assertEqual(
1,
sm.compute_increment(7, 0)
)
self.assertEqual(
1,
sm.compute_increment(10, 0)
)
self.assertEqual(
1,
sm.compute_increment(0.1, 0)
)
def test_copy(self):
r = [1.0, 1.0, 3.0, 4.0]
it = sm.RelativeOffsetIterator(r, 1, [2, 2], 2)
it2 = copy.copy(it)
self.assertEqual(len(it), len(it2))
self.assertEqual(list(it), list(it2))
def test_eval_offsets(self):
r = [float(i) for i in range(1, 101)]
rounding = [3 for _ in range(10)]
for offset in range(1, 6):
it = sm.RelativeOffsetIterator(r, offset, rounding, 10)
# self.assertGreaterEqual(10**9, len(it), f"Offset: {offset}")
@patch("lib.config.RECORD_ID_LENGTH", 5)
@patch("lib.config.ROUNDING_VEC", [2 for _ in range(5)])
class TestVariableOffsetIterator(TestCase):
target = [i for i in range(6)]
def test_error(self):
with self.assertRaises(ValueError) as e:
sm.VariableOffsetIterator(self.target, [1, 2])
def test_special_value_0(self):
it = sm.VariableOffsetIterator(self.target, [0 for _ in range(5)])
self.assertEqual(
1,
len(it)
)
self.assertEqual(
[tuple(self.target)],
[i for i in it]
)
def test_normal_case(self):
# Basically 2 identical Iterators
it = sm.VariableOffsetIterator(self.target, [10 for _ in range(5)])
it2 = sm.RelativeOffsetIterator(self.target, 10)
self.assertEqual(
len(it),
len(it2)
)
self.assertEqual(
list(it),
list(it2)
)
def test_manual_case(self):
with patch("lib.config.ROUNDING_VEC", [2 for _ in range(3)]),\
patch("lib.config.RECORD_ID_LENGTH", 3):
target = [2.0, 2.0, 4.0]
it = sm.VariableOffsetIterator(target, [5, 5, 2.5])
self.assertEqual(
{
(1.9, 2.0, 4.0),
(2.0, 2.0, 4.0),
(2.1, 2.0, 4.0),
(1.9, 1.9, 4.0),
(2.0, 1.9, 4.0),
(2.1, 1.9, 4.0),
(1.9, 2.1, 4.0),
(2.0, 2.1, 4.0),
(2.1, 2.1, 4.0),
(1.9, 2.0, 3.9),
(2.0, 2.0, 3.9),
(2.1, 2.0, 3.9),
(1.9, 1.9, 3.9),
(2.0, 1.9, 3.9),
(2.1, 1.9, 3.9),
(1.9, 2.1, 3.9),
(2.0, 2.1, 3.9),
(2.1, 2.1, 3.9),
(1.9, 2.0, 4.1),
(2.0, 2.0, 4.1),
(2.1, 2.0, 4.1),
(1.9, 1.9, 4.1),
(2.0, 1.9, 4.1),
(2.1, 1.9, 4.1),
(1.9, 2.1, 4.1),
(2.0, 2.1, 4.1),
(2.1, 2.1, 4.1),
},
set(it)
)
it = sm.VariableOffsetIterator(target, [5, 5, 2.5],
positive_only=True)
self.assertEqual(
{
(2.0, 2.0, 4.0),
(2.1, 2.0, 4.0),
(2.0, 2.1, 4.0),
(2.1, 2.1, 4.0),
(2.0, 2.0, 4.1),
(2.1, 2.0, 4.1),
(2.0, 2.1, 4.1),
(2.1, 2.1, 4.1),
},
set(it)
)
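# Hedged sketch consistent with the compute_increment assertions above: the
# increment is one unit in the last of `digits` significant figures of
# `value`, and 1 when digits == 0 (the zero-value branch here is an
# assumption); lib.similarity_metrics remains authoritative.
import math
def compute_increment_sketch(value: float, digits: int) -> float:
    if digits == 0 or value == 0:
        return 1
    return 10.0 ** (math.floor(math.log10(abs(value))) - digits + 1)
assert compute_increment_sketch(44, 3) == 0.1
assert compute_increment_sketch(-0.03, 3) == 0.0001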
| 12,025
| 33.657061
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_integrity.py
|
#!/usr/bin/env python3
"""Integrity Test for client application.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import contextlib
import json
import logging
import os
import shutil
import warnings
from typing import List
from unittest import TestCase
from unittest.mock import patch, Mock
from requests_flask_adapter import Session
import client
import data_provider
import key_server
import storage_server
from lib import config
from lib.helpers import to_base64
from lib.key_server_backend import KeyServer
from lib.record import Record
from lib.storage_server_backend import StorageServer
from lib.user_database import Client, Owner, db as user_db
test_dir = config.DATA_DIR + "test/"
loglvl = logging.ERROR
def mock_verify(pwhash, password):
return pwhash == "pwd-hash"
def mock_insert(records):
StorageServer(test_dir).batch_store_records_bloom(records)
return Mock()
@patch("lib.config.EVAL", False)
@patch("lib.config.RECORD_ID_LENGTH", 2)
@patch("lib.config.ROUNDING_VEC", [3, 3])
@patch("lib.config.RECORD_LENGTH", 5)
@patch("lib.config.OT_TLS", True)
@patch("lib.config.BLOOM_CAPACITY", 100)
@patch("lib.config.BLOOM_ERROR_RATE", 10 ** -5)
@patch("lib.config.OT_SETSIZE", 10)
@patch("lib.config.PSI_SETSIZE", 450) # 441 Candidates
@patch("key_server.connector.execute_ot", Mock())
@patch("storage_server.client.execute_psi", Mock())
@patch("storage_server.connector.insert_bloom.delay", mock_insert)
@patch("lib.user_database.generate_password_hash",
Mock(return_value="pwd-hash"))
@patch("lib.user_database.check_password_hash", mock_verify)
@patch("lib.database.add_task", Mock())
class ClientIntegrityTest(TestCase):
"""
Full integrity test, only OT and PSI mocked.
"""
target = [1.0, 2.0, 3.0, 4.0, 5.5]
# Server Records:
enc_keys = [
b'\x1b\x8fL+\xd2\xfcLQ\x1a\x03:\xcf\x15\x8a\xc7+'
for _ in range(10)
]
enc_keys_int = [int.from_bytes(i, 'big') for i in enc_keys]
user = "testuser"
provider = "testprovider"
password = "password"
test_config = {
'TESTING': True,
'DATA_DIR': test_dir,
'CELERY_ALWAYS_EAGER': True # Celery synchronous execution
}
@classmethod
@patch("lib.config.OT_SETSIZE", 10)
@patch("lib.config.RECORD_LENGTH", 5)
@patch("lib.config.RECORD_ID_LENGTH", 2)
@patch("lib.config.ROUNDING_VEC", [3, 3])
def setUpClass(cls) -> None:
"""Disable logging."""
logging.getLogger().setLevel(loglvl)
warnings.filterwarnings("ignore", category=ResourceWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
# Clear directory
shutil.rmtree(test_dir, ignore_errors=True)
os.makedirs(test_dir, exist_ok=True)
# Records
cls.sr: List[Record] = [
Record([1.1, 2.01, 3.3, 4.4, 5.5]), # Match
Record([1.5, 4.4, 3.9, 5.0, 5.5]), # No Match
Record([1.0, 7.0, 3.0, 4.0, 5.5]), # No Match
Record([1.0, 2.0, 10.6, 10.0, 5.5]), # Match
Record([3.0, 2.0, 3.0, 4.0, 5.5]), # No Match
Record([1.1, 2.104, 5, 9, 5.5]), # Match
Record([2.0, 2.0, 3.0, 4.0, 5.5]) # No Match
]
cls.matches = [
cls.sr[0],
cls.sr[3],
cls.sr[5]
]
# Generate hash and encryption keys
key_backend = KeyServer(test_dir)
cls.hash_key = key_backend._hash_key
for r in cls.sr:
r.set_hash_key(cls.hash_key)
@classmethod
def tearDownClass(cls) -> None:
"""Remove Test files."""
shutil.rmtree(test_dir, ignore_errors=True)
@patch("lib.config.OT_TLS", True)
@patch("lib.config.BLOOM_CAPACITY", 100)
@patch("lib.config.BLOOM_ERROR_RATE", 10 ** -5)
def setUp(self) -> None:
with contextlib.suppress(FileNotFoundError):
os.remove(test_dir + config.BLOOM_FILE)
os.remove(test_dir + config.STORAGE_DB)
os.remove(test_dir + config.KEYSERVER_DB)
# Flask apps
self.str_app = storage_server.create_app(
self.test_config,
logging_level=loglvl)
self.key_app = key_server.create_app(self.test_config,
logging_level=loglvl)
Session.register('https://localhost:5000', self.key_app)
Session.register('https://localhost:5001', self.str_app)
# Create Fake users for Key Server
with self.key_app.app_context():
c = Client(username=self.user, password="pwd-hash")
p = Owner(username=self.provider, password="pwd-hash")
user_db.session.add(c)
user_db.session.add(p)
user_db.session.commit()
# Create Fake users for Storage Server
with self.str_app.app_context():
c = Client(username=self.user, password="pwd-hash")
p = Owner(username=self.provider, password="pwd-hash")
user_db.session.add(c)
user_db.session.add(p)
user_db.session.commit()
@patch("lib.storage_server_backend.StorageServer._add_to_billing_db",
Mock())
@patch("lib.config.PSI_MODE", False)
def test_client_integrity_bpe(self):
# Full Integrity Test including flask
self.c = client.Client(self.user)
self.c.set_password(self.password)
self.c.metric = "offset-0.1"
# Redirect requests to flask
target = self.target
# Server Records:
matches: List[Record] = self.matches
str_backend = StorageServer(test_dir)
with self.str_app.app_context():
for m in matches:
str_backend.store_record(
to_base64(m.get_long_hash()),
json.dumps(m.get_encrypted_record(self.enc_keys[0])),
'OwnerA'
)
# PSI Matches
psi_matches = []
for m in matches:
psi_matches.append(m.get_psi_index())
s = Session(True)
with patch("requests.get", s.get), \
patch("requests.post", s.post), \
patch.object(self.c, "_receive_ots",
Mock(return_value=self.enc_keys_int[:3])):
res = self.c.full_retrieve(target)
# Set hash key for comparison
for r in res:
r.set_hash_key(self.hash_key)
# Compare without order
for m in matches:
self.assertIn(m, res)
for r in res:
self.assertIn(r, matches)
@patch("lib.storage_server_backend.StorageServer._add_to_billing_db",
Mock())
@patch("lib.config.PSI_MODE", True)
def test_client_integrity_psi(self):
# Full Integrity Test including flask
self.c = client.Client(self.user)
self.c.set_password(self.password)
self.c.metric = "offset-0.1"
# Redirect requests to flask
target = self.target
# Server Records:
matches: List[Record] = self.matches
str_backend = StorageServer(test_dir)
with self.str_app.app_context():
for m in matches:
str_backend.store_record(
to_base64(m.get_long_hash()),
json.dumps(m.get_encrypted_record(self.enc_keys[0])),
'OwnerA'
)
# PSI Matches
psi_matches = []
for m in matches:
psi_matches.append(m.get_psi_index())
s = Session(True)
with patch("requests.get", s.get), \
patch("requests.post", s.post), \
patch.object(self.c, "_receive_psi",
Mock(return_value=psi_matches)), \
patch.object(self.c, "_receive_ots",
Mock(return_value=self.enc_keys_int[:3])):
res = self.c.full_retrieve(target)
# Set hash key for comparison
for r in res:
r.set_hash_key(self.hash_key)
# Compare without order
for m in matches:
self.assertIn(m, res)
for r in res:
self.assertIn(r, matches)
@patch("lib.storage_server_backend.StorageServer._add_to_billing_db",
Mock())
@patch("lib.storage_server_backend.StorageServer._add_to_transaction_db",
Mock())
@patch("lib.storage_server_backend.get_user",
Mock())
def test_data_provider_int(self):
# Full integrity Test including flask
self.dp = data_provider.DataProvider(self.provider)
self.dp.set_password(self.password)
str_backend = StorageServer(test_dir)
with self.str_app.app_context():
for r in self.sr:
# check that bloom filter is empty
b = str_backend.bloom
self.assertNotIn(to_base64(r.get_long_hash()), b)
# Check that DB empty
res = str_backend.batch_get_records(
[to_base64(r.get_long_hash()) for r in self.sr],
"client"
)
# Decrypt
result = [
Record.from_ciphertext(json.loads(r), self.enc_keys[0])
for h, r in res
]
self.assertEqual([], result)
s = Session(True)
with patch("requests.get", s.get), \
patch("requests.post", s.post), \
patch.object(self.dp, "_receive_ots",
Mock(return_value=self.enc_keys_int[:len(self.sr)])):
self.dp.store_records(self.sr)
str_backend = StorageServer(test_dir)
with self.str_app.app_context():
for r in self.sr:
# check that records are in bloom filter
b = str_backend.bloom
self.assertIn(to_base64(r.get_long_hash()), b)
# Check records in db
res = str_backend.batch_get_records(
[to_base64(r.get_long_hash()) for r in self.sr],
"client"
)
# Decrypt
result = [
Record.from_ciphertext(json.loads(r), self.enc_keys[0])
for h, r in res
]
for m in self.sr:
self.assertIn(m, result)
for r in result:
self.assertIn(r, self.sr)
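# Hedged illustration of the match/no-match split above: with
# RECORD_ID_LENGTH = 2, only the first two (rounded) dimensions form the
# record identifier, so a stored record matches iff that identifier lies
# within the metric's offset of the target. The offset value and rounding
# below are assumptions for illustration; the client's real candidate
# generation lives in lib.similarity_metrics.
import math
def _round_sig(v: float, digits: int = 3) -> float:
    return v if v == 0 else round(v, digits - 1 - math.floor(math.log10(abs(v))))
def _matches_sketch(record, target=(1.0, 2.0), offset=0.1):
    return all(abs(_round_sig(r) - t) <= offset + 1e-9
               for r, t in zip(record, target))
assert _matches_sketch([1.1, 2.01]) and _matches_sketch([1.1, 2.104])
assert not _matches_sketch([1.5, 4.4]) and not _matches_sketch([2.0, 2.0])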
| 10,326
| 34.245734
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_psi.py
|
#!/usr/bin/env python3
"""Test Cython Port of libPSI.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import multiprocessing
import sys
from unittest import TestCase, skip
from lib import config
# Python Version of libPSI
from lib.helpers import get_free_port
sys.path.append(config.WORKING_DIR + 'cython/psi')
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSISender # noqa
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSIReceiver # noqa
# Constants
SETSIZE = 10
HOSTNAME = "127.0.0.1"
class PSITest(TestCase):
server_set = list(range(SETSIZE))
client_set = [i + SETSIZE * (i % 2) for i in server_set]
result_set = [i for i in server_set if i % 2 == 0]
def setUp(self) -> None:
"""Create receiver and Sender."""
self.port = get_free_port()
self.recv = self.get_receiver(self.port)
self.sender = self.get_sender(self.port)
def psi_sender(self, scheme: str, tls: bool): # pragma no cover
"""Act as PSI Sender to test receivers."""
sender = self.get_sender(self.port)
sender.tls = tls
sender.execute(scheme, self.server_set)
def psi_receiver(self, queue: multiprocessing.Queue, scheme: str,
tls: bool): # pragma no cover
"""Act as PSI Receiver to test senders."""
receiver = self.get_receiver(self.port)
receiver.tls = tls
r = receiver.execute(scheme, self.client_set)
queue.put(r)
@classmethod
def get_sender(cls, port) -> PyPSISender:
"""Return a configured PSISender."""
sender = PyPSISender()
sender.setSize = SETSIZE
sender.hostName = HOSTNAME
sender.port = port
sender.numThreads = 1
sender.serverCert = config.KEY_TLS_CERT
sender.serverKey = config.KEY_TLS_KEY
return sender
@classmethod
def get_receiver(cls, port) -> PyPSIReceiver:
"""Return a configured PSIReceiver."""
recv = PyPSIReceiver()
recv.setSize = SETSIZE
recv.hostName = HOSTNAME
recv.port = port
recv.numThreads = 1
recv.rootCA = config.TLS_ROOT_CA
return recv
def test_KKRT16_recv(self):
tls = False
self.recv.tls = tls
scheme = "KKRT16"
p = multiprocessing.Process(target=self.psi_sender,
args=(scheme, tls))
p.start()
res = self.recv.execute(scheme, self.client_set)
p.join()
self.assertEqual(set(res), set(self.result_set))
def test_KKRT16_recv_tls(self):
tls = True
self.recv.tls = tls
scheme = "KKRT16"
p = multiprocessing.Process(target=self.psi_sender,
args=(scheme, tls))
p.start()
res = self.recv.execute(scheme, self.client_set)
p.join()
self.assertEqual(set(res), set(self.result_set))
@skip("Very slow")
def test_RR16_recv(self): # pragma no cover
tls = False
self.recv.tls = tls
scheme = "RR16"
p = multiprocessing.Process(target=self.psi_sender,
args=(scheme, tls))
p.start()
res = self.recv.execute(scheme, self.client_set)
p.join()
self.assertEqual(set(res), set(self.result_set))
@skip("Very slow")
def test_RR16_recv_tls(self): # pragma no cover
tls = True
self.recv.tls = tls
scheme = "RR16"
p = multiprocessing.Process(target=self.psi_sender,
args=(scheme, tls))
p.start()
res = self.recv.execute(scheme, self.client_set)
p.join()
self.assertEqual(set(res), set(self.result_set))
@skip("Failes for large sets.")
def test_RR17_recv(self): # pragma no cover
tls = False
self.recv.tls = tls
scheme = "RR17"
p = multiprocessing.Process(target=self.psi_sender,
args=(scheme, tls))
p.start()
res = self.recv.execute(scheme, self.client_set)
p.join()
self.assertEqual(set(res), set(self.result_set))
@skip("Not used in project.")
def test_RR17_recv_tls(self): # pragma no cover
tls = True
self.recv.tls = tls
scheme = "RR17"
p = multiprocessing.Process(target=self.psi_sender,
args=(scheme, tls))
p.start()
res = self.recv.execute(scheme, self.client_set)
p.join()
self.assertEqual(set(res), set(self.result_set))
@skip("Implicitelly tested via receive above.")
def test_KKRT16_send(self): # pragma no cover
tls = False
self.sender.tls = tls
scheme = "KKRT16"
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.psi_receiver, args=(q,
scheme,
tls))
p.start()
self.sender.execute(scheme, self.server_set)
p.join()
result = q.get()
self.assertEqual(set(result), set(self.result_set))
@skip("Implicitelly tested via receive above.")
def test_KKRT16_send_tls(self): # pragma no cover
tls = True
self.sender.tls = tls
scheme = "KKRT16"
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.psi_receiver, args=(q,
scheme,
tls))
p.start()
self.sender.execute(scheme, self.server_set)
p.join()
result = q.get()
self.assertEqual(set(result), set(self.result_set))
@skip("Very slow and not needed in production code.")
def test_RR16_send(self): # pragma no cover
tls = False
self.sender.tls = tls
scheme = "RR16"
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.psi_receiver, args=(q,
scheme,
tls))
p.start()
self.sender.execute(scheme, self.server_set)
p.join()
result = q.get()
self.assertEqual(set(result), set(self.result_set))
@skip("Very slow and not needed in production code.")
def test_RR16_send_tls(self): # pragma no cover
tls = True
self.sender.tls = tls
scheme = "RR16"
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.psi_receiver, args=(q,
scheme,
tls))
p.start()
self.sender.execute(scheme, self.server_set)
p.join()
result = q.get()
self.assertEqual(set(result), set(self.result_set))
@skip("Not used in project.")
def test_RR17_send(self): # pragma no cover
tls = False
self.sender.tls = tls
scheme = "RR17"
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.psi_receiver, args=(q,
scheme,
tls))
p.start()
self.sender.execute(scheme, self.server_set)
p.join()
result = q.get()
self.assertEqual(set(result), set(self.result_set))
@skip("Not used in project.")
def test_RR17_send_tls(self): # pragma no cover
tls = True
self.sender.tls = tls
scheme = "RR17"
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.psi_receiver, args=(q,
scheme,
tls))
p.start()
self.sender.execute(scheme, self.server_set)
p.join()
result = q.get()
self.assertEqual(set(result), set(self.result_set))
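# Sanity note on the fixtures above: client_set shifts every odd element out
# of the server's range, so the expected intersection is exactly the even
# elements of server_set.
assert set(PSITest.server_set) & set(PSITest.client_set) == set(PSITest.result_set)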
| 8,299
| 34.021097
| 75
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_ot_receiver.py
|
#!/usr/bin/env python3
"""Test Class for OTReceiver
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import io
import logging
import sys
from unittest import TestCase, mock
from unittest.mock import Mock
import OTReceiver
from lib.helpers import captured_output
class OTReceiverTest(TestCase):
m = Mock()
m2 = Mock(return_value=m)
@classmethod
def setUpClass(cls) -> None:
"""Disable logging"""
logging.getLogger().setLevel(logging.FATAL)
cls.m.execute.return_value = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
def setUp(self) -> None:
"""Trap print output"""
text_trap = io.StringIO() # Block print of argparse
sys.stderr = text_trap
def test_main(self):
with self.assertRaises(SystemExit):
with captured_output():
OTReceiver.main([])
@mock.patch("OTReceiver.PyOTReceiver", m2)
def test_receiving(self):
total_ots = 10
res = OTReceiver.main([str(total_ots)])
self.assertEqual(res, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
@mock.patch("OTReceiver.PyOTReceiver", m2)
def test_different_port(self):
total_ots = 10
port = 50000
res = OTReceiver.main([str(total_ots), "-p", "50000"])
self.assertEqual(res, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
self.assertEqual(self.m.port, port)
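# The pattern above patches the PyOTReceiver *class* with Mock(return_value=m),
# so every "construction" inside OTReceiver.main returns the shared instance
# mock and attribute assignments (e.g. the port) can be asserted afterwards.
# Standalone illustration of the technique:
from unittest.mock import Mock as _Mock
_instance = _Mock()
_factory = _Mock(return_value=_instance)  # stands in for the patched class
_receiver = _factory()                    # what the code under test "constructs"
_receiver.port = 50000
assert _instance.port == 50000            # visible on the shared instance mock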
| 1,413
| 25.679245
| 68
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_psi_receiver.py
|
#!/usr/bin/env python3
"""Test Class for PSIReceiver.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import io
import logging
import sys
from unittest import TestCase, mock
from unittest.mock import Mock
import PSIReceiver
from lib import config
@mock.patch("lib.config.PSI_SETSIZE", 20)
class PSIReceiverTest(TestCase):
m = Mock()
m2 = Mock(return_value=m)
@classmethod
def setUpClass(cls) -> None:
"""Disable logging"""
logging.getLogger().setLevel(logging.FATAL)
cls.m.execute.return_value = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
def setUp(self) -> None:
"""Trap print output"""
text_trap = io.StringIO() # Block print of argparse
sys.stderr = text_trap
@mock.patch("PSIReceiver.PyPSIReceiver", m2)
def test_receiving(self):
res = PSIReceiver.main(["55"])
self.assertEqual(res, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
self.m.execute.assert_called_with("KKRT16",
list(range(55)))
@mock.patch("PSIReceiver.PyPSIReceiver", m2)
def test_different_port(self):
port = 50000
res = PSIReceiver.main(["-p", "50000"])
self.assertEqual(res, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
self.assertEqual(self.m.port, port)
self.m.execute.assert_called_with("KKRT16",
list(range(config.PSI_SETSIZE)))
@mock.patch("PSIReceiver.PyPSIReceiver", m2)
def test_hostname(self):
res = PSIReceiver.main(["-n", "localhost"])
self.assertEqual(res, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
self.assertEqual(self.m.hostName, "localhost")
self.m.execute.assert_called_with("KKRT16",
list(range(config.PSI_SETSIZE)))
@mock.patch("PSIReceiver.PyPSIReceiver", m2)
def test_scheme(self):
res = PSIReceiver.main(["-s", "RR17"])
self.assertEqual(res, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
self.m.execute.assert_called_with("RR17",
list(range(config.PSI_SETSIZE)))
| 2,159
| 32.230769
| 74
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_main_scripts.py
|
#!/usr/bin/env python3
"""Hacky test for coverage of main files.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import os
import tempfile
import random_record_generator
import setuptools
from unittest import TestCase, mock, skip
import logging
from unittest.mock import patch
class TestMain(TestCase):
def setUp(self) -> None:
logging.getLogger().setLevel(logging.ERROR)
def test_record_gen(self):
f = tempfile.NamedTemporaryFile(delete=False)
random_record_generator.main(["1", "-o", f"{f.name}"])
os.remove(f.name)
def test_setup(self):
import warnings
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning)
with mock.patch.object(setuptools, "setup"):
import setup
#self.assertIn("parameter-exchange", setup.readme)
self.assertIn("flask", setup.requirements)
@skip("Slow an trivial.")
@patch("lib.config.LOGLEVEL", logging.ERROR)
def test_celery(self): # pragma no cover
import key_server.celery as c1
import storage_server.celery as c2
self.assertEqual('key_server', c1.app.name)
self.assertEqual('storage_server', c2.app.name)
| 1,327
| 28.511111
| 70
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_storage_server.py
|
#!/usr/bin/env python3
"""Test of storage server backend.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import os
import shutil
from unittest import TestCase, mock
from unittest.mock import Mock, patch
from flask import Flask
from pybloomfilter import BloomFilter
import lib.config as config
import lib.helpers as helpers
import lib.storage_server_backend as server
from lib.record import Record
l1 = [
('a', 'ciphertext1', 'owner'),
('b', 'ciphertext2', 'owner'),
('c', 'ciphertext3', 'owner'),
('d', 'ciphertext4', 'owner'),
('e', 'ciphertext5', 'owner'),
('f', 'ciphertext6', 'owner'),
('g', 'ciphertext7', 'owner'),
]
l2 = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
l3 = ['h', 'i', 'j', 'k', 'l', 'm']
test_dir = config.DATA_DIR + "test/"
mock_app = Flask(__name__)
mock_app.config.from_mapping(
SQLALCHEMY_DATABASE_URI=f"sqlite:///{test_dir}/{config.STORAGE_DB}",
SQLALCHEMY_TRACK_MODIFICATIONS=False
)
@patch("lib.config.BLOOM_CAPACITY", 20)
@patch("lib.config.BLOOM_ERROR_RATE", 0.01)
@patch("lib.config.RECORD_LENGTH", 5)
class StorageServerTest(TestCase):
storage_db_path = test_dir + config.STORAGE_DB
bloom_path = test_dir + config.BLOOM_FILE
def setUp(self) -> None:
"""Create testing directories."""
logging.getLogger().setLevel(logging.FATAL)
shutil.rmtree(test_dir, ignore_errors=True)
os.makedirs(test_dir)
@classmethod
def tearDownClass(cls) -> None:
"""Remove Testing directories"""
shutil.rmtree(test_dir, ignore_errors=True)
def test_bloom(self):
s = server.StorageServer(test_dir)
self.assertFalse(os.path.exists(self.bloom_path))
with patch.object(s, "_initialize_bloom_filter") as m:
b = s.bloom
m.assert_called_once()
b = BloomFilter(2, .1, self.bloom_path)
b.add(5)
c = s.bloom
m.assert_called_once() # No second call
self.assertIn(5, c)
def test_init(self):
# Is Directory created?
os.rmdir(test_dir)
self.assertFalse(os.path.exists(test_dir))
server.StorageServer(test_dir)
self.assertTrue(os.path.exists(test_dir))
@patch("lib.storage_server_backend.StoredRecord")
def test_initialize_bloom_filter(self, m):
m.query.all.return_value = []
s = server.StorageServer(test_dir)
# Update with contents from DB
mock_records = []
for l in l2:
mo = Mock()
mo.hash = l
mock_records.append(mo)
m.query.all.return_value = mock_records
s._initialize_bloom_filter()
b = s.bloom
for e in l2:
self.assertIn(e, b)
for e in l3:
self.assertNotIn(e, b)
def test_store_record(self):
s = server.StorageServer(test_dir)
BloomFilter(20, 0.1, self.bloom_path) # create bloom filter
self.assertNotIn('a', s.bloom)
with patch("lib.storage_server_backend.db") as db:
s.store_record('a', 'record', 'owner')
# Check bloom filter
self.assertIn('a', s.bloom)
# check db
db.session.add.assert_called_once()
def test_batch_store_records_db(self):
with patch("lib.storage_server_backend.db") as db:
server.StorageServer.batch_store_records_db(l1)
# check db
self.assertEqual(len(l2), db.session.add.call_count)
def test_batch_store_records_bloom(self):
s = server.StorageServer(test_dir)
BloomFilter(20, 0.1, self.bloom_path) # create bloom filter
b = s.bloom
for e in l2:
self.assertNotIn(e, b)
s.batch_store_records_bloom(l1)
# Check bloom filter
for e in l2:
self.assertIn(e, b)
def test_get_record(self):
with patch.object(server.StorageServer, "batch_get_records") as c:
server.StorageServer.get_record("hash", "client")
c.assert_called_once_with(["hash"], "client")
with self.assertRaises(ValueError) as e:
with patch.object(server.StorageServer,
"batch_get_records",
return_value=[]):
server.StorageServer.get_record('hash', "client")
self.assertEqual(str(e.exception), "No record for hash exists: 'hash'")
@patch("lib.storage_server_backend.StorageServer._add_to_billing_db",
Mock())
@patch("lib.storage_server_backend.StorageServer._add_to_transaction_db",
Mock())
@patch("lib.storage_server_backend.get_user",
Mock())
def test_batch_get_record(self):
server.db.init_app(mock_app)
with mock_app.test_request_context(), \
patch.object(server.StorageServer, "bloom", new_callable=Mock()):
server.db.create_all()
s = server.StorageServer(test_dir)
s.batch_store_records_db(l1)
s.batch_store_records_bloom(l1)
res = s.batch_get_records(l2[::2], "client")
self.assertEqual(
[(h, r) for (h, r, o) in l1[::2]],
res
)
def test_get_bloom_filter(self):
s = server.StorageServer(test_dir)
b = BloomFilter(20, 0.01, self.bloom_path) # create bloom filter
b.update(l2)
b64 = s.get_bloom_filter()
b = BloomFilter.from_base64(f"{test_dir}bloom-test.bloom", b64)
for e in l2:
self.assertIn(e, b)
for e in l3:
self.assertNotIn(e, b)
def test_offer_psi(self):
port = 5555
s = server.StorageServer()
set = list(range(20))
m = Mock()
with mock.patch("lib.storage_server_backend.PyPSISender",
return_value=m):
with mock.patch.object(server.StorageServer,
"get_all_record_psi_hashes",
return_value=set):
s.offer_psi(22, port)
self.assertEqual(config.PSI_SCHEME, m.execute.call_args[0][0])
self.assertEqual(set, m.execute.call_args[0][1])
set = list(range(100))
mock.patch.object(s, "get_all_record_psi_hashes", return_value=set)
with self.assertRaises(RuntimeError):
with mock.patch.object(server.StorageServer,
"get_all_record_psi_hashes",
return_value=set):
s.offer_psi(22, port=port)
def test_get_all_record_psi_hashes(self):
records = []
correct = []
for i in range(10):
r = Record([1, 2, 3, 4, 5])
r.set_hash_key(b'fake_key')
m = Mock()
m.hash = helpers.to_base64(r.get_long_hash())
records.append(m)
correct.append(r.get_psi_index())
with patch("lib.storage_server_backend.StoredRecord") as c:
c.query.all.return_value = records
s = server.StorageServer()
res = s.get_all_record_psi_hashes()
self.assertEqual(correct, res)
@patch("lib.storage_server_backend.RecordRetrieval")
@patch("lib.storage_server_backend.db")
def test__add_to_transaction_db(self, db, RecordRetrieval):
r1 = Record([1, 2, 3, 4, 5])
r2 = Record([1.1, 2, 3, 4, 5])
r3 = Record([1, 2.2, 3, 4, 5])
r4 = Record([1.1, 2.2, 3, 4, 5])
r5 = Record([1.2, 2.22, 3, 4, 5])
recs = [r1, r2, r3, r4, r5]
for r in recs:
r.set_hash_key(b'hash-key')
hashes = [helpers.to_base64(r.get_long_hash()) for r in recs]
records = [Mock() for _ in range(5)]
records[0].hash = helpers.to_base64(r1.get_long_hash())
records[1].hash = helpers.to_base64(r1.get_long_hash()) # Same
records[2].hash = helpers.to_base64(r3.get_long_hash())
records[3].hash = helpers.to_base64(r4.get_long_hash())
records[4].hash = helpers.to_base64(r5.get_long_hash())
server.StorageServer._add_to_transaction_db(records, "client", hashes)
self.assertEqual(1, RecordRetrieval.call_count) # one retrieval entry
expected = {
"client": "client",
"enc_keys_by_hash": 5,
"enc_keys_by_records": 4
}
self.assertEqual(expected, RecordRetrieval.call_args[1])
@patch("lib.storage_server_backend.Owner")
@patch("lib.storage_server_backend.BillingInfo")
@patch("lib.storage_server_backend.db")
def test__add_to_billing_db(self, db: Mock, binfo: Mock, owner: Mock):
mockA = Mock()
mockA.owner = "OwnerA"
mockA.username = "OwnerA"
mockB = Mock()
mockB.owner = "OwnerB"
mockB.username = "OwnerB"
t_mock = "transaction"
records = [mockA for _ in range(5)]
for _ in range(3):
records.append(mockB)
owner.query.filter_by.return_value.first = Mock(
side_effect=[mockA, mockB, None])
server.StorageServer._add_to_billing_db(records, "client", t_mock)
self.assertEqual(2, binfo.call_count) # 2 owners
expected = [
({
"provider": mockA,
"count": 5,
"client": "client",
"transaction": "transaction"
},),
({
"provider": mockB,
"count": 3,
"client": "client",
"transaction": "transaction"
},)
]
self.assertEqual(expected, binfo.call_args_list)
with self.assertRaises(ValueError):
server.StorageServer._add_to_billing_db(records, "client", t_mock)
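# Hedged sketch of the base64 round-trip exercised in test_get_bloom_filter,
# assuming pybloomfilter's to_base64 as the counterpart of the from_base64
# call used above (wrapped in a helper to avoid filesystem side effects at
# import time; the path prefix is illustrative):
def _bloom_base64_roundtrip_sketch(prefix: str) -> None:
    bf = BloomFilter(20, 0.01, prefix + "sketch.bloom")  # capacity, error rate
    bf.update(['a', 'b', 'c'])
    encoded = bf.to_base64()  # what StorageServer.get_bloom_filter serves
    restored = BloomFilter.from_base64(prefix + "sketch2.bloom", encoded)
    assert 'a' in restored and 'z' not in restored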
| 9,799
| 35.70412
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_storage_app.py
|
#!/usr/bin/env python3
"""Test storage server frontend flask application.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import os
import shutil
import warnings
from datetime import datetime, timedelta
from unittest import TestCase, skip
from unittest.mock import patch, Mock
from flask import g, current_app
import storage_server
from lib import config
from lib.base_client import UserType
from lib.database import Task
from lib.helpers import generate_auth_header
from storage_server import connector, client
from storage_server.connector import TaskType
test_dir = config.DATA_DIR + "test/"
correct_user = 'correct_user'
correct_pw = "correct_pw"
correct_tk = 'correct_token'
test_config = {
'TESTING': True,
'DATA_DIR': test_dir
}
def mock_verify_token(user_type, user, token):
"""Inexpensive mock version of verify token."""
if 'LOGIN_DISABLED' in current_app.config and current_app.config[
'LOGIN_DISABLED']:
return True
return user == correct_user and token == correct_tk and user_type in [
UserType.OWNER, UserType.CLIENT
]
def mock_verify_pw(user, pw):
"""Inexpensive mock version of verify password."""
return user == correct_user and pw == correct_pw
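# generate_auth_header (lib.helpers) pre-builds the credentials used below; a
# minimal sketch of what such an HTTP Basic Auth helper typically returns
# (the real helper may differ):
import base64 as _base64
def generate_auth_header_sketch(user: str, secret: str) -> dict:
    creds = _base64.b64encode(f"{user}:{secret}".encode()).decode()
    return {'Authorization': f"Basic {creds}"}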
@patch("lib.config.BLOOM_CAPACITY", 100)
@patch("lib.config.BLOOM_ERROR_RATE", 10 ** -5)
class StorageAppTest(TestCase):
user = correct_user
tk = correct_tk
pw = correct_pw
@classmethod
@patch("lib.config.BLOOM_CAPACITY", 100)
@patch("lib.config.BLOOM_ERROR_RATE", 10 ** -5)
def setUpClass(cls) -> None:
"""Disable logging, create test app and pre-generate auth headers."""
logging.getLogger().setLevel(logging.FATAL)
shutil.rmtree(test_dir, ignore_errors=True)
os.makedirs(test_dir, exist_ok=True)
cls.auth_header = generate_auth_header(correct_user, correct_tk)
cls.auth_header_cor_pw = generate_auth_header(correct_user, correct_pw)
cls.auth_header_wrong_user = generate_auth_header("wrong",
correct_tk)
cls.auth_header_wrong_pw = generate_auth_header(correct_user,
"wrong")
cls.app = storage_server.create_app(
test_config,
logging_level=logging.FATAL,
data_dir=test_dir)
cls.client = cls.app.test_client()
@classmethod
def tearDownClass(cls) -> None:
"""Remove tempfiles."""
shutil.rmtree(test_dir, ignore_errors=True)
def setUp(self) -> None:
"""Enable log-in."""
self.app.config.update(LOGIN_DISABLED=False)
# -------------------------------------------------------------------------
# main.py------------------------------------------------------------------
@patch("storage_server.main.is_redis_online", Mock(return_value=False))
@patch("storage_server.main.render_template", Mock(return_value="Text"))
@patch("storage_server.main.is_celery_online", Mock(return_value=False))
def test_main_true(self):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(b"Text", res.data)
@patch("storage_server.celery_app")
def test_celery_status(self, m):
m.control.inspect.return_value.ping.return_value = 1
res = self.client.get('/celery')
self.assertEqual(res.status_code, 200)
self.assertEqual(b"True", res.data)
m.control.inspect.return_value.ping.return_value = None
res = self.client.get('/celery')
self.assertEqual(res.status_code, 200)
self.assertEqual(b"False", res.data)
@patch("storage_server.main.is_redis_online", Mock(return_value=False))
@patch("storage_server.main.render_template", Mock(return_value="Text"))
@patch("storage_server.main.is_celery_online", Mock(return_value=False))
def test_main_false(self):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(b"Text", res.data)
def test_favicon(self):
warnings.filterwarnings("ignore", category=ResourceWarning)
# Flask bug in unit tests that leads to a 'ResourceWarning'
res = self.client.get('/favicon.ico')
self.assertEqual(res.status_code, 200)
# -------------------------------------------------------------------------
# connector.py-------------------------------------------------------------
@patch.object(connector, "StorageServer", Mock)
def test_get_storageserver_backend(self):
with self.app.test_request_context('/'):
self.assertFalse('storageserver' in g)
connector.get_storageserver_backend()
self.assertTrue('storageserver' in g)
@patch.object(connector, "StorageServer")
@patch.object(connector, "insert_bloom")
@patch.object(connector, "database")
def test__batch_store_records(self, d, i, m):
connector._batch_store_records([["test"]], self.user)
m.batch_store_records_db.assert_called_once_with(
[["test"]]
)
i.delay.assert_called_once_with([["test"]])
d.add_task.assert_called_once()
@skip("Slow b/c of celery and trivial.")
@patch("storage_server.connector.StorageServer")
def test_execute_psi(self, m): # pragma no cover
port = 50000
with self.app.test_request_context('/'):
connector.execute_psi.apply(args=(port,))
m.offer_psi.assert_called_once_with(port=port)
@patch.object(connector.Tasks['PSI'], "AsyncResult")
@patch("storage_server.connector.render_template", Mock())
def test_status_overview(self, om):
om.return_value.info = "blub"
om.return_value.state = "FAILURE"
om.return_value.id = "a"
date1 = datetime.now()
date2 = (datetime.now() + timedelta(days=1))
for user_type in [UserType.CLIENT, UserType.OWNER]:
mock_tasks = [
Task(id='a', user_id="userA", task_type=TaskType.PSI,
user_type=user_type, timestamp=date1),
Task(id='b', user_id="userA", task_type=TaskType.PSI,
user_type="client", timestamp=date2),
Task(id='c', user_id="userA", task_type="Bad Task",
user_type=user_type, timestamp=date1)
]
with patch("lib.database.get_tasks",
Mock(return_value=mock_tasks)):
res = [
{
'id': 'b',
'status': "FAILURE",
'type': TaskType.PSI,
'time': date2,
'error': "blub",
'task_url': f"/{user_type}/PSI/status/a",
'kill_url': f"/{user_type}/PSI/kill/a"
},
{
'id': 'a',
'status': "FAILURE",
'type': TaskType.PSI,
'time': date1,
'error': "blub",
'task_url': f"/{user_type}/PSI/status/a",
'kill_url': f"/{user_type}/PSI/kill/a"
}
]
with self.app.test_request_context('/'):
with self.assertRaises(ValueError):
# Bad User Type
connector.status_overview("bad_type")
with self.assertRaises(ValueError):
# Bad Task Type
connector.status_overview(user_type)
del mock_tasks[2]
connector.status_overview(user_type)
self.assertEqual(res, g.tasks)
@patch.object(connector.Tasks['PSI'], "AsyncResult")
@patch.object(connector, "get_storageserver_backend", Mock())
def test_status(self, pm):
# Bad type
self.assertIn("404 Not Found", connector.task_status("bad", "a")[0])
self.assertEqual(404, connector.task_status("bad", "a")[1])
task = Mock()
task.id = "a"
task.info = "blub"
pm.return_value = task
for t in [TaskType.PSI]:
task.state = "PENDING"
r = connector.task_status(t, "a")
self.assertEqual(r, {'state': "PENDING"})
task.state = "SUCCESS"
r = connector.task_status(t, "a")
self.assertEqual(r, {'state': "SUCCESS"})
task.state = "FAILURE"
r = connector.task_status(t, "a")
self.assertEqual(r,
{'state': "FAILURE", 'status': "blub"})
@patch.object(connector.Tasks['PSI'], "AsyncResult")
@patch.object(connector, "get_storageserver_backend", Mock())
def test_kill(self, pm):
# Bad type
self.assertIn("404 Not Found", connector.kill_task("bad", "a")[0])
self.assertEqual(404, connector.kill_task("bad", "a")[1])
task = Mock()
task.id = "a"
task.info = "blub"
pm.return_value = task
for t in [TaskType.PSI]:
task.state = "PENDING"
r = connector.kill_task(t, "a")
self.assertEqual(
{'success': False, 'msg': 'Task not running.'}, r
)
task.state = "STARTED"
r = connector.kill_task(t, "a")
self.assertEqual(
{'success': True, 'msg': None}, r
)
# -------------------------------------------------------------------------
# client.py----------------------------------------------------------------
@patch("storage_server.client.verify_token", mock_verify_token)
@patch("storage_server.client.get_storageserver_backend")
def test_client_verify_token(self, m):
# Mock bloom filter
m.return_value.get_bloom_filter.return_value.decode.return_value = 1
# No authentication info provided
res = self.client.get('/client/bloom')
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Non existing User
auth_head = self.auth_header_wrong_user
res = self.client.get('/client/bloom', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Bad Token
auth_head = self.auth_header_wrong_pw
res = self.client.get('/client/bloom', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Correct
auth_head = self.auth_header
res = self.client.get('/client/bloom', headers=auth_head)
self.assertEqual(res.status_code, 200)
# noinspection DuplicatedCode
@patch("storage_server.client.gen_token")
def test_client_gen_token(self, m):
m.return_value = {
'success': True,
'token': 'new-token'
}
from storage_server.client import client_pw
client_pw.verify_password(mock_verify_pw) # Mock PW function
# Test authentication bad PW
auth_head = self.auth_header_wrong_pw
res = self.client.get('/client/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Test authentication bad username
auth_head = self.auth_header_wrong_user
res = self.client.get('/client/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get('/client/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json['success'], True)
self.assertEqual(res.json['token'], "new-token")
@patch("storage_server.client.verify_token", mock_verify_token)
@patch("storage_server.client.get_storageserver_backend")
@patch("storage_server.client._track_bloom_access", Mock())
def test_client_get_bloom(self, m):
# Mock bloom filter
m.return_value.get_bloom_filter.return_value.decode.return_value = 1
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get('/client/bloom', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
auth_head = self.auth_header
res = self.client.get('/client/bloom', headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual({
'success': True,
'bloom': 1
}, res.json)
@patch("storage_server.client.verify_token", mock_verify_token)
@patch("storage_server.client.StorageServer")
def test_client_retrieve_record(self, m):
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.post('/client/retrieve_record', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
self.app.config.update(LOGIN_DISABLED=True)
# Empty POST
res = self.client.post('/client/retrieve_record')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Missing POST value 'hash'."
})
# Bad POST - Wrong Key
res = self.client.post('/client/retrieve_record', json={'T': False})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Missing POST value 'hash'."
})
# Success
m.get_record.return_value = [['hash5', 'record5']]
j = {'hash': 'hash5'}
res = self.client.post('/client/retrieve_record', json=j)
self.assertEqual(res.json, {
'success': True,
'records': [['hash5', 'record5']]
})
# Non existing hash
m.get_record.side_effect = ValueError(
"No record for hash exists: bad-hash")
j = {'hash': 'bad-hash'}
res = self.client.post('/client/retrieve_record', json=j)
self.assertEqual(res.json, {
'success': False,
'msg': 'No record for hash exists: bad-hash'
})
@patch("storage_server.client.verify_token", mock_verify_token)
@patch("storage_server.client.StorageServer")
def test_client_batch_retrieve(self, m):
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.post('/client/batch_retrieve_records',
headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
self.app.config.update(LOGIN_DISABLED=True)
# Empty POST
res = self.client.post('/client/batch_retrieve_records')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Missing POST value 'hashes'."
})
# Bad POST - Wrong Key
res = self.client.post('/client/batch_retrieve_records',
json={'T': False})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Missing POST value 'hashes'."
})
# Success
m.batch_get_records.return_value = [
['hash1', 'record1'],
['hash3', 'record3'],
['hash5', 'record5']
]
j = {'hashes': ['hash1', 'hash3', 'hash5']}
res = self.client.post('/client/batch_retrieve_records', json=j)
self.assertEqual({
'success': True,
'records': [
['hash1', 'record1'],
['hash3', 'record3'],
['hash5', 'record5']
]
}, res.json)
# Non existing hash
m.batch_get_records.return_value = []
j = {'hashes': 'bad-hash'}
res = self.client.post('/client/batch_retrieve_records', json=j)
self.assertEqual(res.json, {
'success': True,
'records': []
})
@patch("storage_server.client.verify_token", mock_verify_token)
@patch("storage_server.client._track_PSI_access", Mock())
@patch("lib.database.add_task", Mock())
def test_client_psi(self):
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get('/client/psi', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
with patch("storage_server.client.execute_psi") as m:
auth_head = self.auth_header
res = self.client.get('/client/psi', headers=auth_head)
self.assertEqual(res.status_code, 200)
@patch("storage_server.client.get_user", Mock(return_value="user"))
@patch("storage_server.client.BloomAccess", return_value="test")
@patch("storage_server.client.db")
def test__track_bloom_access(self, db, ba):
with self.assertRaises(ValueError):
client._track_bloom_access("bad_type", self.user)
client._track_bloom_access(UserType.CLIENT, self.user)
ba.assert_called_once_with(client="user")
db.session.add.assert_called_once_with("test")
db.session.commit.assert_called_once()
@patch("storage_server.client.get_user", Mock(return_value="user"))
@patch("storage_server.client.PSIAccess", return_value="test")
@patch("storage_server.client.db")
def test__track_psi_access(self, db, pa):
with self.assertRaises(ValueError):
client._track_PSI_access("bad_type", self.user)
client._track_PSI_access(UserType.CLIENT, self.user)
pa.assert_called_once_with(client="user")
db.session.add.assert_called_once_with("test")
db.session.commit.assert_called_once()
# -------------------------------------------------------------------------
# provider.py--------------------------------------------------------------
@patch("storage_server.provider.verify_token", mock_verify_token)
@patch("storage_server.provider.get_storageserver_backend", Mock())
def test_provider_verify_token(self):
# No authentication info provided
res = self.client.post('/provider/store_record')
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Non existing User
auth_head = self.auth_header_wrong_user
res = self.client.post('/provider/store_record', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Bad Token
auth_head = self.auth_header_wrong_pw
res = self.client.post('/provider/store_record', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Correct
auth_head = self.auth_header
res = self.client.post('/provider/store_record', headers=auth_head)
self.assertEqual(res.status_code, 200)
# noinspection DuplicatedCode
@patch("storage_server.provider.gen_token")
def test_provider_gen_token(self, m):
m.return_value = {
'success': True,
'token': 'new-token'
}
from storage_server.provider import provider_pw
provider_pw.verify_password(mock_verify_pw) # Mock PW function
# Test authentication bad PW
auth_head = self.auth_header_wrong_pw
res = self.client.get('/provider/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Test authentication bad username
auth_head = self.auth_header_wrong_user
res = self.client.get('/provider/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get('/provider/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json['success'], True)
@patch("storage_server.provider.verify_token", mock_verify_token)
def test_provider_store_record_failed(self):
# Test Authentication
auth_head = self.auth_header_wrong_pw
res = self.client.post('/provider/store_record', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
self.app.config.update(LOGIN_DISABLED=True)
# Empty POST
res = self.client.post('/provider/store_record')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Invalid POST data: Empty POST JSON."
})
# Bad POST - Missing 'hash'
j = {'ciphertext': 'new-record', 'owner': 'userA'}
res = self.client.post('/provider/store_record', json=j)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Invalid POST data: Require 'hash', 'ciphertext' and 'owner'."
})
# Bad POST - Missing 'ciphertext'
j = {'hash': 'new-hash', 'owner': 'userA'}
res = self.client.post('/provider/store_record', json=j)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Invalid POST data: Require 'hash', 'ciphertext' and 'owner'."
})
# Bad POST - Missing 'owner'
j = {'hash': 'new-hash', 'ciphertext': 'new-record'}
res = self.client.post('/provider/store_record', json=j)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Invalid POST data: Require 'hash', 'ciphertext' and 'owner'."
})
# Bad POST - wrong owner
self.app.config.update(LOGIN_DISABLED=False)
auth_head = self.auth_header
j = {'hash': 'new-hash', 'ciphertext': 'new-record', 'owner': 'owner1'}
res = self.client.post('/provider/store_record',
json=j, headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Invalid POST data: Owner in JSON not authenticated owner."
})
@patch("storage_server.provider.verify_token", mock_verify_token)
@patch("storage_server.provider.get_storageserver_backend")
def test_provider_store_record_success(self, m):
# Success
auth_head = self.auth_header
j = {'hash': 'new-hash', 'ciphertext': 'new-record', 'owner':
'correct_user'}
res = self.client.post('/provider/store_record',
json=j, headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': True,
'msg': None
})
m.return_value.store_record.assert_called_once_with(
j['hash'],
j['ciphertext'],
j['owner']
)
# Exception
m.return_value.store_record.side_effect = RuntimeError("Blub")
res = self.client.post('/provider/store_record',
json=j, headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Blub"
})
@patch("storage_server.provider.verify_token", mock_verify_token)
def test_provider_batch_store_record_fail(self):
# Test Authentication
auth_head = self.auth_header_wrong_pw
res = self.client.post('/provider/batch_store_records',
headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
self.app.config.update(LOGIN_DISABLED=True)
# Empty POST
res = self.client.post('/provider/batch_store_records')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Missing POST values."
})
# Non List POST
j = {}
res = self.client.post('/provider/batch_store_records', json=j)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "batch_store_records received non list: {}"
})
        # Bad item too short
j = [
('new-hash2', 'new-record2')
]
res = self.client.post('/provider/batch_store_records', json=j)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Record list contained bad item: ['new-hash2', "
"'new-record2']"
})
# Bad item type
j = [
('new-hash2', 'new-record2', 8)
]
res = self.client.post('/provider/batch_store_records', json=j)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Record list contained bad item: ['new-hash2', "
"'new-record2', 8]"
})
# Wrong owner
self.app.config.update(LOGIN_DISABLED=False)
records = [
('new-hash2', 'new-record2', 'userA'),
('new-hash1', 'new-record1', 'owner'),
]
auth_head = self.auth_header
res = self.client.post('/provider/batch_store_records',
headers=auth_head, json=records)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json, {
'success': False,
'msg': "Different owner in record than authenticated owner!"
})
@patch("storage_server.provider.verify_token", mock_verify_token)
@patch("storage_server.provider._batch_store_records")
def test_provider_batch_store_record_success(self, m):
# Success
records = [
['new-hash1', 'new-record1', 'correct_user'],
['new-hash2', 'new-record2', 'correct_user'],
['new-hash3', 'new-record3', 'correct_user']
]
auth_head = self.auth_header
res = self.client.post('/provider/batch_store_records',
headers=auth_head, json=records)
self.assertEqual(res.status_code, 200)
self.assertEqual(True, res.json['success'])
self.assertEqual(None, res.json['msg'])
m.assert_called_once_with(records, 'correct_user')
@patch("storage_server.client.status_overview")
@patch("storage_server.provider.status_overview")
def test_user_status(self, m, m2):
# client and Provider
m.return_value = "Test"
m2.return_value = "Test"
from storage_server.client import client_pw
from storage_server.provider import provider_pw
client_pw.verify_password(mock_verify_pw) # Mock PW function
provider_pw.verify_password(mock_verify_pw)
for user_type in [UserType.CLIENT, UserType.OWNER]:
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get(f'/{user_type}/status', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get(f'/{user_type}/status', headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b"Test")
@patch("storage_server.client.task_status")
@patch("storage_server.provider.task_status")
def test_task_status(self, m, m2):
# client and Provider
d = {'test': 'test'}
m.return_value = d
m2.return_value = d
from storage_server.client import client_pw
from storage_server.provider import provider_pw
client_pw.verify_password(mock_verify_pw) # Mock PW function
provider_pw.verify_password(mock_verify_pw)
for user_type in [UserType.CLIENT, UserType.OWNER]:
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get(f'/{user_type}/{TaskType.PSI}/status/a',
headers=auth_head)
self.assertEqual(401, res.status_code)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get(f'/{user_type}/{TaskType.PSI}/status/a',
headers=auth_head)
self.assertEqual(200, res.status_code)
self.assertEqual(d, res.json)
@patch("storage_server.client.kill_task")
@patch("storage_server.provider.kill_task")
def test_task_kill(self, m, m2):
# client and Provider
d = {'test': 'test'}
m.return_value = d
m2.return_value = d
from storage_server.client import client_pw
from storage_server.provider import provider_pw
client_pw.verify_password(mock_verify_pw) # Mock PW function
provider_pw.verify_password(mock_verify_pw)
for user_type in [UserType.CLIENT, UserType.OWNER]:
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get(f'/{user_type}/{TaskType.PSI}/kill/a',
headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get(f'/{user_type}/{TaskType.PSI}/kill/a',
headers=auth_head)
self.assertEqual(200, res.status_code)
self.assertEqual(d, res.json)
| 31,798
| 41.398667
| 81
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_data_provider.py
|
#!/usr/bin/env python3
"""Testing the data provider's CLI.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import json
import logging
import tempfile
from unittest import TestCase
from unittest.mock import patch
import responses
import data_provider as dp
from lib import config
from lib.base_client import UserType
from lib.helpers import to_base64
from lib.record import Record
@patch("lib.config.RECORD_LENGTH", 5)
class DataProviderTest(TestCase):
d = dp.DataProvider('userA')
@classmethod
def setUpClass(cls) -> None:
"""Disable logging."""
logging.getLogger().setLevel(logging.FATAL)
@patch("lib.base_client.BaseClient.post")
def test_store_record_fail(self, m):
# Fail due to bad owner
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.OWNER}/store_record")
j = {
'success': False,
'msg': f"Invalid POST data: Owner in JSON not authenticated owner."
}
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError) as cm:
self.d._store_record_on_server(b"hash", "record",
"non-existing-owner")
m.assert_called_once()
self.assertEqual(url, m.call_args[0][0])
self.assertIn("Invalid POST",
str(cm.exception))
@patch("lib.base_client.BaseClient.post")
def test_store_record_success(self, m):
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.OWNER}/store_record")
j = {
'success': True,
'msg': None
}
m.return_value.json.return_value = j
self.d._store_record_on_server(b"hash", {'cipher': "record"},
"userA")
m.assert_called_once()
expected = json.dumps({
'hash': to_base64(b"hash"),
'record': {'cipher': "record"},
'owner': 'userA'
}).encode()
        m.assert_called_with(url, json=expected)
@patch("lib.base_client.BaseClient.post")
def test_batch_store_records_fail(self, m):
# Fail due to bad owner
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.OWNER}/batch_store_records")
j = {
'success': False,
'msg': f"Invalid POST data: Owner in JSON not authenticated owner."
}
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError) as cm:
self.d._batch_store_records_on_server([])
m.assert_called_once_with(url, json=[])
self.assertIn("Invalid POST",
str(cm.exception))
@patch("lib.base_client.BaseClient.post")
def test_batch_store_records_success(self, m):
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.OWNER}/batch_store_records")
j = {
'success': True,
'msg': None
}
m.return_value.json.return_value = j
self.d._batch_store_records_on_server([("hash",
"record",
"userA")])
expected = [("hash", "record", "userA")]
m.assert_called_once_with(url, json=expected)
@responses.activate
@patch("lib.config.EVAL", False)
def test_store_records(self):
"""Kind of integrity test, we only mock the server responses."""
# Define server response
# 1 - Get token
url = (f"https://{config.KEYSERVER_HOSTNAME}"
f":{config.KEY_API_PORT}/provider/gen_token")
j = {
'success': True,
'token': 'XIu2a9SDGURRTzQnJdDg19Ii_CS7wy810s3_Lrx-TY7Wvh2Hf0U4xLH'
'NwnY_byYJ71II3kfUXpSZHOqAxA3zrw'
}
responses.add(responses.GET, url, json=j, status=200)
# 2 - Hash Key
url = f"{self.d.KEYSERVER}/hash_key"
hash_key = to_base64(int(1).to_bytes(16, 'big'))
j = {
'success': True,
'hash_key': hash_key
}
responses.add(responses.GET, url, json=j, status=200)
# 3 - Encryption Keys
j = {
'success': True,
'port': 50000,
'host': "127.0.0.1",
'totalOTs': 10,
'tls': config.OT_TLS
}
url = f"https://localhost:" \
f"{config.KEY_API_PORT}/provider/key_retrieval?totalOTs=3"
responses.add(responses.GET, url, json=j, status=200)
# Remember to mock the OT
r1 = Record([1.0, 2.1, 3.3, 4.4, 5.0])
r2 = Record([1.0532, 2.15423, 3.3453, 4.4, 5.0])
r3 = Record([1.52340, 2.1523, 3.35423, 4.4, 5.0])
records = [r1, r2, r3]
# Log in user
self.d.set_password("password")
with patch.object(self.d, "_receive_ots", return_value=[10, 9, 8]):
# Mock OT
with patch.object(self.d, "_batch_store_records_on_server",
return_value=True):
self.d.store_records(records)
def test_parser(self):
# Just syntax errors
p = dp.get_provider_parser()
self.assertTrue(isinstance(p, argparse.ArgumentParser))
def test_store_from_file(self):
res = [
Record([1, 2, 3, 4, 5]),
Record([1, 2, 3, 5, 6])
]
with patch.object(self.d, "store_records") as m:
with tempfile.NamedTemporaryFile() as fd:
fd.write(b'[1,2,3,4, 5]\n')
fd.write(b'[1,2,3,5, 6]\n')
fd.seek(0)
self.d.store_from_file(fd.name)
m.assert_called_with(res)
| 5,974
| 33.738372
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_base_client.py
|
#!/usr/bin/env python3
"""Test of base client.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
from unittest import TestCase, mock
from unittest.mock import Mock, patch
import requests
import responses
from lib import base_client as bc
from lib import config
from lib.base_client import KEYSERVER, STORAGESERVER, ServerType
from lib.helpers import to_base64, encryption_keys_from_int
class Mockclient(bc.BaseClient):
"""Mock base client to test abstract base class."""
type = 'mock'
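    # Providing only `type` is enough to make the abstract BaseClient
    # instantiable for tests: judging from the URLs below (e.g.
    # f"{KEYSERVER}/mock/hash_key"), `type` is interpolated into the request
    # paths, so every endpoint test can reuse this one subclass.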
class BaseClientTest(TestCase):
choices = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
int_keys = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
keys = encryption_keys_from_int(int_keys)
m: Mockclient = None
@classmethod
def setUpClass(cls) -> None:
"""Disable logging."""
logging.getLogger().setLevel(logging.FATAL)
def setUp(self) -> None:
self.m = Mockclient("testuser")
def test_init(self):
m = Mockclient("User1")
self.assertEqual(m.type, "mock")
self.assertEqual(m.user, "User1")
@responses.activate
@patch("lib.base_client.BaseClient.get_token", Mock(return_value="token"))
def test_get_hash_key(self):
url = f"{KEYSERVER}/mock/hash_key"
j = {
'success': True,
'hash_key': to_base64(int(1).to_bytes(16, 'big'))
}
responses.add(responses.GET, url, json=j, status=200)
# Success
key = self.m.get_hash_key()
self.assertEqual(int(1).to_bytes(16, 'big'), key)
@patch("lib.base_client.BaseClient._receive_ots",
Mock(return_value=int_keys))
@patch("lib.base_client.BaseClient.get")
@patch("lib.config.EVAL", False)
@patch("lib.config.PARALLEL", False)
def test_get_enc_keys(self, m):
port = 50000
url = f"{KEYSERVER}/mock/key_retrieval?totalOTs=10"
# Failed retrieval
j = {
'success': False,
'msg': 'Key retrieval failed: No total OT defined.'
}
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError):
self.m._get_enc_keys(self.choices)
# Called in different process
m.assert_called_once_with(url)
m.reset_mock()
# TLS Mismatch
config.OT_TLS = True
j = {
'success': True,
'port': port,
'host': "127.0.0.1",
'totalOTs': 20,
'tls': False
}
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError) as e:
self.m._get_enc_keys(self.choices)
m.reset_mock()
self.assertIn(
"Mismatch of server and client TLS settings.",
str(e.exception))
# Success empty list
res = self.m._get_enc_keys([])
self.assertEqual([], res)
# Success with TLS
j = {
'success': True,
'port': port,
'host': "127.0.0.1",
'totalOTs': 10,
'tls': True
}
m.return_value.json.return_value = j
res = self.m._get_enc_keys(self.choices)
m.assert_called_once_with(url)
m.reset_mock()
# p.join()
self.assertEqual(res, self.keys)
# Success without TLS
config.OT_TLS = False
j = {
'success': True,
'port': port,
'host': "127.0.0.1",
'totalOTs': 10,
'tls': False
}
m.return_value.json.return_value = j
res = self.m._get_enc_keys(self.choices)
m.assert_called_once_with(url)
m.reset_mock()
self.assertEqual(res, self.keys)
@patch("lib.base_client.BaseClient._receive_ots")
@patch("lib.base_client.BaseClient.get")
@patch("lib.config.EVAL", False)
@patch("lib.config.PARALLEL", True)
@patch("lib.config.OT_MAX_NUM", 1) # force parallel
def test_parallel_enc_keys(self, m, _receive_ots):
port = 50000
url = f"{KEYSERVER}/mock/key_retrieval?totalOTs=10"
# Failed retrieval
j = {
'success': False,
'msg': 'Key retrieval failed: No total OT defined.'
}
m.return_value.json.return_value = j
# Parallel execution
config.OT_TLS = False
j = {
'success': True,
'port': port,
'host': "127.0.0.1",
'totalOTs': 10,
'tls': False
}
def mocked_receive(inds, h, p, tls): # pragma no cover
# (Thread)
return [self.int_keys[j] for j in inds]
_receive_ots.side_effect = mocked_receive
m.return_value.json.return_value = j
res = self.m._get_enc_keys(self.choices)
self.assertEqual(res, self.keys)
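    # Reading of the test above: with OT_MAX_NUM patched to 1, _get_enc_keys
    # must split the ten choice indices into separate _receive_ots calls; the
    # mocked_receive side effect returns the key belonging to each index, so
    # the final equality check verifies that the per-chunk results are
    # reassembled in the original order.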
def test_receive_ots_without_tls(self):
host = "127.0.0.1"
port = 50000
tls = False
m = Mock()
m.execute.return_value = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
with mock.patch("lib.base_client.PyOTReceiver", return_value=m):
res = Mockclient._receive_ots(self.choices, host, port, tls,
num_chosen_msgs=20)
self.assertEqual(res, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
def test_receive_ots_with_tls(self):
host = "127.0.0.1"
port = 50000
tls = True
m = Mock()
m.execute.return_value = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
with mock.patch("lib.base_client.PyOTReceiver", return_value=m):
res = Mockclient._receive_ots(self.choices, host, port, tls,
num_chosen_msgs=20)
self.assertEqual(res, [10, 9, 8, 7, 6, 5, 4, 3, 2, 1])
def test_receive_psi(self):
m = Mock()
m.execute.return_value = range(10)
with mock.patch("lib.base_client.PyPSIReceiver", return_value=m):
res = Mockclient._receive_psi(sorted(self.choices, reverse=True),
"localhost", 5000, True)
self.assertEqual(res, [9, 8, 7, 6, 5, 4, 3, 2, 1, 0])
def test_set_password(self):
m = Mockclient("client")
self.assertIsNone(m.password)
m.set_password("password")
self.assertEqual(m.password, "password")
@responses.activate
def test_get_token_success(self):
urlA = f"{KEYSERVER}/mock/gen_token"
urlB = f"{STORAGESERVER}/mock/gen_token"
j = {
'success': True,
'token': 'XIu2a9SDGURRTzQnJdDg19Ii_CS7wy810s3_Lrx-TY7Wvh2Hf0U4xLH'
'NwnY_byYJ71II3kfUXpSZHOqAxA3zrw'
}
responses.add(responses.GET, urlA, json=j, status=200)
responses.add(responses.GET, urlB, json=j, status=200)
# Success keyserver
self.m.set_password("password")
res = self.m.get_token(ServerType.KeyServer)
self.assertEqual(res, j['token'])
# Success storage server
self.m.set_password("password")
res = self.m.get_token(ServerType.StorageServer)
self.assertEqual(res, j['token'])
@responses.activate
def test_get_token_fail(self):
with self.assertRaises(ValueError):
# no password defined
self.m.get_token(ServerType.KeyServer)
self.m.set_password("password")
# Bad server type
with self.assertRaises(ValueError):
self.m.get_token("Bad-type")
# Server Error
url = f"{KEYSERVER}/mock/gen_token"
j = {
'success': False,
'msg': "Not enough entropy."
}
responses.add(responses.GET, url, json=j, status=200)
with self.assertRaises(RuntimeError):
self.m.get_token(ServerType.KeyServer)
@responses.activate
@patch("lib.base_client.BaseClient.get_auth_data",
Mock(return_value=("a", "b")))
def test_get(self):
url = "http://url"
body = b"Test"
auth = ("user", "password")
method = responses.GET
# Success via 200
responses.add(method, url, body, status=200)
res = self.m.get(url) # Without auth
self.assertEqual(body, res.content)
# success via 202
responses.replace(method, url, body, status=202)
res = self.m.get(url, auth)
self.assertEqual(body, res.content)
# Authentication failed - 401
responses.replace(method, url, body, status=401)
with self.assertRaises(RuntimeError) as e:
self.m.get(url, auth)
self.assertIn("Authentication failed", str(e.exception))
# Internal Server Error - 500
responses.replace(method, url, body, status=500)
with self.assertRaises(requests.exceptions.HTTPError):
self.m.get(url, auth)
@responses.activate
@patch("lib.base_client.BaseClient.get_auth_data",
Mock(return_value=("a", "b")))
def test_post(self):
url = "http://url"
body = b"Test"
auth = ("user", "password")
json = {}
method = responses.POST
# Success via 200
responses.add(method, url, body, status=200)
res = self.m.post(url, json) # without auth
self.assertEqual(body, res.content)
# success via 202
responses.replace(method, url, body, status=202)
res = self.m.post(url, json, auth)
self.assertEqual(body, res.content)
# Authentication failed - 401
responses.replace(method, url, body, status=401)
with self.assertRaises(RuntimeError) as e:
self.m.post(url, json, auth)
self.assertIn("Authentication failed", str(e.exception))
# Internal Server Error - 500
responses.replace(method, url, body, status=500)
with self.assertRaises(requests.exceptions.HTTPError):
self.m.post(url, json, auth)
@patch("lib.base_client.BaseClient.get_token",
Mock(return_value='token'))
def test_get_auth_data(self):
with self.assertRaises(ValueError):
self.m.get_auth_data("bad-url")
self.assertEqual(
(self.m.user, "token"),
self.m.get_auth_data(KEYSERVER + "/something")
)
self.assertEqual(
(self.m.user, "token"),
self.m.get_auth_data(STORAGESERVER + "/something")
)
| 10,410
| 32.583871
| 78
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_client.py
|
#!/usr/bin/env python3
"""Testing the client CLI.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import json
import logging
import os
import shutil
import tempfile
from typing import List
from unittest import TestCase
from unittest.mock import patch, Mock, MagicMock
import responses
from pybloomfilter import BloomFilter, b64encode
from responses import GET, POST
import client
from lib import config
from lib.base_client import UserType
from lib.record import Record
from lib.similarity_metrics import RelativeOffsetIterator
@patch("lib.config.RECORD_LENGTH", 5)
@patch("lib.config.BLOOM_CAPACITY", 100)
@patch("lib.config.BLOOM_ERROR_RATE", 10 ** -5)
@patch("lib.config.RECORD_ID_LENGTH", 2)
@patch("lib.config.ROUNDING_VEC", [3, 3])
class ClientTest(TestCase):
test_dir = config.DATA_DIR + "test/"
c = client.Client("userA")
enc_keys = [
b'\x1b\x8fL+\xd2\xfcLQ\x1a\x03:\xcf\x15\x8a\xc7+'
for _ in range(10)
]
enc_keys_int = [int.from_bytes(i, 'big') for i in enc_keys]
hash_key = b'hash_key'
@classmethod
@patch("lib.config.RECORD_LENGTH", 5)
@patch("lib.config.BLOOM_CAPACITY", 100)
@patch("lib.config.BLOOM_ERROR_RATE", 10 ** -5)
@patch("lib.config.RECORD_ID_LENGTH", 2)
@patch("lib.config.ROUNDING_VEC", [3, 3])
def setUpClass(cls) -> None:
"""Disable logging."""
logging.getLogger().setLevel(logging.FATAL)
shutil.rmtree(cls.test_dir, ignore_errors=True)
os.makedirs(cls.test_dir, exist_ok=True)
cls.records = [
Record([0, 0, 0, 0, 0]), # not in Bloom
Record([1, 2, 3, 4, 5]), # in Bloom
Record([2, 2, 3, 4, 5]), # in Bloom
Record([3, 2, 3, 4, 5]), # in Bloom
Record([4, 2, 3, 4, 5]), # not in Bloom
Record([5, 2, 3, 4, 5]), # not in Bloom
]
for r in cls.records:
r.set_hash_key(cls.hash_key)
b = BloomFilter(100, 0.0001, cls.test_dir + "test.bloom")
b.update([1, 2, 3, 4, 5, 6, 7, 8, 9, 'a', 'b', 'c'])
b.add(b64encode(cls.records[1].get_long_hash()).decode())
b.add(b64encode(cls.records[2].get_long_hash()).decode())
b.add(b64encode(cls.records[3].get_long_hash()).decode())
cls.b_encoded = b.to_base64().decode()
cls.b = b
cls.psi_ind = [cls.records[1].get_psi_index(),
cls.records[2].get_psi_index(),
cls.records[3].get_psi_index()
]
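        # Fixture summary: records 1-3 are added to the Bloom filter via the
        # base64 of their long hash (mirroring the client's membership check),
        # and psi_ind keeps their PSI indices for the PSI-mode tests. Bloom
        # filters admit false positives at the configured error rate, so the
        # "not in Bloom" records are deliberately kept disjoint.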
def setUp(self) -> None:
"""
Deactivate PSI Mode.
"""
self.c._psi_mode = False
@classmethod
def tearDownClass(cls) -> None:
"""Remove Test files."""
shutil.rmtree(cls.test_dir, ignore_errors=True)
@patch("lib.base_client.BaseClient.post")
def test_get_record_success(self, m):
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.CLIENT}/retrieve_record")
j = {
'success': True,
'records': [['hash', 'record1'], ['hash', 'record2']]
}
m.return_value.json.return_value = j
res = self.c.get_record('hash')
self.assertEqual(res, j['records'])
m.assert_called_once_with(url, json={'hash': 'hash'})
@patch("lib.base_client.BaseClient.post")
def test_get_record_fail(self, m):
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.CLIENT}/retrieve_record")
j = {
'success': False,
'msg': "No record for hash exists: 'record'"
}
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError) as cm:
self.c.get_record("hash")
self.assertIn("No record for hash exists: 'record'", str(cm.exception))
m.assert_called_once_with(url, json={'hash': 'hash'})
@patch.object(c, "_get_enc_keys", Mock(return_value=enc_keys))
def test_batch_get_records(self):
records = self.records[:5]
enc_records = []
for i, r in enumerate(records):
enc_records.append(
(
b64encode(r.get_long_hash()).decode(),
json.dumps(r.get_encrypted_record(self.enc_keys[i], b'0'))
))
with patch.object(self.c, "_batch_get_encrpyted_records",
Mock(return_value=enc_records)):
res = self.c.batch_get_records(self.records[:5])
for r in res:
# for comparison
r.set_hash_key(self.hash_key)
self.assertEqual(records, res)
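        # Interpretation: the server side returns (base64(long_hash),
        # ciphertext) pairs and batch_get_records decrypts each entry with the
        # per-record key obtained via OT (mocked through _get_enc_keys above);
        # the hash key is re-attached afterwards only so Record equality can
        # be compared.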
# Empty list
with patch.object(self.c, "_batch_get_encrpyted_records",
Mock(return_value=[])):
self.assertEqual([],
self.c.batch_get_records(self.records[:5]))
@patch("lib.base_client.BaseClient.post")
def test__batch_get_encrpyted_records_success(self, m):
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.CLIENT}/batch_retrieve_records")
j = {
'success': True,
'records': [['hash1', 'record1'], ['hash2', 'record2']]
}
m.return_value.json.return_value = j
hash_list = ['hash1', 'hash2', 'hash3']
res = self.c._batch_get_encrpyted_records(hash_list)
self.assertEqual(res, j['records'])
m.assert_called_once_with(url, json={'hashes': hash_list})
@patch("lib.base_client.BaseClient.post")
def test__batch_get_encrypted_records_fail(self, m):
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.CLIENT}/batch_retrieve_records")
j = {
'success': False,
'msg': "Missing POST value 'hashes'."
}
m.return_value.json.return_value = j
hash_list = ['hash1', 'hash2', 'hash3']
with self.assertRaises(RuntimeError) as cm:
self.c._batch_get_encrpyted_records(hash_list)
self.assertIn("Missing POST value 'hashes'.", str(cm.exception))
m.assert_called_once_with(url, json={'hashes': hash_list})
@patch("lib.base_client.BaseClient.get")
def test_get_bloom_success(self, m):
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.CLIENT}/bloom")
j = {
'success': True,
'bloom': self.b_encoded
}
m.return_value.json.return_value = j
res = self.c._get_bloom_filter()
res_b = res.to_base64()
self.assertEqual(res_b, self.b_encoded.encode())
m.assert_called_once_with(url)
@patch("lib.base_client.BaseClient.get")
def test_get_bloom_fail(self, m):
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.CLIENT}/bloom")
j = {
'success': False,
'msg': "Failed to retrieve bloom filter: "
}
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError) as cm:
self.c._get_bloom_filter()
self.assertIn("Failed to retrieve bloom filter:", str(cm.exception))
m.assert_called_once_with(url)
def test_compute_matches_bloom_success(self):
for v in [True, False]:
with patch("lib.config.PARALLEL", v):
with patch.object(self.c, "_get_bloom_filter", return_value=self.b):
rec_list = [self.records[1].record,
self.records[2].record,
self.records[4].record,
self.records[5].record]
self.c._hash_key = self.hash_key
m = MagicMock()
m.__iter__.return_value = rec_list
m.split.return_value = [m]
res = self.c.compute_matches_bloom(m)
self.assertEqual(res, [self.records[1], self.records[2]])
m.__iter__.return_value = [self.records[1].record, self.records[2].record,
self.records[3].record, self.records[4].record,
self.records[5].record]
res = self.c.compute_matches_bloom(m)
self.assertEqual(res, [self.records[1], self.records[2],
self.records[3]])
m.__iter__.return_value = [self.records[4].record,
self.records[5].record]
res = self.c.compute_matches_bloom(m)
self.assertEqual(res, [])
def test_compute_matches_bloom_fail(self):
self.c._psi_mode = True
with patch.object(self.c, "_get_bloom_filter",
return_value=[]):
            with self.assertRaises(RuntimeError) as e:
self.c.compute_matches_bloom(Mock)
self.assertIn("PSI-Mode is enabled", str(e.exception))
def test_compute_matches_psi_success(self):
self.c._psi_mode = True
self.c._hash_key = self.hash_key
with patch.object(self.c, "_perform_psi", return_value=self.psi_ind):
res = self.c.compute_matches_psi(
[self.records[1].record, self.records[2].record,
self.records[4].record, self.records[5].record])
self.assertEqual(res, [self.records[1], self.records[2]])
res = self.c.compute_matches_psi(
[self.records[1].record, self.records[2].record, self.records[3].record,
self.records[4].record, self.records[5].record])
self.assertEqual(res, [self.records[1], self.records[2],
self.records[3]])
res = self.c.compute_matches_psi(
[self.records[4].record, self.records[5].record])
self.assertEqual(res, [])
@patch("lib.config.RECORD_LENGTH", 10)
@patch("lib.config.RECORD_ID_LENGTH", 10)
def test_compute_matches_psi_fail(self):
# No PSI Mode
with patch.object(self.c, "_perform_psi",
return_value=[]):
            with self.assertRaises(RuntimeError) as e:
self.c.compute_matches_psi(
[])
self.assertIn("PSI-Mode is not enabled", str(e.exception))
self.c._psi_mode = True
with self.assertRaises(RuntimeError) as e:
self.c.compute_matches_psi(
RelativeOffsetIterator(
[float(i + 1) for i in range(config.RECORD_LENGTH)],
10,
[6 for _ in range(config.RECORD_ID_LENGTH)]
)
)
self.assertIn("too large for PSI", str(e.exception))
@patch.object(c, "_receive_psi", Mock(return_value=[1, 2, 3]))
@patch("lib.base_client.BaseClient.get")
@patch("lib.config.EVAL", False)
def test__perform_psi(self, m):
self.assertEqual([], self.c._perform_psi([]))
url = (
f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/client/"
f"psi")
for tls in [True, False]:
with patch("lib.config.PSI_TLS", tls):
j = {
'success': True,
'tls': tls,
'host': '127.0.0.1',
'port': 1234,
'setSize': 100,
'msg': 'blub'
}
m.return_value.json.return_value = j
res = self.c._perform_psi([1, 2, 3, 4, 5, 6])
self.assertEqual([1, 2, 3], res)
m.assert_called_once_with(url)
m.reset_mock()
j['success'] = False
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError) as e:
self.c._perform_psi([1, 2, 3, 4, 5, 6])
m.assert_called_once_with(url)
m.reset_mock()
self.assertEqual("PSI failed: blub", str(e.exception))
j['success'] = True
j['setSize'] = 1
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError) as e:
self.c._perform_psi([1, 2, 3, 4, 5, 6])
m.assert_called_once_with(url)
m.reset_mock()
self.assertEqual("Client Set larger than PSI Setsize.",
str(e.exception))
j['setSize'] = 100
j['tls'] = not tls
m.return_value.json.return_value = j
with self.assertRaises(RuntimeError) as e:
self.c._perform_psi([1, 2, 3, 4, 5, 6])
m.assert_called_once_with(url)
m.reset_mock()
self.assertIn("Mismatch", str(e.exception))
@patch("lib.similarity_metrics.AbsoluteOffsetIterator")
def test_compute_candidates(self, m):
# Default
r = [1, 2, 3]
self.c.compute_candidates(r)
m.assert_called_once_with(r, 1)
# Non default
self.c.compute_candidates(r, "offset-7.77")
m.assert_called_with(r, 7.77)
def test_parser(self):
# Just syntax errors
p = client.get_client_parser()
self.assertTrue(isinstance(p, argparse.ArgumentParser))
@responses.activate
@patch("lib.config.OT_TLS", False)
@patch("lib.config.EVAL", False)
@patch("lib.config.OT_SETSIZE", 10)
@patch("client.Client._receive_ots", Mock(return_value=enc_keys_int[:3]))
@patch("client.Client.get_token", Mock(return_value="token"))
def test_full_retrieve(self):
c = client.Client("userA")
target = [2.0, 2.0, 3.0, 4.0, 5.0]
# Server Records:
sr: List[Record] = [
[2.01, 2.01, 3.3, 4.4, 5.0], # Match
[2.5, 4.4, 3.9, 5.0, 5.0], # No Match
[2.0, 7.0, 3.0, 4.0, 5.0], # No Match
[2.0, 2.0, 10.6, 10.0, 5.0], # Match
[3.0, 2.0, 3.0, 4.0, 5.0], # No Match
[2.01, 2.004, 5, 9, 5.0], # Match
[2.0, 2.0, 3.0, 4.0, 5.0] # No Match
]
# Server Bloom Filter
tmp = tempfile.NamedTemporaryFile(delete=False)
b = BloomFilter(len(sr), 0.00001, tmp.name)
c.metric = "offset-0.01"
for i, r in enumerate(sr):
sr[i]: Record = Record(r)
sr[i].set_hash_key(self.hash_key)
matches = [sr[0], sr[3], sr[5]]
for m in matches:
b.add(b64encode(m.get_long_hash()).decode())
b_encoded = b.to_base64().decode()
# Responses
# -----------------------------------------------------------
# 1. Hash Key
url = f"https://localhost:" \
f"{config.KEY_API_PORT}/client/hash_key"
j = {
'success': True,
'hash_key': b64encode(self.hash_key).decode()
}
responses.add(responses.GET, url, json=j, status=200)
        # 2a. PSI
url = (
f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/client/psi")
j = {
'success': True,
'tls': False,
'host': '127.0.0.1',
'port': 1234,
'setSize': 10
}
responses.add(GET, url, status=200, json=j)
        # 2b. Bloom filter (alternative to PSI, depending on the mode)
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.CLIENT}/bloom")
j = {
'success': True,
'bloom': b_encoded
}
responses.add(GET, url, status=200, json=j)
# 3. Encryption Keys
url = f"https://localhost:" \
f"{config.KEY_API_PORT}/client/key_retrieval?totalOTs=3"
j = {
'success': True,
'port': 5000,
'host': "127.0.0.1",
'totalOTs': 3,
'tls': False
}
responses.add(responses.GET, url, json=j, status=200)
# 4. Ciphertexts
url = (f"https://{config.STORAGESERVER_HOSTNAME}:"
f"{config.STORAGE_API_PORT}/"
f"{UserType.CLIENT}/batch_retrieve_records")
j = {
'success': True,
'records': [
(b64encode(m.get_long_hash()).decode(),
json.dumps(
m.get_encrypted_record(self.enc_keys[i], b'0'))
)
for i, m in enumerate(matches)
]
}
responses.add(POST, url, status=200, json=j)
# ---------------------------------------------------------------------
for psi in [True, False]:
with patch.object(c, "_receive_psi", Mock(return_value=[
m.get_psi_index()
for m in matches
])):
c._psi_mode = psi
res = c.full_retrieve(target)
# Set hash key for comparison
for r in res:
r.set_hash_key(self.hash_key)
# Compare
self.assertEqual(matches, res)
def test_activate_psi_mode(self):
self.assertEqual(False, self.c._psi_mode)
self.c.activate_psi_mode()
self.assertEqual(True, self.c._psi_mode)
@patch("client.log.debug", Mock(side_effect=RuntimeError))
def test_full_error(self):
with self.assertRaises(RuntimeError):
self.c.full_retrieve([1, 2, 3])
| 17,897
| 38.861915
| 94
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_user_database.py
|
#!/usr/bin/env python3
"""Test user databases, i.e. Client and Data Provider DB.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import os
import shutil
from time import time
from unittest import TestCase
from unittest.mock import patch, Mock
from flask import Flask
from lib import user_database as ud, config
from lib.base_client import UserType
test_dir = config.DATA_DIR + "test/"
def create_mock_app():
"""Create a low overhead flask app for testing."""
app = Flask(__name__)
app.config.from_mapping(
TESTING=True,
DATA_DIR=test_dir,
SQLALCHEMY_DATABASE_URI=f"sqlite:///{test_dir}/{config.STORAGE_DB}",
SQLALCHEMY_TRACK_MODIFICATIONS=False
)
return app
class UserDBTest(TestCase):
app = create_mock_app()
username = "username"
password = "password"
token = "token"
t = ud.Token(value=token)
tokens = [t]
@classmethod
def setUpClass(cls) -> None:
start = time()
shutil.rmtree(test_dir, ignore_errors=True)
os.makedirs(test_dir, exist_ok=True)
ud.db.init_app(cls.app)
cls.app.app_context().push()
ud.db.create_all()
# Add users
cls.c = ud.Client(username=cls.username, password=cls.password)
ud.db.session.add(cls.c)
ud.db.session.commit()
# print(f"setUpClass took: {1000 * (time() - start)} ms")
def setUp(self) -> None:
"""Clear test directory, remove logging"""
logging.getLogger().setLevel(logging.ERROR)
@classmethod
def tearDownClass(cls) -> None:
"""Remove test directory."""
shutil.rmtree(test_dir, ignore_errors=True)
@patch("lib.user_database.generate_password_hash",
return_value="token_hash")
def test_generate_token(self, m):
with self.assertRaises(ValueError):
# User does not exist
ud.generate_token(UserType.CLIENT, "bad")
t = ud.generate_token(UserType.CLIENT, self.username)
m.assert_called_once_with(t, salt_length=32)
@patch("lib.user_database.check_password_hash", Mock(return_value=True))
def test_verify_password(self):
with self.assertRaises(ValueError):
ud.verify_password(UserType.CLIENT, "user", "pwd")
ud.verify_password(UserType.CLIENT, self.username, self.password)
@patch("lib.user_database.check_password_hash")
def test_verify_token(self, m):
with self.assertRaises(ValueError):
# User does not exist
ud.verify_token(UserType.CLIENT, "user", "pwd")
self.c.tokens = []
ud.db.session.commit()
with self.assertRaises(ValueError):
# Token is none
ud.verify_token(UserType.CLIENT, self.username, "pwd")
self.c.tokens = self.tokens
ud.db.session.commit()
m.return_value = False
self.assertFalse(
ud.verify_token(UserType.CLIENT, self.username, self.token))
m.return_value = True
self.assertTrue(
ud.verify_token(UserType.CLIENT, self.username, self.token))
# Token has been removed
self.assertEqual([], self.c.tokens)
def test__generate_token(self):
token = ud._generate_token()
self.assertTrue(isinstance(token, str))
self.assertEqual(len(token), 86)
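        # A length of 86 is consistent with an unpadded URL-safe Base64
        # encoding of 64 random bytes (21 * 4 + 2 = 86 characters), e.g.
        # secrets.token_urlsafe(64). This is an inference from the length
        # check, not a documented property of _generate_token.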
# Test "Randomness"
l1 = []
for _ in range(10):
t = ud._generate_token()
self.assertFalse(t in l1)
l1.append(t)
@patch("lib.user_database.check_password_hash")
@patch("lib.user_database.generate_password_hash")
def test_update_password(self, gen, m):
m.return_value = False
with self.assertRaises(ValueError):
# Bad Login
ud.update_password(UserType.CLIENT, self.username, "wrong-pwd",
"password")
m.return_value = True
with self.assertRaises(ValueError):
# Pwd too short
ud.update_password(UserType.CLIENT, self.username, self.password,
"pwd")
gen.return_value = "new-password-hash"
# change
ud.update_password(UserType.CLIENT, self.username, self.password,
"new-password")
# Verify
gen.assert_called_once_with("new-password", salt_length=32)
self.assertEqual("new-password-hash",
ud.Client.query.filter_by(
username=self.username).first().password)
# change back
self.c.password = self.password
ud.db.session.commit()
def test_get_all_users(self):
res = ud.get_all_users(UserType.CLIENT)
self.assertEqual([self.username], res)
res = ud.get_all_users(UserType.OWNER)
self.assertEqual([], res)
@patch("lib.user_database.generate_password_hash",
Mock(return_value="password"))
def test_add_user(self):
with self.assertRaises(ValueError):
# Too Short pw
ud.add_user(UserType.OWNER, "blub", "short")
with self.assertRaises(ValueError):
# User exists
ud.add_user(UserType.CLIENT, self.username, self.password)
self.assertEqual(
None,
ud.Owner.query.filter_by(username="new-user").first())
ud.add_user(UserType.OWNER, "new-user", "new-password")
user = ud.Owner.query.filter_by(username="new-user").first()
self.assertNotEqual(
None,
user)
ud.db.session.delete(user)
ud.db.session.commit()
def test_get_user_type(self):
with self.assertRaises(TypeError):
ud.get_user_type("Bad Type")
self.assertEqual(ud.Client, ud.get_user_type(UserType.CLIENT))
self.assertEqual(ud.Owner, ud.get_user_type(UserType.OWNER))
def test_get_user(self):
with self.assertRaises(ValueError):
ud.get_user(UserType.CLIENT, "non-existing")
self.assertEqual(self.c, ud.get_user(UserType.CLIENT, self.username))
| 6,139
| 33.689266
| 77
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/__init__.py
| 0
| 0
| 0
|
py
|
|
parameter-exchange
|
parameter-exchange-master/src/test/test_base_server.py
|
#!/usr/bin/env python3
"""Test of the base server code.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import json
import logging
import shutil
from unittest import TestCase
from unittest.mock import patch
from flask import Flask
import lib.base_server as bserver
from lib import config
from lib.base_client import UserType
from lib.helpers import get_free_port
test_dir = config.DATA_DIR + "test/"
client = "client"
provider = "provider"
password = "password"
token = "token"
def mock_verify_pw(user_type, user, pw):
"""Mock method for password check with few overhead (without expensive
hashing)."""
return (user == client or user == provider) and pw == password
def mock_verify_token(user_type, user, tk):
"""Mock method for password check with few overhead (without expensive
hashing)."""
return (user == client or user == provider) and tk == token
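# These stand-ins replace the real credential checks, which hash passwords
# and are therefore slow; the tests below swap them in via plain attribute
# assignment (e.g. mock_db.verify_password = mock_verify_pw) to keep the
# suite fast.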
class BaseServerTest(TestCase):
@classmethod
def setUpClass(cls) -> None:
"""Create mock app and remove old test data."""
logging.getLogger().setLevel(logging.ERROR)
shutil.rmtree(test_dir, ignore_errors=True)
cls.app = get_mock_app()
cls.client = cls.app.test_client()
def setUp(self) -> None:
"""Enable log-in for testing."""
self.app.config.update(LOGIN_DISABLED=False)
def test_is_redis_online(self):
self.assertFalse(bserver.is_redis_online(get_free_port()))
def test_disable_login(self):
with self.app.test_request_context('/'):
self.app.config.update(LOGIN_DISABLED=True)
self.assertTrue(bserver.verify_token('client', "user", "token"))
@patch("lib.base_server.db")
def test_verify_token(self, mock_db):
with self.app.test_request_context('/'):
mock_db.verify_token.side_effect = TypeError()
with self.assertRaises(TypeError) as e:
bserver.verify_token("wrong", "user", "token")
mock_db.verify_token.side_effect = ValueError()
self.assertFalse(bserver.verify_token(
UserType.CLIENT, "non-existing-user", "token"))
self.assertFalse(bserver.verify_token(
UserType.OWNER, "non-existing-user", "token"))
mock_db.verify_password.side_effect = None
mock_db.verify_token = mock_verify_token
self.assertFalse(bserver.verify_token(UserType.CLIENT, "client",
"wrong"))
self.assertFalse(bserver.verify_token(UserType.OWNER,
"provider", "wrong"))
self.assertTrue(bserver.verify_token(UserType.CLIENT, "client",
token))
self.assertTrue(bserver.verify_token(UserType.OWNER, "provider",
token))
@patch("lib.base_server.db")
def test_verify_client_pw(self, mock_db):
with self.app.test_request_context('/'):
mock_db.verify_password.side_effect = ValueError()
self.assertFalse(bserver.verify_client_pw("non-existing-user",
"wrong"))
mock_db.verify_password.side_effect = None
mock_db.verify_password = mock_verify_pw
self.assertFalse(bserver.verify_client_pw("client", "wrong"))
self.assertTrue(bserver.verify_client_pw("client", "password"))
self.app.config.update(LOGIN_DISABLED=True)
self.assertTrue(bserver.verify_client_pw("client", "wrong"))
self.assertTrue(bserver.verify_client_pw("non-existing-user",
"wrong"))
@patch("lib.base_server.db")
def test_verify_provider_pw(self, mock_db):
with self.app.test_request_context('/'):
mock_db.verify_password.side_effect = ValueError()
self.assertFalse(bserver.verify_provider_pw("non-existing-user",
"wrong"))
mock_db.verify_password.side_effect = None
mock_db.verify_password = mock_verify_pw
self.assertFalse(bserver.verify_provider_pw("provider", "wrong"))
self.assertTrue(bserver.verify_provider_pw("provider", "password"))
self.app.config.update(LOGIN_DISABLED=True)
self.assertTrue(bserver.verify_provider_pw("provider", "wrong"))
self.assertTrue(bserver.verify_provider_pw("non-existing-user",
"wrong"))
@patch("lib.base_server.db")
def test_gen_token(self, mock_db):
with self.app.test_request_context('/'):
# Non existing user
mock_db.generate_token.side_effect = ValueError(
'Could not generate token: No user non-existing-user exists.')
j = json.loads(bserver.gen_token(UserType.CLIENT,
"non-existing-user").data)
self.assertEqual(j['success'], False)
self.assertEqual(j['msg'], "Could not generate token: No user "
"non-existing-user exists.")
j = json.loads(bserver.gen_token(UserType.OWNER,
"non-existing-user").data)
self.assertEqual(j['success'], False)
self.assertEqual(j['msg'], "Could not generate token: No user "
"non-existing-user exists.")
# Success
mock_db.generate_token.return_value = "new_token"
mock_db.generate_token.side_effect = None
j = json.loads(bserver.gen_token(UserType.CLIENT,
"client").data)
self.assertEqual(j['success'], True)
self.assertEqual(j['token'], "new_token")
j = json.loads(bserver.gen_token(UserType.OWNER,
"provider").data)
self.assertEqual(j['success'], True)
self.assertEqual(j['token'], "new_token")
def get_mock_app() -> Flask:
"""Return a mock flask app with few overhead."""
app = Flask(__name__)
app.config.from_mapping(
TESTING=True,
DATA_DIR=test_dir,
SQLALCHEMY_DATABASE_URI=f"sqlite:///{test_dir}/{config.STORAGE_DB}",
SQLALCHEMY_TRACK_MODIFICATIONS=False
)
return app
| 6,562
| 41.070513
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_database.py
|
#!/usr/bin/env python3
"""Test the key server database.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import contextlib
import logging
import os
from unittest import TestCase
from flask import Flask
from lib import config
from lib import database
test_dir = config.DATA_DIR + '/test/'
mock_app = Flask(__name__)
mock_app.config.from_mapping(
SQLALCHEMY_DATABASE_URI=f"sqlite:///{test_dir}/{config.KEYSERVER_DB}",
SQLALCHEMY_TRACK_MODIFICATIONS=False
)
class TestKeyDB(TestCase):
def setUp(self) -> None:
logging.getLogger().setLevel(logging.FATAL)
os.makedirs(test_dir, exist_ok=True)
with contextlib.suppress(FileNotFoundError):
os.remove(test_dir + f"{config.KEYSERVER_DB}")
database.db.init_app(mock_app)
def test_add_get_task(self):
with mock_app.test_request_context():
database.db.create_all()
t1 = database.Task(user_id="test-user", user_type="bluber",
id="123", task_type="nada")
t2 = database.Task(user_id="test-user", user_type="bluber",
id="124", task_type="nada")
database.add_task("test-user", "bluber", "123", "nada")
database.add_task("test-user", "bluber", "124", "nada")
res = database.get_tasks("bluber", "test-user")
# We can't compare the whole objects because of the timestamps.
self.assertEqual(t1.user_id, res[0].user_id)
self.assertEqual(t1.user_type, res[0].user_type)
self.assertEqual(t1.id, res[0].id)
self.assertEqual(t1.task_type, res[0].task_type)
self.assertEqual(t2.user_id, res[1].user_id)
self.assertEqual(t2.user_type, res[1].user_type)
self.assertEqual(t2.id, res[1].id)
self.assertEqual(t2.task_type, res[1].task_type)
res = database.get_tasks("bluber", "test-user2")
self.assertEqual([], res)
# Check representation
self.assertEqual("<Task 123>", str(t1))
| 2,125
| 35.033898
| 75
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_key_server_backend.py
|
#!/usr/bin/env python3
"""Test the key server backend.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import os
import pickle
import shutil
from unittest import TestCase, mock
from unittest.mock import Mock, patch
import lib.config as config
import lib.key_server_backend as key_server
logging.getLogger(config.KEY_LOGNAME).setLevel(logging.ERROR)
test_dir = config.DATA_DIR + "test/"
@patch("lib.config.DATA_DIR", test_dir)
@patch("lib.config.OT_SETSIZE", 20)
class TestKeyServer(TestCase):
def setUp(self) -> None:
"""Clear test dir."""
shutil.rmtree(test_dir, ignore_errors=True)
@classmethod
def setUpClass(cls) -> None:
"""Clear test directory."""
shutil.rmtree(test_dir, ignore_errors=True)
@classmethod
def tearDownClass(cls) -> None:
"""Remove test files."""
shutil.rmtree(test_dir, ignore_errors=True)
def test_init(self):
self.assertFalse(os.path.exists(test_dir))
self.assertFalse(
os.path.exists(test_dir + config.KEY_HASHKEY_PATH))
self.assertFalse(os.path.exists(test_dir + config.KEY_ENCKEY_PATH))
k = key_server.KeyServer(test_dir) # generate new files
generated_hash_key = k._hash_key
generated_enc_keys = k._enc_keys
# check that Files are created
self.assertTrue(os.path.exists(test_dir + config.KEY_HASHKEY_PATH))
self.assertTrue(os.path.exists(test_dir + config.KEY_ENCKEY_PATH))
# Check that stored keys are same
with open(test_dir + config.KEY_HASHKEY_PATH, 'rb') as fd:
loaded_hash_key = pickle.load(fd)
with open(test_dir + config.KEY_ENCKEY_PATH, 'rb') as fd:
loaded_enc_keys = pickle.load(fd)
self.assertEqual(generated_hash_key, loaded_hash_key)
self.assertEqual(generated_enc_keys, loaded_enc_keys)
# Test loading
k2 = key_server.KeyServer(test_dir)
self.assertEqual(generated_hash_key, k2._hash_key)
def test_gen_key(self):
with self.assertRaises(ValueError):
# Keys should have byte length
key_server.KeyServer._gen_key(5)
self.assertTrue(isinstance(key_server.KeyServer._gen_key(16), bytes))
def test_generate_hash_key(self):
self.assertFalse(os.path.exists(test_dir))
k = key_server.KeyServer(test_dir)
old_hash_key = k._hash_key
k._generate_hash_key()
self.assertNotEqual(old_hash_key, k._hash_key)
with open(test_dir + config.KEY_HASHKEY_PATH, 'rb') as fd:
loaded_key = pickle.load(fd)
self.assertEqual(k._hash_key, loaded_key)
def test_generate_enc_keys(self):
self.assertFalse(os.path.exists(test_dir))
k = key_server.KeyServer(test_dir)
old_enc_keys = k._enc_keys
k._generate_enc_keys()
self.assertNotEqual(old_enc_keys, k._enc_keys)
with open(test_dir + config.KEY_ENCKEY_PATH, 'rb') as fd:
loaded_keys = pickle.load(fd)
self.assertEqual(k._enc_keys, loaded_keys)
def test_offerOT(self):
with patch("lib.config.OT_SETSIZE", 20):
total_ots = 10
port = 55555
k = key_server.KeyServer(test_dir)
m = Mock()
with mock.patch("lib.key_server_backend.PyOTSender",
return_value=m):
k.offer_ot(total_ots, port)
int_keys = [int.from_bytes(i, 'big') for i in k._enc_keys]
self.assertEqual(m.executeSame.call_args[0][0], int_keys)
with patch("lib.config.OT_SETSIZE", 1000):
with self.assertRaises(RuntimeError):
config.OT_SETSIZE = 1000
k.offer_ot(total_ots, port)
@patch("lib.key_server_backend.KeyServer.__init__", Mock(return_value=None))
def test_get_hash_key(self):
k = key_server.KeyServer(test_dir)
with self.assertRaises(RuntimeError):
k.get_hash_key()
k._hash_key = 5
self.assertEqual(5, k.get_hash_key())
| 4,111
| 35.714286
| 80
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_ot.py
|
#!/usr/bin/env python3
"""Test Cython Port of libOTe.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import multiprocessing
import sys
from unittest import TestCase, skip
from lib import config
from lib.helpers import get_free_port
config.OT_SETSIZE = 20 # noqa
# Necessary to override: the default number of OTs is problematic for tests.
# Python Version of libOTe
sys.path.append(config.WORKING_DIR + 'cython/ot')
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTSender # noqa
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTReceiver # noqa
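# The test names below refer to two N-choose-1 OT extension protocols shipped
# with libOTe: KKRT (semi-honest; 128-bit inputs in these tests) and OOS
# (maliciously secure; these bindings expect inputBitCount = 76 for it). The
# protocol mapping is inferred from the test names and parameters.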
class OTTest(TestCase):
values = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6, 7, 8,
9]
choices = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
def setUp(self) -> None:
"""Create receiver and Sender."""
port = get_free_port()
self.recv = self.get_receiver(port, False)
self.sender = self.get_sender(port, False)
self.port = port
def ot_sender(self, port: int, tls: bool, mal_secure: bool): # pragma no cover
"""Act as OT Sending Server to test receivers."""
sender = PyOTSender()
sender.totalOTs = len(self.choices)
sender.port = port
sender.numChosenMsgs = len(self.values)
sender.serverKey = config.KEY_TLS_KEY
sender.serverCert = config.KEY_TLS_CERT
if mal_secure:
sender.maliciousSecure = True
sender.inputBitCount = 76
else:
sender.maliciousSecure = False
sender.inputBitCount = 128
sender.executeSame(self.values, tls)
def ot_receiver(self, queue, tls: bool, mal_secure: bool): # pragma no cover
"""Act as OT Reciever and write result into queue."""
recv = PyOTReceiver()
recv.totalOTs = len(self.choices)
recv.numChosenMsgs = len(self.values)
recv.hostName = "127.0.0.1"
recv.port = self.port
recv.rootCA = config.TLS_ROOT_CA
if mal_secure:
recv.maliciousSecure = True
recv.inputBitCount = 76
else:
recv.maliciousSecure = False
recv.inputBitCount = 128
res = recv.execute(self.choices, tls)
queue.put(res)
@classmethod
def get_receiver(cls, port: int, mal_secure: bool) -> PyOTReceiver:
"""Return configured receiver."""
recv = PyOTReceiver()
recv.totalOTs = len(cls.choices)
recv.numThreads = config.OT_THREADS
recv.hostName = "127.0.0.1"
recv.port = port
recv.rootCA = config.TLS_ROOT_CA
if mal_secure: # pragma no cover
recv.maliciousSecure = True
recv.inputBitCount = 76
else:
recv.maliciousSecure = False
recv.inputBitCount = 128
recv.statSecParam = config.OT_STATSECPARAM
recv.numChosenMsgs = len(cls.values)
return recv
@classmethod
def get_sender(cls, port: int, mal_secure: bool) -> PyOTSender:
"""Return configured sender."""
sender = PyOTSender()
sender.totalOTs = len(cls.choices)
sender.port = port
sender.numChosenMsgs = len(cls.values)
sender.serverKey = config.KEY_TLS_KEY
sender.serverCert = config.KEY_TLS_CERT
if mal_secure: # pragma no cover
sender.maliciousSecure = True
sender.inputBitCount = 76
else:
sender.maliciousSecure = False
sender.inputBitCount = 128
return sender
def test_kkrt_receiving_with_tls(self):
tls = True
p = multiprocessing.Process(target=self.ot_sender,
args=(self.port, tls, False))
p.start()
res = self.recv.execute(self.choices, tls)
p.join()
self.assertEqual(res, self.values[:10])
def test_kkrt_receiving_without_tls(self):
tls = False
p = multiprocessing.Process(target=self.ot_sender,
args=(self.port, tls, False))
p.start()
res = self.recv.execute(self.choices, tls)
p.join()
self.assertEqual(res, self.values[:10])
@skip("Implicitelly tested via receive above.")
def test_kkrt_sending_without_tls(self): # pragma no cover
tls = False
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.ot_receiver,
args=(q, tls, False))
p.start()
self.sender.executeSame(self.values, tls)
p.join()
result = q.get()
self.assertEqual(result, self.values[:10])
@skip("Implicitelly tested via receive above.")
def test_kkrt_sending_with_tls(self): # pragma no cover
tls = True
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.ot_receiver,
args=(q, tls, False))
p.start()
self.sender.executeSame(self.values, tls)
p.join()
result = q.get()
self.assertEqual(result, self.values[:10])
def test_oos_receiving_with_tls(self):
tls = True
p = multiprocessing.Process(target=self.ot_sender,
args=(self.port, tls, True))
p.start()
self.recv.maliciousSecure = True
with self.assertRaises(RuntimeError):
# Bad inputbitcount
self.recv.execute(self.choices, tls)
self.recv.inputBitCount = 76
res = self.recv.execute(self.choices, tls)
p.join()
self.assertEqual(res, self.values[:10])
def test_oos_receiving_without_tls(self):
tls = False
p = multiprocessing.Process(target=self.ot_sender,
args=(self.port, tls, True))
p.start()
self.recv.maliciousSecure = True
with self.assertRaises(RuntimeError):
            # Bad inputBitCount
self.recv.execute(self.choices, tls)
self.recv.inputBitCount = 76
res = self.recv.execute(self.choices, tls)
p.join()
self.assertEqual(res, self.values[:10])
@skip("Implicitelly tested via receive above.")
def test_oos_sending_without_tls(self): # pragma no cover
tls = False
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.ot_receiver,
args=(q, tls, True))
p.start()
self.sender.maliciousSecure = True
with self.assertRaises(RuntimeError):
            # Bad inputBitCount
self.sender.executeSame(self.values, tls)
self.sender.inputBitCount = 76
self.sender.executeSame(self.values, tls)
p.join()
result = q.get()
self.assertEqual(result, self.values[:10])
@skip("Implicitelly tested via receive above.")
def test_oos_sending_with_tls(self): # pragma no cover
tls = True
q = multiprocessing.Queue()
p = multiprocessing.Process(target=self.ot_receiver,
args=(q, tls, True))
p.start()
self.sender.maliciousSecure = True
with self.assertRaises(RuntimeError):
            # Bad inputBitCount
self.sender.executeSame(self.values, tls)
self.sender.inputBitCount = 76
self.sender.executeSame(self.values, tls)
p.join()
result = q.get()
self.assertEqual(result, self.values[:10])
| 7,501
| 34.386792
| 83
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_keyserver_app.py
|
#!/usr/bin/env python3
"""Test key server frontend flask application.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import json
import logging
import os
import socket
import warnings
from datetime import datetime, timedelta
from unittest import TestCase, skip
from unittest.mock import Mock, patch
from flask import g, current_app
import key_server
from key_server import connector
from key_server.connector import TaskType
from lib import config
from lib.base_client import UserType
from lib.database import Task
from lib.helpers import generate_auth_header, to_base64
test_dir = config.DATA_DIR + "test/"
correct_user = 'correct_user'
correct_pw = "correct_pw"
correct_tk = 'correct_token'
def mock_verify_token(user_type, user, token):
"""Inexpensive mock version of verify token."""
if 'LOGIN_DISABLED' in current_app.config and current_app.config[
'LOGIN_DISABLED']:
return True # pragma no cover
return user == correct_user and token == correct_tk and user_type in [
UserType.CLIENT, UserType.OWNER
]
def mock_verify_pw(user, pw):
"""Inexpensive mock version of verify password."""
return user == correct_user and pw == correct_pw
class KeyCompTest(TestCase):
user = correct_user
tk = correct_tk
pw = correct_pw
@classmethod
def setUpClass(cls) -> None:
"""Disable logging, create dummy flask app and pre-generate
auth-headers"""
logging.getLogger().setLevel(logging.ERROR)
os.makedirs(test_dir, exist_ok=True)
test_config = {
'TESTING': True,
'DATA_DIR': test_dir
}
cls.app = key_server.create_app(test_config,
logging_level=logging.FATAL)
cls.client = cls.app.test_client()
cls.auth_header = generate_auth_header(correct_user,
correct_tk)
cls.auth_header_cor_pw = generate_auth_header(correct_user,
correct_pw)
cls.auth_header_wrong_user = generate_auth_header("wrong",
correct_tk)
cls.auth_header_wrong_pw = generate_auth_header(correct_user,
"wrong")
def setUp(self) -> None:
"""Reset Login to be enabled."""
self.app.config.update(LOGIN_DISABLED=False)
# -------------------------------------------------------------------------
# main.py------------------------------------------------------------------
@patch("key_server.main.is_redis_online", Mock(return_value=False))
@patch("key_server.main.render_template", Mock(return_value="Text"))
@patch("key_server.main.is_celery_online", Mock(return_value=False))
def test_main_true(self):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(b"Text", res.data)
@patch("key_server.main.is_redis_online", Mock(return_value=False))
@patch("key_server.main.render_template", Mock(return_value="Text"))
@patch("key_server.main.is_celery_online", Mock(return_value=False))
def test_main_false(self):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(b"Text", res.data)
@patch("key_server.celery_app")
def test_celery_status(self, m):
m.control.inspect.return_value.ping.return_value = 1
res = self.client.get('/celery')
self.assertEqual(res.status_code, 200)
self.assertEqual(b"True", res.data)
m.control.inspect.return_value.ping.return_value = None
res = self.client.get('/celery')
self.assertEqual(res.status_code, 200)
self.assertEqual(b"False", res.data)
def test_favicon(self):
warnings.filterwarnings("ignore", category=ResourceWarning)
        # Flask bug in unit tests that leads to a 'ResourceWarning'
res = self.client.get('/favicon.ico')
self.assertEqual(res.status_code, 200)
# -------------------------------------------------------------------------
# connector.py-------------------------------------------------------------
@patch.object(connector, "KeyServer", Mock)
def test_get_keyserver_backend(self):
with self.app.test_request_context('/'):
self.assertFalse('keyserver' in g)
connector.get_keyserver_backend()
self.assertTrue('keyserver' in g)
@patch.object(connector, "get_keyserver_backend")
@patch.object(connector, "_add_to_hash_key_db", Mock())
def test_get_hash_key(self, m):
key = int(1).to_bytes(int(config.HASHKEY_LEN / 8), 'big')
m.return_value.get_hash_key.return_value = key
with self.app.test_request_context():
res = connector.get_hash_key(UserType.CLIENT, "client")
j = {
'success': True,
'hash_key': to_base64(key)
}
self.assertEqual(res, j)
@patch("key_server.connector.execute_ot", Mock())
@patch("key_server.database.db", Mock())
@patch("key_server.connector._add_to_key_retrieval_db", Mock())
def test_retrieve_keys(self):
port = 1213
host = "127.0.0.1"
total_ots = 10
tls = config.OT_TLS
j = {
'success': True,
'port': port,
'host': host,
'totalOTs': total_ots,
'tls': tls
}
with self.app.test_request_context('/'):
# No total OTs argument defined
res = connector.retrieve_keys(UserType.CLIENT, "client")
self.assertFalse(res['success'])
self.assertEqual(res['msg'], "No total OTs defined.")
with self.app.test_request_context(f'/?totalOTs={total_ots}'):
for user_type in [UserType.OWNER, UserType.CLIENT]:
# Normal process without randomization
self.app.config.update(KEY_RANDOMIZE_PORTS=False)
res = connector.retrieve_keys(user_type, "client")
# self.assertEqual(j, res)
# Block port 1213
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(('', 1213))
except OSError: # pragma no cover
# Already blocked
pass
res = connector.retrieve_keys(user_type, "client")
self.assertNotEqual(res['port'], 1213)
s.close()
                # Normal process with randomization
self.app.config.update(KEY_RANDOMIZE_PORTS=True)
res = connector.retrieve_keys(user_type, "client")
self.assertNotEqual(res['port'], 1213)
self.assertEqual(res['success'], True)
self.assertEqual(res['host'], host)
self.assertEqual(res['totalOTs'], total_ots)
self.assertEqual(res['tls'], tls)
@skip("Slow b/c of celery and trivial")
@patch.object(connector, "get_keyserver_backend") # pragma no cover
def test_execute_ot(self, m):
mock_backend = Mock()
m.return_value = mock_backend
port = 50000
total_ots = 10
setsize = 20
with self.app.test_request_context('/'):
config.OT_SETSIZE = setsize
connector.execute_ot.apply(args=(total_ots, port))
mock_backend.offer_ot.assert_called_once_with(total_ots, port)
# -------------------------------------------------------------------------
# client.py----------------------------------------------------------------
@patch("key_server.client.verify_token", mock_verify_token)
@patch("key_server.client.get_hash_key", Mock(return_value=1))
def test_client_verify_token(self):
self.app.config.update(LOGIN_DISABLED=False)
# No authentication info provided
res = self.client.get('/client/hash_key')
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Non existing User
auth_head = self.auth_header_wrong_user
res = self.client.get('/client/hash_key', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Bad Token
auth_head = self.auth_header_wrong_pw
res = self.client.get('/client/hash_key', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Correct
auth_head = self.auth_header
res = self.client.get('/client/hash_key', headers=auth_head)
self.assertEqual(res.status_code, 200)
# noinspection DuplicatedCode
@patch("key_server.client.gen_token")
def test_client_gen_token(self, m):
m.return_value = {
'success': True,
'token': 'new-token'
}
from key_server.client import client_pw
client_pw.verify_password(mock_verify_pw) # Mock PW function
# Test authentication bad PW
auth_head = self.auth_header_wrong_pw
res = self.client.get('/client/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Test authentication bad username
auth_head = self.auth_header_wrong_user
res = self.client.get('/client/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get('/client/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json['success'], True)
self.assertEqual(res.json['token'], 'new-token')
@patch("key_server.client.verify_token", mock_verify_token)
@patch("key_server.client.get_hash_key", Mock(return_value=1))
def test_client_get_hash_key(self):
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get('/client/hash_key', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
correct_json = 1
auth_head = self.auth_header
res = self.client.get('/client/hash_key', headers=auth_head)
res_json = json.loads(res.data)
self.assertEqual(res_json, correct_json)
@patch("key_server.client.verify_token", mock_verify_token)
@patch("key_server.client.retrieve_keys",
Mock(return_value={'success': True}))
def test_client_retrieve_keys(self):
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get('/client/key_retrieval', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header
res = self.client.get('/client/key_retrieval', headers=auth_head)
self.assertEqual(res.status_code, 200)
@patch.object(connector.Tasks['OT'], "AsyncResult")
@patch("key_server.connector.render_template", Mock())
def test_status_overview(self, om):
om.return_value.info = "blub"
om.return_value.state = "FAILURE"
om.return_value.id = "a"
date1 = datetime.now()
date2 = (datetime.now() + timedelta(days=1))
for user_type in [UserType.CLIENT, UserType.OWNER]:
mock_tasks = [
Task(id='a', user_id="userA", task_type=TaskType.OT,
user_type=user_type, timestamp=date1),
Task(id='b', user_id="userA", task_type=TaskType.OT,
user_type="client", timestamp=date2),
Task(id='c', user_id="userA", task_type="Bad Task",
user_type=user_type, timestamp=date1)
]
with patch("lib.database.get_tasks",
Mock(return_value=mock_tasks)):
res = [
{
'id': 'b',
'status': "FAILURE",
'type': TaskType.OT,
'time': date2,
'error': "blub",
'task_url': f"/{user_type}/OT/status/a",
'kill_url': f"/{user_type}/OT/kill/a"
},
{
'id': 'a',
'status': "FAILURE",
'type': TaskType.OT,
'time': date1,
'error': "blub",
'task_url': f"/{user_type}/OT/status/a",
'kill_url': f"/{user_type}/OT/kill/a"
}
]
with self.app.test_request_context('/'):
with self.assertRaises(ValueError):
# Bad User Type
connector.status_overview("bad_type")
with self.assertRaises(ValueError):
# Bad Task Type
connector.status_overview(user_type)
                    del mock_tasks[2]
connector.status_overview(user_type)
self.assertEqual(res, g.tasks)
@patch.object(connector.Tasks['OT'], "AsyncResult")
@patch.object(connector, "get_keyserver_backend", Mock())
def test_status(self, m):
# Bad type
self.assertIn("404 Not Found", connector.task_status("bad", "a")[0])
self.assertEqual(404, connector.task_status("bad", "a")[1])
m.return_value.id = "a"
m.return_value.info = "blub"
for t in [TaskType.OT]:
m.return_value.state = "PENDING"
r = connector.task_status(t, "a")
self.assertEqual(r, {'state': "PENDING"})
m.return_value.state = "SUCCESS"
r = connector.task_status(t, "a")
self.assertEqual(r, {'state': "SUCCESS"})
m.return_value.state = "FAILURE"
r = connector.task_status(t, "a")
self.assertEqual(r,
{'state': "FAILURE", 'status': "blub"})
@patch.object(connector.Tasks['OT'], "AsyncResult")
@patch.object(connector, "get_keyserver_backend", Mock())
def test_kill(self, m):
# Bad type
self.assertIn("404 Not Found", connector.kill_task("bad", "a")[0])
self.assertEqual(404, connector.kill_task("bad", "a")[1])
m.return_value.id = "a"
m.return_value.info = "blub"
for t in [TaskType.OT]:
m.return_value.state = "PENDING"
r = connector.kill_task(t, "a")
self.assertEqual(
{'success': False, 'msg': 'Task not running.'}, r
)
m.return_value.state = "STARTED"
r = connector.kill_task(t, "a")
self.assertEqual(
{'success': True, 'msg': None}, r
)
# -------------------------------------------------------------------------
# provider.py--------------------------------------------------------------
@patch("key_server.provider.verify_token", mock_verify_token)
@patch("key_server.provider.get_hash_key", Mock(return_value=1))
def test_provider_verify_token(self):
self.app.config.update(LOGIN_DISABLED=False)
# No authentication info provided
res = self.client.get('/provider/hash_key')
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Non existing User
auth_head = self.auth_header_wrong_user
res = self.client.get('/provider/hash_key', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Bad Token
auth_head = self.auth_header_wrong_pw
res = self.client.get('/provider/hash_key', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Correct
auth_head = self.auth_header
res = self.client.get('/provider/hash_key', headers=auth_head)
self.assertEqual(res.status_code, 200)
# noinspection DuplicatedCode
@patch("key_server.provider.gen_token")
def test_provider_gen_token(self, m):
m.return_value = {
'success': True,
'token': 'new-token'
}
from key_server.provider import provider_pw
provider_pw.verify_password(mock_verify_pw) # Mock PW function
# Test authentication bad PW
auth_head = self.auth_header_wrong_pw
res = self.client.get('/provider/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Test authentication bad username
auth_head = self.auth_header_wrong_user
res = self.client.get('/provider/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get('/provider/gen_token', headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.json['success'], True)
@patch("key_server.provider.verify_token", mock_verify_token)
@patch("key_server.provider.get_hash_key", Mock(return_value=1))
def test_provider_get_hash_key(self):
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get('/provider/hash_key', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
correct_json = 1
auth_head = self.auth_header
res = self.client.get('/provider/hash_key', headers=auth_head)
res_json = json.loads(res.data)
self.assertEqual(res_json, correct_json)
@patch("key_server.provider.verify_token", mock_verify_token)
def test_provider_retrieve_keys(self):
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get('/provider/key_retrieval', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
with patch("key_server.provider.retrieve_keys") as m:
m.return_value = {'success': True}
auth_head = self.auth_header
res = self.client.get('/provider/key_retrieval', headers=auth_head)
self.assertEqual(res.status_code, 200)
@patch("key_server.client.status_overview")
@patch("key_server.provider.status_overview")
def test_user_status(self, m, m2):
# client and Provider
m.return_value = "Test"
m2.return_value = "Test"
from key_server.client import client_pw
from key_server.provider import provider_pw
client_pw.verify_password(mock_verify_pw) # Mock PW function
provider_pw.verify_password(mock_verify_pw)
for user_type in [UserType.CLIENT, UserType.OWNER]:
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get(f'/{user_type}/status', headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get(f'/{user_type}/status', headers=auth_head)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b"Test")
@patch("key_server.client.task_status")
@patch("key_server.provider.task_status")
def test_task_status(self, m, m2):
# client and Provider
d = {'test': 'test'}
m.return_value = d
m2.return_value = d
from key_server.client import client_pw
from key_server.provider import provider_pw
client_pw.verify_password(mock_verify_pw) # Mock PW function
provider_pw.verify_password(mock_verify_pw)
for user_type in [UserType.CLIENT, UserType.OWNER]:
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get(f'/{user_type}/{TaskType.OT}/status/a',
headers=auth_head)
self.assertEqual(401, res.status_code)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get(f'/{user_type}/{TaskType.OT}/status/a',
headers=auth_head)
self.assertEqual(200, res.status_code)
self.assertEqual(d, res.json)
@patch("key_server.client.kill_task")
@patch("key_server.provider.kill_task")
def test_task_kill(self, m, m2):
# client and Provider
d = {'test': 'test'}
m.return_value = d
m2.return_value = d
from key_server.client import client_pw
from key_server.provider import provider_pw
client_pw.verify_password(mock_verify_pw) # Mock PW function
provider_pw.verify_password(mock_verify_pw)
for user_type in [UserType.CLIENT, UserType.OWNER]:
# Test authentication
auth_head = self.auth_header_wrong_pw
res = self.client.get(f'/{user_type}/{TaskType.OT}/kill/a',
headers=auth_head)
self.assertEqual(res.status_code, 401)
self.assertEqual(res.data, bytes('Unauthorized Access',
encoding='UTF-8'))
# Success
auth_head = self.auth_header_cor_pw
res = self.client.get(f'/{user_type}/{TaskType.OT}/kill/a',
headers=auth_head)
self.assertEqual(200, res.status_code)
self.assertEqual(d, res.json)
@patch("key_server.connector.HashKeyRetrieval", return_value="transaction")
@patch("key_server.connector.get_user", Mock(return_value="user"))
@patch("key_server.connector.db")
def test__add_to_hash_key_db(self, db, h):
with self.assertRaises(ValueError):
connector._add_to_hash_key_db("bad_type", "blub")
# Client
connector._add_to_hash_key_db(UserType.CLIENT, "blub")
h.assert_called_once_with(client="user")
db.session.add.assert_called_once()
self.assertEqual("transaction", db.session.add.call_args[0][0])
db.session.commit.assert_called_once()
h.reset_mock()
db.reset_mock()
# Provider
connector._add_to_hash_key_db(UserType.OWNER, "blub")
h.assert_called_once_with(provider="user")
db.session.add.assert_called_once()
self.assertEqual("transaction", db.session.add.call_args[0][0])
db.session.commit.assert_called_once()
@patch("key_server.connector.KeyRetrieval", return_value="transaction")
@patch("key_server.connector.get_user", Mock(return_value="user"))
@patch("key_server.connector.db")
def test__add_to_key_retrieval_db(self, db, h):
with self.assertRaises(ValueError):
connector._add_to_key_retrieval_db("bad_type", "blub", 5)
# Client
connector._add_to_key_retrieval_db(UserType.CLIENT, "blub", 5)
h.assert_called_once_with(client="user", retrieved_keys=5)
db.session.add.assert_called_once()
self.assertEqual("transaction", db.session.add.call_args[0][0])
db.session.commit.assert_called_once()
h.reset_mock()
db.reset_mock()
# Provider
connector._add_to_key_retrieval_db(UserType.OWNER, "blub", 5)
h.assert_called_once_with(provider="user", retrieved_keys=5)
db.session.add.assert_called_once()
self.assertEqual("transaction", db.session.add.call_args[0][0])
db.session.commit.assert_called_once()
| 25,395
| 42.635739
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/test/test_helpers.py
|
#!/usr/bin/env python3
"""Test class for helper functions.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import os
import shutil
import socket
import ssl
from unittest import TestCase
import multiprocessing as mp
import lib.config as config
import lib.helpers as helpers
class HelpersTest(TestCase):
# noinspection DuplicatedCode
def test_create_data_dir(self):
testdir = config.DATA_DIR + '/test/'
shutil.rmtree(testdir, ignore_errors=True)
self.assertFalse(os.path.exists(testdir))
self.assertFalse(os.path.exists(testdir + '/logs/'))
helpers.create_data_dir(testdir)
self.assertTrue(os.path.exists(testdir))
self.assertTrue(os.path.exists(testdir + '/logs/'))
def test_parse_list(self):
l1_s = "[1,2,3,4,5]"
l1 = [1., 2., 3., 4., 5.]
l2_s = "[1,2.4123,3.45, 4.884231,5]\n"
l2 = [1., 2.4123, 3.45, 4.884231, 5.]
self.assertEqual(l1, helpers.parse_list(l1_s))
self.assertEqual(l2, helpers.parse_list(l2_s))
def test_port_free(self):
self.assertTrue(helpers.port_free(60000))
def test_port_free_false(self):
for port in range(50000, 65535):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(('', port))
except OSError as e: # pragma no cover
                if e.errno in (98, 48):  # EADDRINUSE on Linux / macOS
s.close()
else:
raise e
self.assertFalse(helpers.port_free(port))
s.close()
break
with self.assertRaises(TypeError):
helpers.port_free("string")
def test_get_tls_context(self):
ctx = helpers.get_tls_context(config.STORAGE_TLS_CERT,
config.STORAGE_TLS_KEY)
self.assertTrue(isinstance(ctx, ssl.SSLContext))
def test_generate_auth_header(self):
self.assertEqual(helpers.generate_auth_header("user", "pwd"),
[('Authorization', 'Basic dXNlcjpwd2Q=')])
def test_base64(self):
b = b'Test'
self.assertEqual(b,
helpers.from_base64(helpers.to_base64(b)))
def test_keys_to_int(self):
ints = [1, 2, 3, 4, 5, 6, 7, 8]
self.assertEqual(
ints,
helpers.keys_to_int(helpers.encryption_keys_from_int(ints))
)
def test_print_time(self):
t = 10.0 / 1000
self.assertEqual(
"10.0ms",
helpers.print_time(t)
)
t = 5.555
self.assertEqual(
"5.55s",
helpers.print_time(t)
)
t = 340.600
self.assertEqual(
"5min 40.6s",
helpers.print_time(t)
)
t = 8113.000
self.assertEqual(
"2h 15min 13.0s",
helpers.print_time(t)
)
def test_queue_to_list(self):
q = mp.Queue()
q.put("A")
q.put("B")
q.put("C")
self.assertEqual(
["A", "B", "C"],
helpers.queue_to_list(q)
)
def test_get_temp_file(self):
tmp = helpers.get_temp_file()
self.assertIn(
config.TEMP_DIR,
tmp
)
| 3,336
| 27.279661
| 71
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/base_server.py
|
#!/usr/bin/env python3
"""This file contains base server functionalities shared by both server
components.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import flask.wrappers
import redis
from flask import jsonify, current_app as app
from flask_httpauth import HTTPBasicAuth
from lib.base_client import UserType
import lib.user_database as db
log: logging.Logger = logging.getLogger(__name__)
def is_redis_online(port: int) -> bool:
"""Return true if redis is online."""
rs = redis.Redis("localhost", port)
try:
rs.ping()
return True # pragma no cover
except redis.ConnectionError:
log.info("Redis offline.")
return False
def verify_token(user_type: str, user: str, token: str) -> bool:
"""Verify if the get_token is correct for the given user and return the
username if so or raise an error otherwise."""
if 'LOGIN_DISABLED' in app.config and app.config['LOGIN_DISABLED']:
return True
try:
if not db.verify_token(user_type, user, token):
return False
except ValueError as e:
log.warning(str(e))
return False
return True
client_pw = HTTPBasicAuth()
provider_pw = HTTPBasicAuth()
@client_pw.verify_password
def verify_client_pw(user: str, pw: str) -> bool:
"""
Verify that the credentials match those in the database.
:param user: Username
:param pw: Password
:return: Authentication result.
"""
if 'LOGIN_DISABLED' in app.config and app.config['LOGIN_DISABLED']:
return True
try:
return db.verify_password(UserType.CLIENT, user, pw)
except ValueError:
return False
@provider_pw.verify_password
def verify_provider_pw(user: str, pw: str) -> bool:
"""
Verify that the credentials match those in the database.
:param user: Username
:param pw: Password
:return: Authentication result.
"""
if 'LOGIN_DISABLED' in app.config and app.config['LOGIN_DISABLED']:
return True
try:
return db.verify_password(UserType.OWNER, user, pw)
except ValueError:
return False
def gen_token(user_type: str, user: str) -> flask.wrappers.Response:
"""
Generate and return a token for the given User.
:param user_type: UserType.CLIENT or UserType.OWNER
:param user: Username
    :return: A jsonify response that can be returned directly
"""
log.debug('Token requested.')
try:
resp = jsonify(
{'success': True,
'token': db.generate_token(user_type, user)
})
except ValueError as e:
log.warning("gen_token: " + str(e))
resp = jsonify(
{
'success': False,
'msg': str(e)
}
)
return resp
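# Usage sketch (illustrative; the route and view names are hypothetical):
# @app.route('/client/gen_token')
# @client_pw.login_required
# def client_gen_token():
#     return gen_token(UserType.CLIENT, request.authorization.username)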
| 2,852
| 25.915094
| 75
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/storage_server_backend.py
|
#!/usr/bin/env python3
"""This file contains the implementation of the platform's storage server
component
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import os
import sys
from typing import List, Iterable, Tuple
from pybloomfilter import BloomFilter
import lib.config as config
from lib.base_client import UserType
from lib.helpers import from_base64
from lib.record import hash_to_index
from lib.user_database import Owner, Client, get_user
from storage_server.storage_database import StoredRecord, db, \
BillingInfo, RecordRetrieval
sys.path.append(config.WORKING_DIR + 'cython/psi')
# Python version of libPSI
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSISender # noqa
# Log configuration
log: logging.Logger = logging.getLogger(__name__)
class StorageServer:
"""Implements the storage server of the platform."""
_bloom: BloomFilter = None
_data_dir: str = config.DATA_DIR
@property
def bloom(self):
"""
Return bloom filter containing the record hashes (as base64 encoding).
        Needs to be a property to avoid concurrency problems with multiple
        threads. Initialized from database contents if no bloom filter exists.
:return: Bloom Filter
"""
if self._bloom is None:
# Initialize
bloom_file = self.data_dir + config.BLOOM_FILE
if os.path.isfile(bloom_file):
self._bloom = BloomFilter.open(
filename=bloom_file)
log.info(f"Bloom Filter loaded from file {bloom_file}!")
else:
# new Bloom filter
self._initialize_bloom_filter()
return self._bloom
def __init__(self, data_dir=config.DATA_DIR) -> None:
"""Set data directory and create it, if it does not exist."""
self.data_dir = data_dir
os.makedirs(data_dir, exist_ok=True)
def _initialize_bloom_filter(self) -> None:
"""
Create new bloom filter and add all values from storage DB.
"""
bloom_file = self.data_dir + config.BLOOM_FILE
self._bloom = BloomFilter(config.BLOOM_CAPACITY,
config.BLOOM_ERROR_RATE,
bloom_file)
records = StoredRecord.query.all()
for r in records:
self._bloom.add(r.hash)
log.info(f"Created new Bloom Filter @ {bloom_file}.")
def store_record(self, hash_val: str, ciphertext: str, owner: str) -> None:
"""
Store the record with the given attributes into the DB.
:param hash_val: Base64 of record's long hash
:param ciphertext: json.dumps(ciphertext-dict)
:param owner: owner of record as string
:return:
"""
log.debug("Store record called.")
records = [(hash_val, ciphertext, owner)]
self.batch_store_records_db(records)
self.batch_store_records_bloom(records)
log.info(f"Stored record: {hash_val} - {ciphertext} of {owner}")
@staticmethod
def batch_store_records_db(records: List[Iterable[str]]) -> None:
"""Store all records in the list into the database.
:param records: List of records, each represented as a tuple of the
base64 encoded long hash, the ciphertext as json.dumps and the
owner as string:
[
('Base64(HASH-1)', 'json.dumps(CIPHERTEXT-1)', 'owner-1'),
('Base64(HASH-2)', 'json.dumps(CIPHERTEXT-2)', 'owner-2'),
...
]
"""
log.debug("Batch store record DB called.")
recs = [
StoredRecord(hash=h, ciphertext=c, owner=o)
for (h, c, o) in records
]
for r in recs:
db.session.add(r)
db.session.commit()
log.info("Successfully stored records into DB.")
def batch_store_records_bloom(self, records: List[Iterable[str]]) -> None:
"""Store all records in the list into the bloom filter.
:param records: List of records, each represented as a tuple of the
base64 encoded long hash, the ciphertext as json.dumps and the
owner as string:
[
('Base64(HASH-1)', 'json.dumps(CIPHERTEXT-1)', 'owner-1'),
('Base64(HASH-2)', 'json.dumps(CIPHERTEXT-2)', 'owner-2'),
...
]
"""
log.debug("Batch store record Bloom called.")
for (hash_val, record, owner) in records:
self.bloom.add(hash_val)
@staticmethod
def get_record(hash_base64: str,
client: str) -> List[Tuple[str, str]]:
"""Retrieve records with defined base64 encoded hash. There might be
multiple ciphertexts with the same hash.
:param hash_base64: Base64 encoded hash
:param client: Username of client requesting the records
:return: List of matching records, each represented as a tuple with
two elements ['Base64(HASH)', 'json.dumps(CIPHERTEXT)'].
I.e. an example returned value might be:
[
['Base64(HASH)', 'json.dumps(CIPHERTEXT-1)'],
['Base64(HASH)', 'json.dumps(CIPHERTEXT-2)']
]
"""
log.debug("Get record called.")
res = StorageServer.batch_get_records([hash_base64], client)
if not res:
raise ValueError(f"No record for hash exists: '{hash_base64}'")
else:
return res
@staticmethod
def _add_to_billing_db(records: List[StoredRecord], client: Client,
transaction: RecordRetrieval):
"""
Compute and store billing information.
:param transaction: The corresponding transaction
:param records: The records retrieved from the database
:param client: The client performing the query
:return: None
"""
# Count per owner
owners = {}
for r in records:
if r.owner in owners:
owners[r.owner] += 1
else:
owners[r.owner] = 1
# Add to billing db
for owner in owners:
o: Owner = Owner.query.filter_by(username=owner).first()
if o is None:
raise ValueError(f"Owner '{o}'does not exist!")
s = BillingInfo(provider=o,
count=owners[o.username],
client=client,
transaction=transaction)
db.session.add(s)
db.session.commit()
@staticmethod
def _add_to_transaction_db(records: List[StoredRecord], client: Client,
hashes: List[str]) -> RecordRetrieval:
"""
Create a transaction and store the number of encryption keys the
client would have to retrieve from the key server.
:param records: The records retrieved from the database
:param client: The client performing the query
        :param hashes: The hashes sent by the client
:return: The created RecordRetrieval
"""
bh = 0
# Number of encryption keys for all requested hashes
br = 0
        # Number of encryption keys for all returned records
ot_indices = []
for r in records:
ind = hash_to_index(from_base64(r.hash), config.OT_INDEX_LEN)
if ind not in ot_indices:
br += 1
ot_indices.append(ind)
ot_indices.clear()
for h in hashes:
ind = hash_to_index(from_base64(h), config.OT_INDEX_LEN)
if ind not in ot_indices:
bh += 1
ot_indices.append(ind)
t = RecordRetrieval(
client=client,
enc_keys_by_hash=bh,
enc_keys_by_records=br
)
db.session.add(t)
db.session.commit()
return t
@staticmethod
def batch_get_records(hashes: List[str],
client: str) -> List[Tuple[str, str]]:
"""
Return all records matching at least one hash in the list.
Store access into billing database.
:param hashes: List of base64 encoded hashes:
['Base64(HASH-1)', 'Base64(HASH-2)', 'Base64(HASH-3)']
:param client: Username of client requesting the records
:return: List of matching records, each represented as a tuple with
two elements ['Base64(HASH)', 'json.dumps(CIPHERTEXT)'].
I.e. an example returned value might be:
[
('Base64(HASH-1)', 'json.dumps(CIPHERTEXT-1)'),
('Base64(HASH-1)', 'json.dumps(CIPHERTEXT-2)'),
('Base64(HASH-2)', 'json.dumps(CIPHERTEXT-3)')
]
        (multiple ciphertexts per hash are possible)
"""
res: List[StoredRecord] = StoredRecord.query.filter(
StoredRecord.hash.in_(hashes)).all()
c = get_user(UserType.CLIENT, client)
t = StorageServer._add_to_transaction_db(res, c, hashes)
StorageServer._add_to_billing_db(res, c, t)
return [
(r.hash, r.ciphertext)
for r in res
]
def get_bloom_filter(self) -> bytes:
"""
Return a base64 encoding of the server's bloom filter.
:return: bytes: base64 encoding of bloom filter
"""
return self.bloom.to_base64()
@staticmethod
def get_all_record_psi_hashes() -> List[int]:
"""
Return the PSI hashes for all stored records.
:return: List of PSI Indices as Ints
"""
records = StoredRecord.query.all()
res = [get_psi_index(r.hash) for r in records]
return res
@staticmethod
def offer_psi(setSize: int = config.PSI_SETSIZE,
port: int = config.PSI_PORT,
scheme: str = config.PSI_SCHEME) -> None:
"""
        Initialize a PSI to transmit the indices.
        :param setSize: Size of the sets compared with PSI
        :param port: Port for the PSI server to listen on
        :param scheme: Which PSI scheme to use
        :return: None
"""
sender = PyPSISender()
records = StorageServer.get_all_record_psi_hashes()
log.debug(f"Server set without dummies: {str(records)}")
if len(records) > setSize:
raise RuntimeError("More records than PSI Setsize allows.")
# Add unique dummies
dummy = config.PSI_DUMMY_START_SERVER
while len(records) < setSize:
records.append(dummy)
dummy += 1
sender.statSecParam = config.PSI_STATSECPARAM
sender.setSize = setSize
sender.hostName = config.PSI_HOST
sender.port = port
sender.numThreads = config.PSI_THREADS
sender.tls = config.PSI_TLS
sender.serverCert = config.KEY_TLS_CERT
sender.serverKey = config.KEY_TLS_KEY
log.info(
f"Listening for PSI connection on {sender.hostName}:{sender.port}."
f"TLS: {sender.tls}")
sender.execute(scheme, records)
log.debug(f"PSI done. Thread for port {sender.port} terminating.")
def get_psi_index(long_hash_base64: str) -> int:
"""
Convert the base64 encoded long hash into the corresponding
PSI Index.
:param long_hash_base64: Long hash in Base64 encoding
:return: PSI Index as Integer
"""
long_hash_bytes: bytes = from_base64(long_hash_base64)
psi_index = hash_to_index(long_hash_bytes, config.PSI_INDEX_LEN)
return psi_index
| 11,645
| 35.280374
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/key_server_backend.py
|
#!/usr/bin/env python3
"""This file contains the implementation of the platform's key server
component
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import pickle
import secrets
import sys
from os import path
from typing import List
from lib import config
from lib.helpers import create_data_dir, keys_to_int
sys.path.append(config.WORKING_DIR + 'cython/ot')
# Python Version of libOTe
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTSender # noqa
log: logging.Logger = logging.getLogger(__name__)
class KeyServer:
"""Implements backend key server functionality."""
_hash_key: bytes = None
_enc_keys: List[bytes] = []
def __init__(self, data_dir=config.DATA_DIR) -> None:
"""
Initialize key server.
Either load keys from file or generate keys for all possible indices.
:param data_dir: The directory containing the key files.
"""
self.data_dir = data_dir
create_data_dir(data_dir)
if path.exists(self.data_dir + config.KEY_HASHKEY_PATH):
log.info(
"Loading hash key from file: " + self.data_dir +
config.KEY_HASHKEY_PATH)
with open(self.data_dir + config.KEY_HASHKEY_PATH, "rb") as fd:
self._hash_key = pickle.load(fd)
else:
log.info("No key-file found. Generating hash key...")
self._generate_hash_key()
if path.exists(self.data_dir + config.KEY_ENCKEY_PATH):
log.info(
"Loading encryption keys from file: " + self.data_dir +
config.KEY_ENCKEY_PATH)
with open(self.data_dir + config.KEY_ENCKEY_PATH, "rb") as fd:
self._enc_keys = pickle.load(fd)
else:
log.info(
f"No key-file found. Generating {config.OT_SETSIZE} "
f"encryption keys into {self.data_dir + config.KEY_ENCKEY_PATH}.")
self._generate_enc_keys()
super().__init__()
log.info("Key Server initialization completed.")
def get_hash_key(self) -> bytes:
"""
Return hash key.
:return: Hash Key
"""
if self._hash_key is None:
raise RuntimeError("No hash key has been generated yet!")
return self._hash_key
@staticmethod
def _gen_key(bit_length: int) -> bytes:
"""
Return a cryptographically random key with given length.
:param bit_length: Length of key [Multiple of 8]
:return: A key with given bit length
"""
if bit_length % 8 != 0:
raise ValueError("Bit length of keys to generate should be "
"multiple of 8/ 1 Byte.")
return secrets.token_bytes(bit_length // 8)
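    # Example: _gen_key(128) returns 16 cryptographically random bytes,
    # while _gen_key(129) raises a ValueError (129 is not a multiple of 8).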
def _generate_hash_key(self) -> None:
"""Generate a hash key and store to file."""
self._hash_key = self._gen_key(config.HASHKEY_LEN)
# Store to file
with open(self.data_dir + config.KEY_HASHKEY_PATH, "wb") as fd:
pickle.dump(self._hash_key, fd)
def _generate_enc_keys(self) -> None:
"""Generate one encryption key for each possible OT
index and store to file."""
self._enc_keys = []
for _ in range(config.OT_SETSIZE):
self._enc_keys.append(self._gen_key(config.ENCKEY_LEN))
# Store to file
with open(self.data_dir + config.KEY_ENCKEY_PATH, "wb") as fd:
pickle.dump(self._enc_keys, fd)
def offer_ot(self, total_ots: int, port: int = config.OT_PORT) -> None:
"""
        Initialize an OT to transmit the keys.
:param total_ots: Number of OTs to perform
:param port: Port to use for OT Server
:return: None
"""
sender = PyOTSender()
sender.totalOTs = total_ots
if len(self._enc_keys) != config.OT_SETSIZE:
raise RuntimeError(
f"Key Server has {len(self._enc_keys)} keys but OT setsize is "
f"{config.OT_SETSIZE}.")
sender.numChosenMsgs = len(self._enc_keys)
sender.numThreads = config.OT_THREADS
sender.statSecParam = config.OT_STATSECPARAM
sender.inputBitCount = config.OT_INPUT_BIT_COUNT
sender.maliciousSecure = config.OT_MAL_SECURE
sender.hostName = config.OT_HOST
sender.port = port
sender.serverCert = config.KEY_TLS_CERT
sender.serverKey = config.KEY_TLS_KEY
# keys to bytes
keys = keys_to_int(self._enc_keys)
log.info(
f"Listening for OT connection on {sender.hostName}:{port}. TLS: "
f"{config.OT_TLS}")
sender.executeSame(keys, config.OT_TLS)
log.debug(f"OTs done. Thread for port {port} terminating.")
| 4,830
| 35.323308
| 82
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/similarity_metrics.py
|
"""This module contains various similarity metrics.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import copy
import logging
import math
import re
from abc import ABC, abstractmethod
from collections.abc import Iterator
from typing import List, Iterable, Sized
from lib import config as cnf
from lib.record import Record, round_s, get_power
log: logging.Logger = logging.getLogger(__name__)
class SimilarityMetricIterator(Iterator, ABC): # pragma no cover
"""Base class for similarity metrics."""
_rounding_vec: List[int] = None
id_len: int = None
def __iter__(self):
return self
def __init__(self, target: List[float],
*args,
rounding_vec: List[int] = None,
record_id_length: int = None):
if rounding_vec is None:
rounding_vec = cnf.ROUNDING_VEC
if record_id_length is None:
record_id_length = cnf.RECORD_ID_LENGTH
if len(rounding_vec) != record_id_length:
raise ValueError("Rounding Vector has to be as long as ID length:"
f"{len(rounding_vec)} vs {record_id_length}")
self._rounding_vec = rounding_vec
self.id_len = record_id_length
@abstractmethod
def split(self, n: int, j: int = 0) -> List[Iterator]:
"""
        Try to split the iterator into at least (!) n smaller iterators of
        as equal size as possible; otherwise, split into the maximal number.
:param n: # Iterators to generate
:param j: Index to split on
:return: List of created Iterators
"""
pass
@abstractmethod
def __len__(self):
pass
def __copy__(self):
return copy.deepcopy(self)
class RecordIterator(Iterator):
"""
Iterator encapsulating a similarity metric iterator but returning record
objects.
"""
def __init__(self, it: Iterable, hash_key: bytes):
# noinspection PyTypeChecker
self._iterator = iter(it)
if isinstance(it, Sized):
self._len = len(it)
else:
self._len = 0
self._hash_key = hash_key
def __iter__(self):
return self
def __next__(self) -> Record:
vec = next(self._iterator)
r = Record(vec, hash_key=self._hash_key)
return r
def __len__(self) -> int:
if isinstance(self._iterator, Sized):
return len(self._iterator)
return self._len
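# Usage sketch (illustrative; hash_key would be a key retrieved from the
# key server, and the iterator parameters are hypothetical):
# it = AbsoluteOffsetIterator([1.0, 2.0], 0.1, rounding_vec=[2, 2],
#                             record_id_length=2)
# for record in RecordIterator(it, hash_key):
#     ...  # each item is a Record built from the next candidate vector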
class OffsetIterator(SimilarityMetricIterator, ABC):
"""Common base clase for offset metrics."""
def __repr__(self) -> str:
return f"<OffsetIterator from {self.min} to {self.max}>"
end = False
def split(self, n: int, j: int = 0) -> List[Iterator]:
"""
        Try to split the iterator into at least n smaller iterators of as
        equal size as possible; otherwise, split into the maximal number.
:param n: # Iterators to generate
:param j: Index to split on
:return: List of created Iterators
"""
if self.cur_vec != self.min or self.end:
raise RuntimeError("Cannot call split on used iterator!")
iterators = []
inc = self.increments[j]
diff = (self.max[j] - self.min[j]) / n
diff = max(diff / inc, 1)
for i in range(n):
if i == 0:
it = copy.deepcopy(self)
it.min[j] = round_s(it.min[j], self._rounding_vec[j])
it.max[j] = round_s(it.min[j] + int(diff) * inc,
self._rounding_vec[j])
iterators.append(it)
elif round_s(iterators[-1].max[j] + inc,
self._rounding_vec[j]) <= self.max[
j]:
it = copy.deepcopy(self)
it.min[j] = round_s(iterators[-1].max[j] + inc,
self._rounding_vec[j])
it.max[j] = round_s(
self.min[j] + int((i + 1) * diff) * inc,
self._rounding_vec[j])
iterators.append(it)
else: # pragma no cover
break
# Last iterator has to go till end
iterators[-1].max[j] = self.max[j]
for it in iterators:
it.cur_vec = it.min[:]
final_iterators = []
if len(iterators) < n and j < self.id_len - 1:
# Try to split on next index
sub_num = int(math.ceil((n - len(iterators)) / len(iterators)))
for it in iterators:
final_iterators.extend(it.split(sub_num, j + 1))
else:
final_iterators = iterators
if j == 0:
log.debug(f"Final split into {len(final_iterators)} iterators.")
# Debug
# for it in final_iterators:
# print(it.min, it.max)
return final_iterators
@abstractmethod # min and max are dependent on concrete implementation
def __init__(self, target: List[float], offsets: List[float],
rounding_vec: List[int] = None,
record_id_length: int = None):
super().__init__(target, rounding_vec=rounding_vec,
record_id_length=record_id_length)
self.offsets = offsets
self.start_pos = 0
self.pos = self.id_len - 1
self.end = False
self.increments = []
self.min = []
self.max = []
self.cur_vec = []
def __next__(self) -> tuple:
if self.end:
raise StopIteration
state = self.cur_vec[:]
# Increment state
while self.pos >= 0 and (
self.increments[self.pos] == 0 or (
round_s(self.cur_vec[self.pos] + self.increments[self.pos],
self._rounding_vec[self.pos]) > self.max[self.pos]
)
):
# We are at the maximum for this position, so we move one position
# to the left.
# 1. Reset current pos. to minimum
self.cur_vec[self.pos] = self.min[self.pos]
            # 2. The reset may move the value across a power of ten such
            # that the increment itself changes
self.increments[self.pos] = compute_increment(
self.cur_vec[self.pos], self._rounding_vec[self.pos])
# 3. Move cursor left
self.pos -= 1
if self.pos >= 0 and \
self.increments[self.pos] > 0 and \
round_s(self.cur_vec[self.pos] + self.increments[self.pos],
self._rounding_vec[self.pos]) <= self.max[self.pos]:
# We can increment the value at the current position
self.cur_vec[self.pos] = round_s(
self.cur_vec[self.pos] + self.increments[self.pos],
self._rounding_vec[self.pos])
            # The increment may push the value across a power of ten such
            # that the increment itself changes
self.increments[self.pos] = compute_increment(
self.cur_vec[self.pos], self._rounding_vec[self.pos])
# Go back to last value
self.pos = self.id_len - 1
else:
# No increment possible
self.end = True
return tuple(state)
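    # Illustration (hypothetical 2-D iterator with min=[1, 1], max=[2, 2]
    # and increments of 1): __next__ yields (1, 1), (1, 2), (2, 1), (2, 2);
    # the rightmost position varies fastest, like an odometer.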
def __len__(self) -> int:
return comp_offset_num(self)
class AbsoluteOffsetIterator(OffsetIterator):
"""
Offset Metric that uses an absolute distance for each item in list.
"""
def __init__(self, target: List[float], offset: float,
rounding_vec: List[int] = None,
record_id_length: int = None):
offsets = [offset for _ in range(len(target))]
super().__init__(target, offsets, rounding_vec, record_id_length)
self.min = [] # most negative value
self.max = [] # most positive value
for i, e in enumerate(target):
if i < self.id_len:
self.min.append(round_s(e - offset, self._rounding_vec[i]))
self.max.append(round_s(e + offset, self._rounding_vec[i]))
self.increments.append(
compute_increment(self.min[i], self._rounding_vec[i])
)
if self.min[-1] + self.increments[i] > self.max[-1]:
# Otherwise we might get conflicts for 100 and 99
self.min[-1] = e
else:
self.min.append(e)
self.cur_vec = self.min[:]
class RelativeOffsetIterator(OffsetIterator):
"""
Offset Metric that uses a relative distance for each item in list.
"""
def __init__(self, target: List[float], offset: float,
rounding_vec: List[int] = None,
record_id_length: int = None):
"""
:param target:
:param offset: Offset in Percent < 100
:param rounding_vec: Vector with rounding values
:param record_id_length:
"""
offsets = [offset for _ in range(len(target))]
super().__init__(target, offsets, rounding_vec=rounding_vec,
record_id_length=record_id_length)
offset = offset / 100
self.min = [] # most negative value
self.max = [] # most positive value
for i, e in enumerate(target):
if i < self.id_len:
self.min.append(round_s(e * (1 - offset),
self._rounding_vec[i]))
self.max.append(round_s(e * (1 + offset),
self._rounding_vec[i]))
self.increments.append(
compute_increment(self.min[i], self._rounding_vec[i])
)
if self.min[-1] + self.increments[i] > self.max[-1]:
# Otherwise we might get conflicts for 100 and 99
self.min[-1] = e
else:
self.min.append(e)
self.cur_vec = self.min[:]
# print(self.min, self.max, self.increments)
class VariableOffsetIterator(OffsetIterator):
"""
Offset Metric that uses a different relative distance for each item in
list.
"""
def __init__(self, target: List[float],
offsets: List[float],
positive_only: bool = False,
rounding_vec: List[int] = None,
record_id_length: int = None):
"""
:param target:
:param offsets: List of offsets for each entry of the target vector.
len(offset) == len(target) !
Special values:
0: No variation
:param rounding_vec: Vector with rounding values
:param record_id_length:
"""
super().__init__(target, offsets, rounding_vec=rounding_vec,
record_id_length=record_id_length)
if self.id_len != len(offsets):
raise ValueError(
f"Offset List {len(offsets)} has to have ID length "
f"({self.id_len}).")
offsets = [i / 100 for i in offsets]
self.min = [] # most negative value
self.max = [] # most positive value
for i, e in enumerate(target):
if i < self.id_len:
if positive_only:
self.min.append(
round_s(e, self._rounding_vec[i]))
else:
self.min.append(
round_s(e * (1 - offsets[i]), self._rounding_vec[i]))
self.max.append(
round_s(e * (1 + offsets[i]), self._rounding_vec[i]))
self.increments.append(
compute_increment(self.min[i], self._rounding_vec[i])
)
if self.min[-1] + self.increments[i] > self.max[-1]:
# Otherwise we might get conflicts for 100 and 99
self.min[-1] = e
else:
self.min.append(e)
self.cur_vec = self.min[:]
def compute_increment(n: float, rnd: int) -> float:
"""
Compute the smallest increment for n.
Special case rnd==0: increment is 1, n is exact int
:param n: Value to increment
:param rnd: Rounding value
:return: Increment
"""
if rnd == 0:
# Special case
return 1
if n == 0:
# Special case
return 10 ** (1 - rnd)
power = get_power(n)
inc_power = power + 1 - rnd
increment = 10 ** inc_power
return increment
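# Worked examples (assuming get_power returns floor(log10(n)), e.g.
# get_power(123) == 2):
# compute_increment(123, 3) -> 10 ** (2 + 1 - 3) == 1, so 123 steps to 124.
# compute_increment(0.5, 2) -> 10 ** (-1 + 1 - 2) == 0.01, so 0.5 steps to
# 0.51.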
def comp_offset_num(o: OffsetIterator) -> int:
"""Compute the number of elements produces by an offest iterator"""
total = 1
var_per_pos = []
for i in range(o.id_len):
possibilities = 0
# increment might change
cur_min = o.min[i]
inc = compute_increment(o.min[i], o._rounding_vec[i])
if cur_min == 0:
power = 0
else:
power = get_power(cur_min) + 1
if 10 ** power < o.max[i]:
cur_max = 10 ** power
# +0.5inc b/c of float imprecision
possibilities += int((cur_max - cur_min + 0.5 * inc) / inc)
# Update increment
inc = compute_increment(cur_max, o._rounding_vec[i])
cur_min = cur_max
power = get_power(cur_min) + 1
        # Now we can count until the end
# +0.5inc b/c of float imprecision
possibilities += int((o.max[i] - cur_min + 0.5 * inc) / inc)
possibilities += 1
# +1 because min and max are always contained
var_per_pos.append(possibilities)
for n in var_per_pos:
total *= n
return round(total)
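# Worked example (assuming get_power returns floor(log10(n))): for min=[95],
# max=[110] and a rounding value of 2, the iterator yields 95..99 in steps
# of 1, then 100 and 110 in steps of 10, so comp_offset_num returns 7.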
def map_metric(name: str) -> (SimilarityMetricIterator, list):
"""
Map the given string name to a similarity metric.
:param name: The name of the similarity metric.
:return: The similarity metric class and Arguments
"""
if (re.match(r'absOffset-\d+', name) is not None
or re.match(r'offset-\d+', name) is not None):
found = re.findall(r'\d+', name)
if len(found) == 1:
# int
args = (int(found[0]),)
else:
# float
args = (float(f"{found[0]}.{found[1]}"),)
return AbsoluteOffsetIterator, args
elif re.match(r'relOffset-\d+', name) is not None:
found = re.findall(r'\d+', name)
if len(found) == 1:
# int
args = (int(found[0]),)
else:
# float
args = (float(f"{found[0]}.{found[1]}"),)
return RelativeOffsetIterator, args
elif name == "wzl1":
# Any werkstueck, rest exact
offsets = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1000, 0, 0]
return VariableOffsetIterator, (offsets, True)
elif name == "wzl2":
        # Any workpiece, rest exact
offsets = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 400]
return VariableOffsetIterator, (offsets, True)
else:
raise ValueError(f"No similarity metric with name {name} exists.")
| 15,058
| 35.11271
| 80
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/base_client.py
|
#!/usr/bin/env python3
"""Base Client for both client applications.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import atexit
import logging
import math
import multiprocessing as mp
import sys
import time
from abc import ABC, abstractmethod
from typing import List, Tuple, Iterable
import requests
import urllib3
# noinspection PyUnresolvedReferences
from memory_profiler import profile
from lib import config, helpers
from lib.helpers import from_base64, encryption_keys_from_int
sys.path.append(config.WORKING_DIR + 'cython/ot/')
sys.path.append(config.WORKING_DIR + 'cython/psi/')
# Python Version of libOTe
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTReceiver # noqa
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSIReceiver # noqa
log: logging.Logger = logging.getLogger(__name__)
KEYSERVER = f"https://{config.KEYSERVER_HOSTNAME}:{config.KEY_API_PORT}"
STORAGESERVER = f"https://{config.STORAGESERVER_HOSTNAME}:" \
f"{config.STORAGE_API_PORT}"
class UserType:
"""Allowed user types in this system."""
CLIENT = "client"
OWNER = "provider"
class ServerType:
"""Type of servers used by the system."""
KeyServer = "key_server"
StorageServer = "storage_server"
class BaseClient(ABC):
"""Abstract base class for the end-user clients of client and data
providers. """
user: str = None
password: str = None
KEYSERVER: str = None
STORAGESERVER: str = None
_hash_key: bytes = None
eval = {
'ot_tcpdump_sent': [],
'ot_tcpdump_recv': [],
'psi_tcpdump_sent': [],
'psi_tcpdump_recv': []
}
@property
@abstractmethod
def type(self) -> str:
"""It has to be defined whether this is a client or owner client
app, see UserType above."""
pass # pragma no cover
def __init__(self, username: str) -> None:
"""Create object."""
# Disable warnings for self-signed cert.
urllib3.disable_warnings(urllib3.exceptions.SubjectAltNameWarning)
self.user = username
self.KEYSERVER = KEYSERVER + "/" + self.type
self.STORAGESERVER = STORAGESERVER + "/" + self.type
def get_auth_data(self, url: str) -> Tuple[str, str]:
"""Return authentication information for authentication towards
key or storage server.
        :param url: URL to determine the server from
        :return: (Username, Token)
"""
if KEYSERVER in url:
server_type = ServerType.KeyServer
elif STORAGESERVER in url:
server_type = ServerType.StorageServer
else:
raise ValueError(f"Unknown server type for url: {url}")
return self.user, self.get_token(server_type)
def get(self, url: str,
auth: Tuple[str, str] or None = None) -> requests.Response:
"""
Perform a get request and check result.
:param url: URL to request
:param auth: Only if no Token Authentication used.
:return: Response object.
"""
if auth is None:
auth = self.get_auth_data(url)
r = requests.get(url, verify=config.TLS_ROOT_CA, auth=auth)
if r.status_code == 401:
raise RuntimeError(
f"Authentication failed at: {url}.")
elif r.status_code != 200 and r.status_code != 202:
r.raise_for_status()
else:
return r
def post(self, url: str, json: dict or Iterable,
auth: Tuple[str, str] or None = None) -> requests.Response:
"""
Perform a POST request and check result.
:param url: URL to request
:param json: JSON to transmit with request
:param auth: Only if no Token Authentication used.
:return: Response object.
"""
if auth is None:
auth = self.get_auth_data(url)
r = requests.post(url, verify=config.TLS_ROOT_CA, auth=auth, json=json)
if r.status_code == 401:
raise RuntimeError(
f"Authentication failed at: {url}.")
elif r.status_code != 200 and r.status_code != 202:
r.raise_for_status()
else:
return r
def get_hash_key(self) -> bytes:
"""Return hash key retrieved from key server
:return Hash Key as Bytes object
"""
if self._hash_key is None:
r = self.get(f"{self.KEYSERVER}/hash_key")
# Revert encoding -> sent as Base64
self._hash_key = from_base64(r.json()['hash_key'])
log.debug("Retrieved Hash Key: " + str(self._hash_key))
return self._hash_key
# noinspection PyUnboundLocalVariable
    def _retrieve_keys(self, indices: list, q=None) -> List[int]:
        """Return the encryption keys for the given indices after retrieval
        from the key server.
        :param indices: List of indices to retrieve
        :return: List of retrieved keys (as ints) in same order
        """
        num_ots = len(indices)
r = self.get(
f"{self.KEYSERVER}/key_retrieval?totalOTs={num_ots}")
d = r.json()
if not d['success']:
raise RuntimeError(f"Key retrieval failed: {d['msg']}")
if d['tls'] != config.OT_TLS:
raise RuntimeError(
f"Mismatch of server and client TLS settings. Client: "
f"{config.OT_TLS} Server: {d['tls']}.")
host = d['host']
port = d['port']
if d['tls']:
tls = "with"
else:
tls = "without"
log.info(f"Connecting for OT to host {host} on port {port} {tls} "
f"TLS. Will perform {num_ots} OTs.")
if config.EVAL: # pragma no cover
to_svr, f1 = helpers.start_trans_measurement(
port, direction="dst", sleep=False)
from_svr, f2 = helpers.start_trans_measurement(
port, direction="src", sleep=False)
if q is not None:
q.put(f1) # Sent first
q.put(f2) # Recv second
time.sleep(1) # Wait for startup
        keys = self._receive_ots(indices,
host, port, d['tls'])
log.debug(f"Completed OT.")
return keys
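    # Example reply consumed above (shape as exercised by the unit tests):
    # {'success': True, 'host': '127.0.0.1', 'port': 1213, 'totalOTs': 10,
    #  'tls': config.OT_TLS}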
def _get_enc_keys(self, all_indices: List[int]) -> List[bytes]:
"""Return the encryption keys for the given indizes after retrieval
from the key server.
:param all_indices: List of Indices to retrieve
:return: List of retrieved keys (as bytes) in same order
"""
if len(all_indices) == 0:
return []
        # The index list may still contain duplicates.
indices = []
mapping = {} # Map for 'index: entry_in_indices_list'
for index in all_indices:
if index not in indices:
mapping[index] = len(indices)
indices.append(index)
keys = []
step = config.OT_MAX_NUM
if config.PARALLEL and len(indices) > step:
if len(indices) / step > config.MAX_PROCS:
# Would spawn too many processes
step = int(math.ceil(len(indices) / config.MAX_PROCS))
m = mp.Manager()
results = m.dict()  # Keyed by process number so order can be restored
errors = m.list()
processes = []
queues = []
j = 0
def parallel_func(
indices: List[int],
proc_num: int,
q: mp.Queue): # pragma no cover
"""Process function."""
try:
res = self._retrieve_keys(indices, q=q)
results[proc_num] = res
except Exception as e:
errors.append(e)
for i in range(0, len(indices), step):
queue = mp.Queue()
queues.append(queue)
p = mp.Process(target=parallel_func,
args=(indices[i:i + step], j,
queue))
processes.append(p)
p.start()
atexit.register(p.terminate)
j += 1
log.info("All OT processes started.")
# Wait for termination
for i, p in enumerate(processes):
p.join()
atexit.unregister(p.terminate)
if config.EVAL: # pragma no cover
q = queues[i]
self.eval['ot_tcpdump_sent'].append(q.get())  # Sent first
self.eval['ot_tcpdump_recv'].append(q.get())  # Recv second
# Check result
for e in errors: # pragma no cover
log.error(str(e))
raise e
# Reassemble
for key in sorted(results.keys()):
keys.extend(results[key])
else:
q = None
if config.EVAL: # pragma no cover
q = mp.Queue()
for i in range(0, len(indices), step):
keys.extend(self._retrieve_keys(indices[i:(i + step)], q=q))
if config.EVAL: # pragma no cover
self.eval['ot_tcpdump_sent'].append(q.get())  # Sent first
self.eval['ot_tcpdump_recv'].append(q.get())  # Recv second
# Convert keys appropriately
converted_keys = encryption_keys_from_int(keys)
# Map back to original indices with duplicates
result = []
for index in all_indices:
# get index in converted_keys:
ind = mapping[index]
result.append(converted_keys[ind])
return result
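# A minimal, self-contained sketch of the deduplication performed above,
# assuming nothing beyond the standard library; the key strings are
# fabricated stand-ins for key-server responses.
@staticmethod
def _example_dedup_mapping() -> None:
    """Illustrate the index deduplication and back-mapping."""
    all_indices = [5, 3, 5, 9, 3]
    indices, mapping = [], {}
    for index in all_indices:
        if index not in indices:
            mapping[index] = len(indices)
            indices.append(index)
    # One fetched key per unique index (placeholder values).
    fetched = [f"key-for-{i}" for i in indices]
    # Duplicates in the original list receive the same key again.
    result = [fetched[mapping[i]] for i in all_indices]
    assert result == ["key-for-5", "key-for-3", "key-for-5",
                      "key-for-9", "key-for-3"]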
def set_password(self, pwd: str) -> None:
"""
Set password for this user
:param pwd: Password of this user
:return: None
"""
self.password = pwd
def get_token(self, server_type: str) -> str:
"""Retrieve a token from the given server.
:param server_type: The type of server to get the token from
:return: Token as string, can be used for authentication as is.
"""
log.debug("Get token from key server.")
if self.password is None:
raise ValueError("To retrieve a token, the user has to be "
"authenticated.")
if server_type == ServerType.StorageServer:
server = self.STORAGESERVER
elif server_type == ServerType.KeyServer:
server = self.KEYSERVER
else:
raise ValueError(f"No Server '{server_type}' exists.")
r = self.get(
f"{server}/gen_token",
auth=(self.user, self.password))
r = r.json()
if not r['success']:
msg = f"Token generation failed: {r['msg']}"
raise RuntimeError(msg)
else:
return r['token']
@staticmethod
def _receive_ots(
choices, host, port, tls, threads: int =
config.OT_THREADS, root_ca: str = config.TLS_ROOT_CA,
mal_sec: bool = config.OT_MAL_SECURE,
stat_sec: int = config.OT_STATSECPARAM, input_bit_count:
int = config.OT_INPUT_BIT_COUNT,
num_chosen_msgs: int = config.OT_SETSIZE) -> List[int]:
"""
Execute an OT with the given choices
:return: List of received INTEGERS
"""
log.debug("Starting OT.")
recv = PyOTReceiver()
recv.totalOTs = len(choices)
recv.numThreads = threads
recv.hostName = host
recv.port = port
recv.rootCA = root_ca
recv.maliciousSecure = mal_sec
recv.statSecParam = stat_sec
recv.inputBitCount = input_bit_count
recv.numChosenMsgs = num_chosen_msgs
result = recv.execute(choices, tls)
log.debug("OTs complete.")
return result
@staticmethod
def _receive_psi(
client_set, host, port, tls, threads: int =
config.OT_THREADS, root_ca: str = config.TLS_ROOT_CA,
stat_sec: int = config.OT_STATSECPARAM,
scheme: str = config.PSI_SCHEME) -> List[int]:
"""
Perform a PSI with the given client_set
:return: All items that match the server_set
"""
log.debug("Starting PSI.")
recv = PyPSIReceiver()
recv.statSecParam = stat_sec
recv.setSize = len(client_set)
recv.hostName = host
recv.port = port
recv.numThreads = threads
recv.tls = tls
recv.rootCA = root_ca
# We use a new process to make killing possible
q = mp.Queue()
def _exec():  # pragma no cover
result = recv.execute(scheme, client_set)
q.put(result)
p = mp.Process(target=_exec)
p.start()
atexit.register(p.terminate)
p.join()
atexit.unregister(p.terminate)  # pass the function itself, do not call it
result = q.get()
# result = recv.execute(scheme, client_set)
# The PSI only returns the indices of the matching client_set
log.debug("PSI complete.")
return [client_set[r] for r in result]
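# A self-contained sketch of the "killable child process" pattern used in
# _receive_psi above, with the PSI call replaced by a trivial stand-in.
# Standard library only; assumes a fork-based start method (Linux), as the
# module itself does by using a local function as process target.
@staticmethod
def _example_killable_worker() -> None:
    """Run work in a child process so it can be terminated early."""
    def work(queue: mp.Queue) -> None:  # stand-in for recv.execute(...)
        queue.put(sum(range(10)))

    q = mp.Queue()
    p = mp.Process(target=work, args=(q,))
    p.start()
    atexit.register(p.terminate)  # clean up the child on early exit
    p.join()
    atexit.unregister(p.terminate)  # the function itself, not a call
    assert q.get() == 45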
| 13,198
| 34.01061
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/record.py
|
#!/usr/bin/env python3
"""
Class representing one record.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import hashlib
import json
import logging
import struct
from typing import List, Tuple, Any
from Crypto.Cipher import AES
from lib import config
from lib.helpers import to_base64, from_base64
log: logging.Logger = logging.getLogger(__name__)
def get_power(n: float) -> int:
"""Get power of number in scientific representation."""
if n == 0:
raise ValueError("log10(0) undefined!")
power = 0
abs_n = abs(n)
if abs_n >= 1:
while 10 ** (power + 1) <= abs_n:
power += 1
else:
while 10 ** power > abs_n:
power -= 1
return power
def round_s(n: float, rnd: int) -> float:
"""Round to rnd digits, including those before point.
rnd = 0 means exact value (no rounding).
Examples for rnd = 3:
1.1111 = 1.11
22.2222 = 22.2
222.2222 = 222
2222.2222 = 2220
66666.66666 = 66700
"""
if rnd < 0:
raise ValueError(
f"Rounding values has to be 0 or larger, but is: {rnd}")
if rnd == 0 or n == 0:
# Exact
return n
power = get_power(n)
n = n * 10 ** (-power)
n = round(n, rnd - 1) # One number before point
n = n * 10 ** power
fac = max(0, rnd - 1 - power)
n = round(n, fac) # Round because of imprecision
return n
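# Worked examples for round_s, matching the docstring above; a quick
# sanity sketch that can be run directly.
def _example_round_s() -> None:
    """Check the documented rounding behaviour."""
    assert round_s(1.1111, 3) == 1.11
    assert round_s(22.2222, 3) == 22.2
    assert round_s(222.2222, 3) == 222
    assert round_s(2222.2222, 3) == 2220
    assert round_s(66666.66666, 3) == 66700
    assert round_s(1.2345, 0) == 1.2345  # rnd = 0 keeps the exact value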
def hash_to_index(hash_v: bytes, bit_len: int) -> int:
"""Return a index of given bit length derived from the hash value."""
byte_len = bit_len // 8
overhang = bit_len % 8
in_bytes = hash_v[:byte_len]
num = int.from_bytes(in_bytes, byteorder='little')
if overhang != 0:
num += (hash_v[byte_len] % (2 ** overhang)) * (2 ** (byte_len * 8))
return num
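# A worked example for hash_to_index with a two-byte "hash"; bit_len = 12
# gives one full byte plus a 4-bit overhang.
def _example_hash_to_index() -> None:
    """Illustrate full-byte and overhang handling."""
    hash_v = bytes([0xFF, 0x01])
    # byte_len = 1: num starts at 0xFF = 255 (little endian).
    # overhang = 4: (0x01 % 2**4) * 2**8 = 256 is added on top.
    assert hash_to_index(hash_v, 12) == 255 + 256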
def round_record(record: List[float],
rnd_vec: List[int] = config.ROUNDING_VEC,
id_len: int = config.RECORD_ID_LENGTH) -> List[float]:
"""
Transfer record to rounded representation:
(11.1, 222.2, 3333.33) -> (11.1, 222, 3330)
:param rnd_vec: Record Rounding Parameters
:param id_len: ID length of record (X Part)
:param record: Vector to transform
:return: Transformed result vector
"""
res = []
for i, e in enumerate(record[:id_len]):
res.append(round_s(e, rnd_vec[i]))
return res
class Record:
"""Class representing one data record."""
_long_hash: bytes = None
_hash_key: bytes = None
_encryption_key: bytes = None
_rounding_vector: List[int] = None
def __ne__(self, o: object) -> bool:
return not self.__eq__(o)
def __eq__(self, o: object) -> bool:
if not isinstance(o, Record):
return False
if self.owner is None or o.owner is None:
return self.record == o.record
else:
return self.record == o.record and self.owner == o.owner
def __init__(self, record: List[float] or Tuple[float], owner: str = None,
hash_key: bytes = None
) -> None:
"""Store record and owner. Hashes are only generated on-demand."""
for i in record:
if not isinstance(i, int) and not isinstance(i, float):
raise TypeError("Records must only contain numbers!")
if len(record) != config.RECORD_LENGTH:
raise ValueError(
f"Record has not a length of {config.RECORD_LENGTH},"
f"But: {len(record)}!")
self.record = [float(i) for i in record]
self.owner = owner
self._identifier_length = config.RECORD_ID_LENGTH
self._rounding_vector = config.ROUNDING_VEC
if hash_key is not None:
self.set_hash_key(hash_key)
def get_long_hash(self) -> bytes:
"""Return the long hash used in the bloom filter and for record
retrieval. It is a 512 bit SHA-3. Hashes are only generated on-demand.
"""
if self._hash_key is None:
raise ValueError("The hash key has to be set before hashes can "
"be computed!")
if self._long_hash is None:
m = hashlib.sha3_512(self._hash_key)
m.update(self._get_identifier())
self._long_hash = m.digest()
return self._long_hash
def get_psi_index(self) -> int:
"""Return the shorter hash used for PSI converted to an int.
Hashes are only generated on-demand.
:return: PSI Index as Integer
"""
return hash_to_index(self.get_long_hash(), config.PSI_INDEX_LEN)
def get_ot_index(self) -> int:
"""Return the shorter hash used for OT converted to an int.
Hashes are only generated on-demand.
:return: OT Value as Integer
"""
return hash_to_index(self.get_long_hash(), config.OT_INDEX_LEN)
def _get_rounded_record(self) -> List[float]:
"""Return the record with each entry rounded according to the"""
return round_record(self.record, self._rounding_vector,
config.RECORD_ID_LENGTH)
def set_hash_key(self, key: bytes) -> None:
"""Set the key ussed in hashing."""
self._hash_key = key
def set_encryption_key(self, key: bytes) -> None:
"""Define the key used for encryption."""
self._encryption_key = key
def _get_identifier(self) -> bytes:
"""Return portion of record that is used for hashing as byte
string."""
return str(self._get_rounded_record()).encode('utf-8')
def get_encrypted_record(self, enc_key: bytes = None, nonce: bytes =
None) -> dict:
"""
Return the encrypted form of the record.
:param enc_key: Key used for encryption.
:param nonce: DEBUG ONLY!
:return: Encrypted record as dict
"""
if enc_key is None and self._encryption_key is None:
raise ValueError("No encryption key defined.")
if enc_key is not None:
self._encryption_key = enc_key
length = len(self.record).to_bytes((len(self.record).bit_length() + 7)
// 8,
byteorder='big')
buf = struct.pack('%sd' % len(self.record), *self.record)
data = buf
longhash = self.get_long_hash()
key = self._encryption_key
# log.debug(f"Encryption - Using key: {self._encryption_key}")
if nonce is not None:
cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
else:
cipher = AES.new(key, AES.MODE_GCM)
cipher.update(length)
cipher.update(longhash)
ciphertext, mac = cipher.encrypt_and_digest(data)
json_k = ['nonce', 'length', 'hash', 'ciphertext', 'mac']
json_v = [to_base64(x) for x in
(cipher.nonce, length, longhash, ciphertext, mac)]
result = dict(zip(json_k, json_v))
return result
@classmethod
def from_ciphertext(cls, ciphertext: dict, key: bytes) -> Any:
"""Create a record by decrypting a ciphertext"""
log.debug(f"Decryption - Using key: {key}")
b64 = ciphertext
json_k = ['nonce', 'length', 'hash', 'ciphertext', 'mac']
jv = {k: from_base64(b64[k]) for k in json_k}
cipher = AES.new(key, AES.MODE_GCM, nonce=jv['nonce'])
cipher.update(jv['length'])
cipher.update(jv['hash'])
length = int.from_bytes(jv['length'], byteorder='big')
plaintext = cipher.decrypt_and_verify(jv['ciphertext'], jv['mac'])
record_list = struct.unpack('%sd' % length, plaintext)
record = Record(record_list)
return record
def to_hash_rec_tuple(self) -> Tuple[str, List[float]]:
"""Return record as a tuple (long-hash HEX, record)"""
return "0x" + self.get_long_hash().hex(), self.record
def to_full_tuple(self) -> Tuple[str, List[float], str]:
"""Return record as a tuple (long-hash. Hex, reocrd, owner)"""
if self.owner is None:
raise ValueError(
"Full tuple can only be provided if an owner is defined.")
return "0x" + self.get_long_hash().hex(), self.record, self.owner
def get_owner(self):
"""Return owner."""
if self.owner is None:
raise RuntimeError("No owner set.")
return self.owner
def get_upload_format(self) -> Tuple[str, str, str]:
"""
Return format required for upload to storage server
:return: [Base64(long_hash), json.dumps(ciphertext), owner]
"""
return (
to_base64(self.get_long_hash()),
json.dumps(self.get_encrypted_record()),
self.get_owner()
)
def __str__(self) -> str:
if self._hash_key is None and self.owner is None:
return str((self.record,))
elif self._hash_key is None:
return str((self.record, self.owner))
if self.owner is None:
return str(self.to_hash_rec_tuple())
else:
return str(self.to_full_tuple())
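# A usage sketch for Record: an encrypt-then-decrypt round trip. Assumes
# the default config (RECORD_LENGTH entries per record, 128-bit keys) and
# that PyCryptodome is installed; all key material is a random throwaway.
def _example_record_roundtrip() -> None:
    """Round-trip a record through AES-GCM encryption."""
    import os
    rec = Record([float(i) for i in range(config.RECORD_LENGTH)])
    rec.set_hash_key(os.urandom(16))   # hash key for the AAD long hash
    enc_key = os.urandom(16)           # AES-128-GCM key
    ciphertext = rec.get_encrypted_record(enc_key=enc_key)
    restored = Record.from_ciphertext(ciphertext, enc_key)
    assert restored == rec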
| 9,312
| 34.143396
| 78
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/logging.py
|
#!/usr/bin/env python3
"""Custom logger supporting colors.
Adapted from:
https://stackoverflow.com/questions/384076/how-can-i-color-python-logging
-output
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import copy
import logging
import os
import sys
from lib import config
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
# The background is set with 40 plus the number of the color, and the
# foreground with 30
# These are the sequences needed to get colored output
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
COLORS = {
'WARNING': YELLOW,
'INFO': WHITE,
'DEBUG': BLUE,
'CRITICAL': YELLOW,
'ERROR': RED
}
FORMAT = "[%(asctime)s][%(levelname)-18s][$BOLD%(name)-22s$RESET] " \
"%(message)s ($BOLD%(filename)s$RESET:%(lineno)d) "
class ColoredFormatter(logging.Formatter):
"""Formatter for colored console output."""
def __init__(self, use_color=True):
if use_color:
fmt = FORMAT.replace("$RESET", RESET_SEQ).replace("$BOLD",
BOLD_SEQ)
else: # pragma no cover
fmt = FORMAT.replace("$RESET", "").replace("$BOLD", "")
logging.Formatter.__init__(self, fmt)
self.use_color = use_color
def format(self, record): # pragma no cover
"""Format the given message."""
record = copy.copy(record)
levelname = record.levelname
if self.use_color and levelname in COLORS:
color: str = COLOR_SEQ % (30 + COLORS[levelname])
record.msg = color + record.msg + RESET_SEQ
record.levelname = color + levelname + RESET_SEQ
return logging.Formatter.format(self, record)
def add_colored_formatter(
logger: logging.Logger = logging.getLogger()) -> None:
"""Add ColoredFormatter to a given logger or to the root logger."""
console = logging.StreamHandler(sys.stdout)
console.setFormatter(ColoredFormatter())
logger.addHandler(console)
def add_filehandler(file: str,
logger: logging.Logger = logging.getLogger()
) -> logging.FileHandler:
"""Add filehandler for given file to a given logger or to the root
logger."""
filehandler = logging.FileHandler(file)
filehandler.setFormatter(logging.Formatter(
"[%(asctime)s][%(levelname)-7s][%(name)-22s] "
"%(message)s (%(filename)s:%(lineno)d) "))
logger.addHandler(filehandler)
return filehandler
def configure_root_loger(logging_level: int,
file: str or None = None) -> logging.Logger:
"""Add both the colored formatter and the filehandler to the root logger.
"""
root = logging.getLogger()
for h in root.handlers:
root.removeHandler(h)
root.setLevel(logging_level)
add_colored_formatter(logger=root)
if file is not None:
os.makedirs(os.path.dirname(file), exist_ok=True)
add_filehandler(file, logger=root)
error_handler = add_filehandler(
config.WORKING_DIR + 'data/error.log',
logger=root)
error_handler.setLevel(logging.ERROR)
return root
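# A minimal usage sketch: colored console output for a single logger,
# without touching files. configure_root_loger additionally attaches two
# file handlers (including data/error.log), so add_colored_formatter is
# used directly here.
def _example_logging_setup() -> None:
    """Attach the colored formatter to a throwaway logger."""
    logger = logging.getLogger("demo")
    logger.setLevel(logging.DEBUG)
    add_colored_formatter(logger)
    logger.info("colored info message")
    logger.error("colored error message")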
| 3,225
| 30.019231
| 77
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/config.py
|
#!/usr/bin/env python3
"""This module contains central configurations.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import math
import multiprocessing
import os
# GENERAL----------------------------------------------------------------------
DEBUG = True
PSI_MODE = False
EVAL = True
PARALLEL = True
MAX_PROCS = int(math.ceil(multiprocessing.cpu_count() / 2))
# Celery can only process as many tasks as CPUs.
# -----------------------------------------------------------------------------
# DIRECTORY STRUCTURE----------------------------------------------------------
# os.path.dirname moves one directory up
_cur_dir = os.path.dirname(
os.path.dirname(
os.path.dirname(os.path.abspath(__file__))))
WORKING_DIR = os.path.abspath(_cur_dir) + '/'
DATA_DIR = WORKING_DIR + 'data/'
EVAL_DIR = WORKING_DIR + 'eval/'
LOG_DIR = DATA_DIR + 'logs/'
EVAL_RESULT_DIR = WORKING_DIR + 'results_eval/'
# -----------------------------------------------------------------------------
# TLS--------------------------------------------------------------------------
TLS_CERT_DIR = DATA_DIR + "certs/"
TLS_ROOT_CA = TLS_CERT_DIR + "rootCA.crt"
# -----------------------------------------------------------------------------
# EVAL SETTINGS----------------------------------------------------------------
if EVAL:
DATA_DIR += 'eval/'
os.makedirs(DATA_DIR, exist_ok=True)
TEMP_DIR = DATA_DIR + 'tmp/'
os.makedirs(TEMP_DIR, exist_ok=True)
RAM_INTERVAL = 0.5 # s
# -----------------------------------------------------------------------------
# LOGGING----------------------------------------------------------------------
LOGLEVEL = logging.DEBUG
# -----------------------------------------------------------------------------
# KEY SERVER SETTINGS----------------------------------------------------------
KEYSERVER_HOSTNAME = "localhost"
KEY_API_PORT = 5000
KEY_TLS_CERT = TLS_CERT_DIR + "keyserver.crt"
KEY_TLS_KEY = TLS_CERT_DIR + "keyserver.key"
RANDOMIZE_PORTS = True
KEY_LOGNAME = "key_server"
KEY_LOGFILE = "key_server.log"
KEY_HASHKEY_PATH = "hash_key.pyc"
KEY_ENCKEY_PATH = "encryption_keys.pyc"
KEY_REDIS_PORT = 6379
KEY_CELERY_BROKER_URL = f'redis://localhost:{KEY_REDIS_PORT}/0'
KEYSERVER_DB = "keyserver.db"
# -----------------------------------------------------------------------------
# STORAGE SERVER SETTINGS------------------------------------------------------
STORAGESERVER_HOSTNAME = "localhost"
STORAGE_API_PORT = 5001
STORAGE_TLS_CERT = TLS_CERT_DIR + "storageserver.crt"
STORAGE_TLS_KEY = TLS_CERT_DIR + "storageserver.key"
STORAGE_LOGFILE = "storage_server.log"
STORAGE_DB = "storage.db"
STORAGE_REDIS_PORT = 6380
BLOOM_FILE = 'storage.bloom'
if EVAL: # pragma no cover
BLOOM_CAPACITY = 10 ** 8
BLOOM_ERROR_RATE = 10 ** -20
else: # pragma no cover
BLOOM_CAPACITY = 10 ** 5
BLOOM_ERROR_RATE = 10 ** -8
STORAGE_CELERY_BROKER_URL = f'redis://localhost:{STORAGE_REDIS_PORT}/0'
# -----------------------------------------------------------------------------
# OT Parameters ---------------------------------------------------------------
OT_SETSIZE = 2**20
# equals PyOTSender/PyOTReceiver.numChosenMsgs
OT_THREADS = 1
OT_STATSECPARAM = 40
OT_MAL_SECURE = False
OT_INPUT_BIT_COUNT = 128
OT_PORT = 1213
OT_HOST = "127.0.0.1"
OT_TLS = False
OT_MAX_NUM = 10 # Maximal number of simultaneous OTs
# -----------------------------------------------------------------------------
# PSI Parameters ---------------------------------------------------------------
PSI_SCHEME = "KKRT16"
PSI_SETSIZE = 2**20
PSI_THREADS = 1
PSI_STATSECPARAM = 40
PSI_MAL_SECURE = False
PSI_INPUT_BIT_COUNT = 128
PSI_PORT = 1214
PSI_HOST = "127.0.0.1"
PSI_TLS = False
# -----------------------------------------------------------------------------
# KEY SETTINGS-----------------------------------------------------------------
HASHKEY_LEN = 128
ENCKEY_LEN = 128
# -----------------------------------------------------------------------------
# HASH SETTINGS----------------------------------------------------------------
PSI_INDEX_LEN = 127 # Bit (127 so that we can use the remainder for dummies)
PSI_DUMMY_START_SERVER = 2 ** 127
PSI_DUMMY_START_CLIENT = 2 ** 127 + PSI_SETSIZE
OT_INDEX_LEN = 20 # in Bit
# -----------------------------------------------------------------------------
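# A small sanity sketch (illustrative only): real PSI indices occupy
# [0, 2**PSI_INDEX_LEN), so both dummy ranges sit above them and border
# each other without overlapping, provided both sets respect PSI_SETSIZE.
def _example_psi_ranges() -> None:
    """Check that real indices and the two dummy ranges are disjoint."""
    real_max = 2 ** PSI_INDEX_LEN - 1
    assert real_max < PSI_DUMMY_START_SERVER
    server_dummies = range(PSI_DUMMY_START_SERVER,
                           PSI_DUMMY_START_SERVER + PSI_SETSIZE)
    client_dummies = range(PSI_DUMMY_START_CLIENT,
                           PSI_DUMMY_START_CLIENT + PSI_SETSIZE)
    assert server_dummies.stop == client_dummies.start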
# DISCRETIZATION SETTINGS------------------------------------------------------
RECORD_ID_LENGTH = 10
# RECORD_ROUNDING = 3
RECORD_LENGTH = 100
ROUNDING_VEC = [3 for _ in range(RECORD_ID_LENGTH)]
# -----------------------------------------------------------------------------
| 4,658
| 38.483051
| 80
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/user_database.py
|
#!/usr/bin/env python3
"""User management for access control.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
import secrets
import sqlite3
from abc import abstractmethod
from typing import List, Callable
import sqlalchemy
from werkzeug.security import generate_password_hash, check_password_hash
from lib.base_client import UserType
from lib.database import db
log: logging.Logger = logging.getLogger(__name__)
class Token(db.Model):
"""Represents one token"""
__tablename__ = "tokens"
id = db.Column(db.Integer,
nullable=False,
primary_key=True) # Auto
value = db.Column(db.Text, nullable=False)
client_id = db.Column(db.Integer,
db.ForeignKey("client.id"))
provider_id = db.Column(db.Integer,
db.ForeignKey("owner.id"))
class User:
"""Abstract base class for users."""
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.Text, nullable=False, unique=True)
password = db.Column(db.Text, nullable=False)
@property
@abstractmethod
def tokens(self): # pragma no cover
"""List of tokens of user"""
pass
# Was needed for shared table
# type = db.Column(db.String(20), nullable=False)
# __mapper_args__ = {
# 'polymorphic_on': type,
# 'polymorphic_identity': 'User'
# }
class Client(User, db.Model):
"""Client-type end-users."""
tokens = db.relationship("Token",
uselist=True,
backref='client',
lazy=False)
# __mapper_args__ = {
# 'polymorphic_identity': UserType.CLIENT
# }
class Owner(User, db.Model):
"""Data Providers."""
tokens = db.relationship("Token",
uselist=True,
backref='provider',
lazy=False)
# __mapper_args__ = {
# 'polymorphic_identity': UserType.OWNER
# }
def verify_password(user_type: str, user_id: str, pwd: str) -> bool:
"""Return whether the password is correct for the user with the
given user_id. Raises ValueError if the user does not exist."""
UserCls = get_user_type(user_type)
u: User = UserCls.query.filter_by(username=user_id).first()
if u is None:
raise ValueError(f"No {user_type} with ID '{user_id}' exists.")
return check_password_hash(u.password, pwd)
def verify_token(user_type: str, user_id: str, token: str):
"""Return whether the token is correct for the user with the
given user_id.
Furthermore, remove token hash from DB because tokens can only be
used once.
"""
UserCls = get_user_type(user_type)
u: User = UserCls.query.filter_by(username=user_id).first()
if u is None:
raise ValueError(f"No {user_type} with ID '{user_id}' exists.")
tokens = u.tokens
if len(u.tokens) == 0:
msg = f"No token for user '{user_id}' exists."
raise ValueError(msg)
for t in tokens:
if check_password_hash(t.value, token):
logging.debug("Token correct.")
# Remove token from DB
db.session.delete(t)
db.session.commit()
return True
return False
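# A standalone sketch of the hash-and-compare scheme used for tokens
# above; werkzeug only, no database involved.
def _example_token_hashing() -> None:
    """Show that only the salted hash needs to be stored."""
    token = secrets.token_urlsafe(64)
    stored = generate_password_hash(token, salt_length=32)
    assert check_password_hash(stored, token)
    assert not check_password_hash(stored, "wrong-token")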
def _generate_token() -> str:
"""Generate a random token and return it along the corresponding
SHA3 Hash."""
token = secrets.token_urlsafe(64)
return token
def generate_token(user_type: str, user_id: str):
"""Generate and return a new token for the user with the given
ID. """
UserCls = get_user_type(user_type)
token = _generate_token()
u: User = UserCls.query.filter_by(username=user_id).first()
if u is None:
raise ValueError(f"Could not generate token: No user '{user_id}' "
f"exists.")
token_val = generate_password_hash(token, salt_length=32)
t = Token(value=token_val)
db.session.add(t)
u.tokens.append(t)
db.session.commit()
log.info(f"Generated new token for '{user_id}'.")
return token
def update_password(user_type: str, user_id: str, old_pwd: str, new_pwd: str):
"""Update the password if the credentials are correct."""
UserCls = get_user_type(user_type)
if not verify_password(user_type, user_id, old_pwd):
msg = f"Password change for user '{user_id}' failed because old " \
f'password is wrong.'
raise ValueError(msg)
if len(new_pwd) < 8:
msg = "Password needs to have at least 8 characters!"
raise ValueError(msg)
pwd_hash = generate_password_hash(new_pwd, salt_length=32)
u: UserCls = UserCls.query.filter_by(username=user_id).first()
u.password = pwd_hash
db.session.commit()
log.info(f"Successfully updated password for '{user_id}'.")
def get_all_users(user_type: str) -> List[str]:
"""Return a list containing all user IDs"""
UserCls = get_user_type(user_type)
users = UserCls.query.all()
return [u.username for u in users]
def add_user(user_type: str, user_id: str, password: str):
"""Add a new client, generate a token for API access and return
it.
"""
UserCls: Callable = get_user_type(user_type)
if len(password) < 8:
msg = "Password needs to have at least 8 characters!"
raise ValueError(msg)
pwd_hash = generate_password_hash(password, salt_length=32)
token = generate_password_hash(_generate_token(), salt_length=32)
# noinspection PyUnresolvedReferences
try:
u = UserCls(username=user_id, password=pwd_hash)
t = Token(value=token)
u.tokens.append(t)
db.session.add(t)
db.session.add(u)
db.session.commit()
except (sqlalchemy.orm.exc.FlushError,
sqlalchemy.exc.InvalidRequestError,
sqlalchemy.exc.IntegrityError,
sqlite3.IntegrityError):
db.session.rollback()
msg = f"{user_type.capitalize()} ID '{user_id}' already in use!"
raise ValueError(msg)
log.info(f"Successfully stored '{user_id}' in DB.")
return token
def get_user_type(user_type: str) -> [User]:
"""
Return the class corresponding to the given user_type
:param user_type: Type of user
:return: Class representing that user type.
"""
if user_type == UserType.CLIENT:
return Client
elif user_type == UserType.OWNER:
return Owner
else:
raise TypeError(f"No such User Type exists: {user_type}")
def get_user(user_type: str, username: str) -> [Client, Owner]:
"""
Return the user with the given username
:param user_type: Client or Owner
:param username: username to check
:return: The user object
"""
UserCls = get_user_type(user_type)
c = UserCls.query.filter_by(username=username).first()
if c is None:
raise ValueError(
f"{user_type.capitalize()} '{username}'does not exist!")
return c
| 7,043
| 30.587444
| 78
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/db_argparser.py
|
#!/usr/bin/env python3
"""Argument Parser for DB CLIs.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
def get_db_parser() -> argparse.ArgumentParser:
"""Return an argument parser for the DB CLIs."""
db_parser = argparse.ArgumentParser(description="DB CLI")
action_group = db_parser.add_mutually_exclusive_group(required=True)
db_parser.add_argument("ID", help="ID of User", type=str,
action="store", nargs='?')
db_parser.add_argument("password", help="Password of User", type=str,
action="store", nargs='?')
action_group.add_argument("-a", "--add", action='store_true',
help="Add User with given ID and password to DB."
)
action_group.add_argument("-t", "--get_token", action='store_true',
help="Retrieve get_token for user with given "
"ID.")
action_group.add_argument("--verify", action='store_true',
help="Verfiy that password is correct.")
action_group.add_argument("-n", "--new", action="store", type=str,
dest="new",
help="Replace password for user with given ID.")
action_group.add_argument("-l", "--list", action="store_true",
help="List all existing users.")
action_group.add_argument("-s", "--verify-token", action='store',
dest='token_val',
help="Verfiy that get_token is correct. ("
"Destroys token, for testing only.)",
type=str)
return db_parser
| 1,816
| 44.425
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/database.py
|
#!/usr/bin/env python3
"""Database Models shared by both server types.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
from datetime import datetime
from typing import List
from flask_sqlalchemy import SQLAlchemy
import logging
db = SQLAlchemy()
log: logging.Logger = logging.getLogger(__name__)
class Task(db.Model):
"""
SQLAlchemy Class representing one celery task
"""
id = db.Column(db.Text, primary_key=True, nullable=False)
user_id = db.Column(db.Text, nullable=False)
task_type = db.Column(db.Text, nullable=False)
user_type = db.Column(db.Text, nullable=False)
# Pass the callable, not its result, so each row gets its own time
timestamp = db.Column(db.DateTime, default=datetime.now)
def __repr__(self):
return f"<Task {self.id}>"
def add_task(username: str, user_type: str, task_id: str,
task_type: str) -> None:
"""Store task into DB."""
task = Task(user_id=username, user_type=user_type,
id=task_id, task_type=task_type)
db.session.add(task)
db.session.commit()
log.debug(f"Successfully stored task {task_id} into DB.")
def get_tasks(user_type: str, username: str) -> List[Task]:
"""Return all tasks of the given user."""
res = Task.query.filter_by(user_id=username,
user_type=user_type
).order_by(Task.timestamp).all()
return res
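# A usage sketch wiring the shared models to an in-memory SQLite database.
# Assumes Flask is available; user and task names are placeholders.
def _example_task_db() -> None:
    """Store and retrieve a task without touching the real databases."""
    from flask import Flask
    app = Flask(__name__)
    app.config.update(
        SQLALCHEMY_DATABASE_URI="sqlite://",  # in-memory database
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )
    db.init_app(app)
    with app.app_context():
        db.create_all()
        add_task("alice", "client", "task-1", "upload")
        tasks = get_tasks("client", "alice")
        assert [t.id for t in tasks] == ["task-1"]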
| 1,419
| 27.4
| 63
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/__init__.py
| 0
| 0
| 0
|
py
|
|
parameter-exchange
|
parameter-exchange-master/src/lib/db_cli.py
|
#!/usr/bin/env python3
"""DB CLI for user DB.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import logging
from typing import List
from flask import Flask
from lib import config
from lib.db_argparser import get_db_parser
from lib.user_database import db
import lib.user_database as user_db
NO_PRINT = False
log: logging.Logger = logging.getLogger(__name__)
def output(*args: str) -> None:
"""Print either via print or via logging."""
if not NO_PRINT:
print(*args)
else:
log.info(" ".join([str(i) for i in args]))
def main(user_type: str, args: List[str], data_dir: str = config.DATA_DIR,
no_print: bool = False) -> \
None:
"""
Manage the database according to the given CL arguments.
(Update both databases)
:param user_type: Type of database that shall be managed
:param args: Command line arguments. (argv[1:])
:param data_dir: [optional] Directory where SQLite files are located.
:param no_print: [optional] Use log instead of print
"""
global NO_PRINT
NO_PRINT = no_print
if len(args) > 0 and (args[0] == "-l" or args[0] == "--list"):
# We want to skip mandatory args.
show_list = True
else:
show_list = False
args = get_db_parser().parse_args(args)
databases = {
'storage': config.STORAGE_DB,
'key': config.KEYSERVER_DB
}
for d in databases:
db_file = databases[d]
app = Flask(__name__)
app.config.from_mapping(
SQLALCHEMY_DATABASE_URI=f"sqlite:///{data_dir}/{db_file}",
SQLALCHEMY_TRACK_MODIFICATIONS=False
)
db.init_app(app)
with app.app_context():
# Init DB
db.create_all()
if show_list:
users = user_db.get_all_users(user_type)
output(f"> Result for {d.capitalize()}-Database: "
f"({len(users)} Users found.)")
for i, user in enumerate(users):
output(f"{i}: {user}")
else:
if args.add:
try:
if args.ID is None or args.password is None:
raise ValueError(
"User ID and Password have to defined.")
user_db.add_user(user_type, args.ID, args.password)
output(f"> {d.capitalize()}: Successfully added user "
f"{args.ID}.")
except ValueError as e:
output(f"> {d.capitalize()}: Add user failed: {e}")
elif args.get_token:
try:
if args.ID is None or args.password is None:
raise ValueError(
"User ID and Password have to defined.")
if user_db.verify_password(user_type, args.ID,
args.password):
output(f"> {d.capitalize()} database: ",
user_db.generate_token(user_type, args.ID))
else:
output(f"> {d.capitalize()}: Incorrect password!")
except ValueError as e:
log.error(f"{d.capitalize()}: Token generation"
f" failed: {e}")
elif args.new is not None:
try:
if args.ID is None or args.password is None:
raise ValueError(
"User ID and Password have to defined.")
user_db.update_password(user_type, args.ID,
args.password, args.new)
output(
f"> {d.capitalize()}: Successfully updated"
f"password for user {args.ID}.")
except ValueError as e:
log.error(f"{d.capitalize()}: "
f"Password update failed: {e}")
elif args.verify:
try:
if args.ID is None or args.password is None:
raise ValueError(
"User ID and Password have to defined.")
if user_db.verify_password(user_type, args.ID,
args.password):
output(f"> {d.capitalize()}: "
f"Credentials are correct.")
else:
output(f"> {d.capitalize()}: "
f"Password is not correct.")
except ValueError as e:
log.error(f"{d.capitalize()}: Password verfication"
f"failed: {e}")
elif args.token_val is not None:
try:
if args.ID is None:
raise ValueError("User ID has to be defined.")
if user_db.verify_token(user_type,
args.ID, args.token_val):
output(f"> {d.capitalize()}: Token correct. "
f"Token destroyed.")
else:
output(f"> {d.capitalize()}: Bad Token.")
except ValueError as e:
log.error(f"{d.capitalize()}: "
f"Token verfication failed: {e}")
| 5,779
| 40.582734
| 78
|
py
|
parameter-exchange
|
parameter-exchange-master/src/lib/helpers.py
|
#!/usr/bin/env python3
"""This module contains small helper functions.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import base64
import logging
import multiprocessing
import os
import platform
import random
import re
import socket
import ssl
import subprocess
import sys
import time
import uuid
from contextlib import contextmanager
from io import StringIO
from typing import List, Tuple
import lib.config as config
log: logging.Logger = logging.getLogger(__name__)
def get_temp_file() -> str:
"""Generate random tempfile and create directory if not exits."""
return config.TEMP_DIR + str(uuid.uuid4())
def queue_to_list(q: multiprocessing.Queue) -> list:
"""
Convert a multiprocessing.Queue into a list.
:param q: mp.Queue
:return: Queue converted to list.
"""
result = []
q.put('STOP')
for i in iter(q.get, 'STOP'):
result.append(i)
return result
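# A tiny usage sketch for queue_to_list; standard library only.
def _example_queue_to_list() -> None:
    """Drain a multiprocessing queue into an ordered list."""
    q = multiprocessing.Queue()
    for i in (1, 2, 3):
        q.put(i)
    assert queue_to_list(q) == [1, 2, 3]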
def start_trans_measurement(
port: int, protocol: str = None,
direction: str = "both",
sleep: bool = True,
file=None) -> (
subprocess.Popen, str): # pragma no cover
"""
Measure transmitted data on port.
:param file: File to write pcap to. If undef. a tempfile is used
:param sleep: Attention: Short manual sleep required (0.01s)
:param port: Port to listen on
:param protocol: [OPTIONAL] Protocol to listen for, otherwise all
:param direction: [OPTIONAL] one of {src, dst, both}
:return: Popen object that can be given to stop_trans_measurement
"""
"""
TCPSTAT Command:
-i Interface. Loopback 'lo' for Linux and 'lo0' for Mac
-f FilterRule. See TCPDUMP.
-o Output Format. %N = # Bytes, %n = # packets
-1 Measure until termination
sudo tcpstat -i lo -f "dst port 1213 and tcp" -o "b=%N\np=%n\n" -1
"""
# TCPSTAT -----------------------------------------------------------------
# if platform.system() == "Darwin": # pragma no cover
# interface = 'lo0'
# base_cmd = ["tcpstat"]
# else: # pragma no cover
# interface = 'lo'
# base_cmd = ["sudo", "tcpstat"]
# if direction == "both":
# flt = f"(dst port {port} or src port {port})"
# else:
# flt = f"({direction} port {port})"
# if protocol is not None:
# flt += f" and {protocol}"
# log.debug(f"Starting transmission measurement: {flt}")
# out_fmt = 'B=%N:p=%n'
# cmd = base_cmd + ["-i", f"{interface}",
# "-f", f"{flt}", "-o", f"{out_fmt}", "-1"]
# s = subprocess.Popen(cmd, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# -------------------------------------------------------------------------
# Use TCPDUMP because of issue on butthead
if file is None:
file = get_temp_file()
if platform.system() == "Darwin": # pragma no cover
interface = 'lo0'
else: # pragma no cover
interface = 'lo'
cmd = ['sudo', 'tcpdump', '-i', interface, '-w', file, '-U',
'--immediate-mode']
# -U is no buffer
if direction != 'both':
cmd.append(direction)
cmd.extend(['port', str(port)])
if protocol is not None:
cmd.append('and')
cmd.append(str(protocol))
log.debug(f"Starting transmission measurement: f{str(cmd)}")
s = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
if sleep:
time.sleep(0.01)
return s, file
def read_tcpstat_from_file(file: str) -> (int, int): # pragma no cover
"""
Read transmitted bytes and packets from pcap file.
:param file: pcap file written by tcpdump
:return: Tuple of (transmitted bytes, packet count)
"""
out_fmt = 'B=%N:p=%n'
s = subprocess.run(["tcpstat", "-r", file, "-o", f"{out_fmt}", "-1"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out = s.stdout
try:
m = re.search("B=(\d+):p=(\d+)", str(out))
b = int(m.group(1))
packets = int(m.group(2))
except AttributeError: # pragma no cover
raise (ValueError("No valid output of TCPSTAT."))
if b == 0:
raise RuntimeError("Capturing Packets failed.")
return b, packets
def kill_tcpdump() -> None: # pragma no cover
"""
Kill all TCPDUMP processes with SIGINT signal.
:return: None
"""
# Kill tcpdump gracefully
if platform.system() == "Darwin":
subprocess.run(["sudo", "pkill", "-2", "tcpdump"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
else:
subprocess.run(["sudo", "killall", "-s", "2", "tcpdump"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
def reset_port(): # pragma no cover
"""
Removes artificial latency and bandwidth limits
from the port or from all ports [Linux only]
# :param port: Only remove latency of one port
:return:
"""
# if port is None:
log.debug("Reset Latency and Rate for all ports.")
s = subprocess.Popen(["sudo", "tcdel", "lo", "--all"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = s.communicate()
# Error msg if there is no rule set.
# if err != b'':
# log.warning(str(err))
# else:
# log.debug(f"Reset Latency and Rate for port {port}.")
# subprocess.run(["sudo", "tcdel", "lo", "--port", str(port)],
# stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
# subprocess.run(["sudo", "tcdel", "lo", "--src-port", str(port)],
# stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
def add_latency(latency: int): # pragma no cover
"""
Adds latency to the defined port. [Linux only]
:param latency: Amount of latency in ms
:return:
"""
log.debug(f"Set Latency of {latency}ms for all ports.")
s = subprocess.Popen(
["sudo", "tcset", "lo", "--delay",
f"{latency}ms"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = s.communicate()
if err != b'':
log.warning(str(err))
def add_bandwidth(bw: int) -> None: # pragma no cover
"""Limit bandwidth.
:param bw: Bandwidth in kbit/s
:return: None
"""
log.debug(f"Set bandwidth of {bw}kbit/s for all ports.")
s = subprocess.Popen(
["sudo", "tcset", "lo", "--rate", f"{bw}kbit/s"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = s.communicate()
if err != b'':
log.warning(str(err))
def add_async_bandwidth(bw: int, port: int) -> None: # pragma no cover
"""
Limit bandwidth asymmetrically with a 1:10 upload/download ratio.
:param port: Port to apply restriction to (assumed to be server)
:param bw: Download rate in kbit/s; upload is limited to bw/10
:return:
"""
# Upload
cmd = ["sudo", "tcset", "--add", "lo", "--rate", f"{int(bw/10)}kbit/s",
"--dst-port", str(port)]
# log.info(f"Execute: {str(cmd)}")
s = subprocess.Popen(
cmd,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = s.communicate()
if err != b'':
log.warning(str(err))
# Download
cmd = ["sudo", "tcset", "--add", "lo", "--rate", f"{int(bw)}kbit/s",
"--src-port", str(port)]
# log.info(f"Execute: {str(cmd)}")
s = subprocess.Popen(
cmd,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = s.communicate()
if err != b'':
log.warning(str(err))
def to_base64(b: bytes) -> str:
"""
Convert a bytes object into a Base64-encoded string for transmission.
:param b: The bytes object to transmit
:return: Base64 encoded string for transmission
"""
return base64.b64encode(b).decode()
def from_base64(b64: str) -> bytes:
"""
Convert a Base64-encoded string back into the bytes object.
:param b64: Base64 encoded string
:return: Bytes object
"""
return base64.b64decode(b64.encode())
def encryption_keys_from_int(in_list: List[int]) -> List[bytes]:
"""
Convert a list of integers into 128 Bit encryption keys
:param in_list: List of Integers to be converted
:return: List of bytes
"""
return [
i.to_bytes(128 // 8, 'big')
for i in in_list
]
def keys_to_int(keys: List[bytes]) -> List[int]:
"""
Convert a list of bytes encryption keys to integers
:param keys: List of encryption keys as bytes objects
:return: List of Integers
"""
return [
int.from_bytes(i, 'big') for i in keys]
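# A round-trip sketch between the integer and 16-byte key representations
# used by the OT interface.
def _example_key_conversion() -> None:
    """Ints to 128-bit keys and back."""
    ints = [0, 1, 2 ** 128 - 1]
    keys = encryption_keys_from_int(ints)
    assert all(len(k) == 16 for k in keys)
    assert keys_to_int(keys) == ints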
@contextmanager
def captured_output():
"""Capture outputs to StdOut and StdErr."""
new_out, new_err = StringIO(), StringIO()
old_out, old_err = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = new_out, new_err
yield sys.stdout, sys.stderr
finally:
sys.stdout, sys.stderr = old_out, old_err
def create_data_dir(data_dir: str) -> None:
"""Create the data directory and a contained log directory if it does
not exists, yet."""
os.makedirs(data_dir, exist_ok=True)
os.makedirs(data_dir + '/logs/', exist_ok=True)
def parse_list(string: str) -> List[float]:
"""Convert a string list into a list object."""
r_list = string.strip('][\n').split(',')
r_list = [float(i) for i in r_list]
return r_list
def get_tls_context(cert: str, key: str) -> ssl.SSLContext:
"""Return an SSL Context with high security level"""
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
context.load_dh_params(config.TLS_CERT_DIR + 'dhparam.pem')
context.set_ecdh_curve('secp384r1')
context.set_ciphers('AES256+EDH:AES256+EECDH')
context.load_cert_chain(cert, key)
return context
def port_free(port: int) -> bool:
"""
Return True if port can be used.
https://docs.python.org/2/library/socket.html#example
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(('', port)) # Try to open port
except OSError as e:
if e.errno == 98 or e.errno == 48:  # Errno 98 means address already
# bound
s.close()
return False
raise e # pragma no cover
except Exception as e:
s.close()
raise e
s.close()
return True
def get_free_port() -> int:
"""
Return a free port.
:return: Available port
"""
while True:
port = random.randint(1024, 65535)
if port_free(port):
return port
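# A quick usage sketch for the port helpers; note that another process may
# grab the port between these two calls, so real callers should be ready
# to retry.
def _example_free_port() -> None:
    """Pick a free port and double-check it is usable."""
    port = get_free_port()
    assert 1024 <= port <= 65535
    assert port_free(port)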
def generate_auth_header(user: str, token: str) -> List[Tuple[str, str]]:
"""Generate a valid HTTPBasicAuth Header for the given username and
password."""
b64: bytes = base64.b64encode(bytes(f"{user}:{token}",
encoding='UTF-8'))
return [('Authorization', f'Basic {b64.decode()}')]
def print_time(t: float) -> str:
"""Convert time to human readable representation.
:param t: Time in seconds.
:return: String representation
"""
t = t * 1000 # to ms
if t < 1000:
return f"{round(t, 2)}ms"
elif t < 60000:
return f"{round(t / 1000, 2)}s"
elif t < 3600000:
sec = t / 1000
minute = int(sec // 60)
sec = sec % 60
return f"{minute}min {round(sec, 2)}s"
else:
sec = t / 1000
minute = sec // 60
sec = sec % 60
h = int(minute // 60)
minute = int(minute % 60)
return f"{h}h {minute}min {round(sec, 2)}s"
| 11,438
| 29.75
| 79
|
py
|
parameter-exchange
|
parameter-exchange-master/src/tls_test/tls_client.py
|
#!/usr/bin/env python3
"""This module tests the TLS Handshake and Record Protocol Performance.
Start echo_server.sh in another terminal.
Copyright (c) 2020.
Author: Erik Buchholz
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import argparse
import os
import socket
import ssl
import time
from tqdm import tqdm
# CONSTANTS -------------------------------------------------------------------
# You may have to adapt the constants in echo_server.sh accordingly
HOST = "127.0.0.1"
PORT = 5000
REPS = 10000
CURVES = ["prime256v1"] # secp256r1 is called prime256v1 in OpenSSL
CIPHERS = [
"ECDHE-RSA-AES256-GCM-SHA384",
# "ECDHE-ECDSA-AES256-GCM-SHA384", # Not working on Preserver
"DHE-RSA-AES256-GCM-SHA384",
]
SENT_BYTES = 10 * 10 ** 6 # 10MB
PROTOCOL = ssl.PROTOCOL_TLSv1_2
# -----------------------------------------------------------------------------
# DIRECTORIES -----------------------------------------------------------------
_cur_dir = os.path.dirname(
os.path.dirname(
os.path.dirname(os.path.abspath(__file__))))
WORKING_DIR = os.path.abspath(_cur_dir) + '/'
DATA_DIR = WORKING_DIR + 'data/'
TLS_CERT_DIR = DATA_DIR + "certs/"
ROOT_CA = TLS_CERT_DIR + "rootCA.crt"
EVAL_DIR = WORKING_DIR + 'eval/'
output_dir = EVAL_DIR + "tls/"
# -----------------------------------------------------------------------------
def main(base_filename: str):
for cipher in CIPHERS:
for curve in CURVES:
print("Testing cipher\033[1m", cipher, "\033[0m with curve\033[1m",
curve, "\033[0m.")
# TLS SETTINGS-------------------------------------------------------------
context = ssl.SSLContext(PROTOCOL)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True # Required for CERT_REQUIRED
context.load_verify_locations(ROOT_CA)
context.set_ecdh_curve(curve)
context.set_ciphers(cipher)
# -----------------------------------------------------------------------------
send_times = []
hs_times = []
used_cipher = ""
used_protocol = ""
sent_bytes = SENT_BYTES # 10MB
# recv_bytes = 10 * 10 ** 6 - 10000 # sent_bytes
data = b'1' * sent_bytes
for _ in tqdm(range(REPS)):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
ssl_sock = context.wrap_socket(
s, server_hostname='localhost',
do_handshake_on_connect=False)
ssl_sock.connect((HOST, PORT))
start = time.monotonic()
ssl_sock.do_handshake(block=True)
handshake_time = time.monotonic() - start
hs_times.append(handshake_time)
# print("Handshake: ", handshake_time * 1000, "ms")
# print("Used Cipher: ", ssl_sock.cipher())
used_cipher = ssl_sock.cipher()[0]
used_protocol = ssl_sock.version()
# print("Supported Ciphers: ", ssl_sock.shared_ciphers())
start = time.monotonic()
ssl_sock.send(data)
send_time = time.monotonic() - start
send_times.append(send_time)
# time.sleep(1)
# read_bytes = 0
# start = time.monotonic()
# while read_bytes < recv_bytes:
# read = ssl_sock.recv(recv_bytes)
# read_bytes += len(read)
# recv_time = time.monotonic() - start
print(
"###########################################################")
print("Used Protocol:\033[1m", used_protocol, "\033[0m")
print("Used Cipher: \033[1m", used_cipher, "\033[0m")
print("Used Curve: \033[1m", curve, "\033[0m",
"[Note: secp256r1 is called prime256v1 in OpenSSL]")
print("Rounds: \033[1m", REPS, "\033[0m")
print("Transmitted Data: \033[1m", sent_bytes / 1000000, "MB",
"\033[0m")
avg_hs = sum(hs_times) / REPS
print("Average Handshake: \033[1m", round(avg_hs * 1000, 2), "ms",
"\033[0m")
avg_send = sum(send_times) / REPS
print("Average Sending: \033[1m", round(avg_send * 1000, 2), "ms",
"\033[0m")
print("Average Sending: \033[1m",
round(sent_bytes / avg_send / 1000000, 2), "MB/s", "\033[0m")
# Write to file
if base_filename is not None:
os.makedirs(output_dir, exist_ok=True)
filename = f"{base_filename}_{cipher}_{curve}.csv"
with open(output_dir + filename, "w") as f:
f.write(
"------------------------HEADER------------------------;;;;;;\n")
f.write(f"Protocol: {used_protocol};;;;;;\n")
f.write(f"Cipher: {used_cipher};;;;;;\n")
f.write(
f"Curve: {curve} [Note: secp256r1 is called "
f"prime256v1 in OpenSSL];;;;;;\n")
f.write(f"Rounds: {REPS};;;;;;\n")
f.write(
f"Transmitted Data per Measurement: "
f"{sent_bytes / 1000000}MB;;;;;;\n")
f.write(
f"Average Handshake Duration: "
f"{round(avg_hs * 1000, 2)}ms;;;;;;\n")
f.write(
f"Average Sending: "
f"{round(sent_bytes / avg_send / 1000000, 2)}MB/s;;;;;;\n")
f.write(
"----------------------END-HEADER----------------------;;;;;;\n")
f.write(
"ROUND;PROTOCOL;CIPHER;CURVE;SENT_DATA["
"Byte];HANDSHAKE[s];SENDING[s]\n")
for i, _ in enumerate(hs_times):
f.write(
f"{i + 1};{used_protocol};{used_cipher};{curve};{SENT_BYTES};{hs_times[i]};{send_times[i]}\n")
if __name__ == '__main__':
p = argparse.ArgumentParser("TLS Client.")
p.add_argument('-o', '--out', type=str, action='store',
help="Base Filename", default=None)
args = p.parse_args()
main(args.out)
| 6,477
| 41.064935
| 122
|
py
|
parameter-exchange
|
parameter-exchange-master/src/tls_test/__init__.py
|
#!/usr/bin/env python3
"""This module..."""
| 44
| 14
| 22
|
py
|
parameter-exchange
|
parameter-exchange-master/cython/psi/cython_setup.py
|
#!/usr/bin/env python3
"""
Copyright (c) 2020.
Author: Chris Dax
E-mail: dax@comsys.rwth-aachen.de
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import os
import sys
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
# os.path.dirname moves one directory up
cur_dir = os.path.dirname(os.path.abspath(__file__))
cython_dir = os.path.dirname(cur_dir)
repo_base_dir = os.path.dirname(cython_dir)
lib_dirs = [
f"{cython_dir}/lib",
f"{repo_base_dir}/libraries/libPSI/lib",
f"{repo_base_dir}/libraries/libOTe/lib",
f"{repo_base_dir}/libraries/libOTe/cryptoTools/thirdparty/linux"
"/miracl/miracl/source/"
]
inc_dirs = [
cur_dir,
f"{cython_dir}/psi_interface",
f"{repo_base_dir}/libraries/libOTe/cryptoTools",
f"{repo_base_dir}/libraries/libOTe/cryptoTools/thirdparty/linux/boost",
f"{repo_base_dir}/libraries/libPSI",
f"{repo_base_dir}/libraries/libOTe"
]
home = os.path.expanduser("~")
wolfssl_filename = "libwolfssl.so"
if sys.platform == "darwin":
# MAC OS
# os.environ["CC"] = "g++-9"
wolfssl_filename = "libwolfssl.dylib"
inc_dirs.append("/usr/local/include/")
wolfssl_path = home + "/lib/" + wolfssl_filename
if os.path.exists(wolfssl_path):
# WOLFSSL was installed into home instead of /usr/local/
# Done on passion for example
lib_dirs.append(home + "/lib/")
inc_dirs.append(home + "/include/")
RunMain_extension = Extension(
name="cPSIInterface",
sources=[f"{cur_dir}/cPSIInterface.pyx"],
libraries=[
"PSIInterface", "libPSI", "libOTe", "cryptoTools", "miracl", "wolfssl"
],
library_dirs=lib_dirs,
include_dirs=inc_dirs,
language="c++",
extra_compile_args=["-w", "-fPIC", "-pthread", "-std=c++14"]
# extra_link_args = []
)
setup(
name="cPSIInterface",
ext_modules=cythonize([RunMain_extension])
)
| 1,917
| 26.4
| 78
|
py
|
parameter-exchange
|
parameter-exchange-master/cython/psi/Tests/sendMain.py
|
#!/usr/bin/env python3
"""
Copyright (c) 2020.
Author: Chris Dax
E-mail: dax@comsys.rwth-aachen.de
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSISender
import os
import sys
from config import (
hostname, port, numThreads, certPath, setSize, psi_name
)
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(parent_dir)
print("Sender: Starting sender for: ", psi_name)
send = PyPSISender()
send.hostName = hostname
send.port = port
send.numThreads = numThreads
send.setSize = setSize
send.serverCert = certPath + "keyserver.crt"
send.serverKey = certPath + "keyserver.key"
sendSet = []
for x in range(setSize):
sendSet.append(x)
print("HbC Sender [KKRT16]: Execute without "
"TLS----------------------------------------------------")
send.tls = False
send.execute("KKRT16", sendSet)
print("HbC Sender [KKRT16]: "
"-----------------------------------------------------------------------")
print("Sender: Execute with "
"TLS-------------------------------------------------------")
send.tls = True
send.execute("KKRT16", sendSet)
print("Sender: "
"-----------------------------------------------------------------------")
print("Malicious Sender [RR17]: Execute without "
"TLS----------------------------------------------------")
send.tls = False
send.execute("RR17", sendSet)
print("Malicious Sender [RR17]: "
"-----------------------------------------------------------------------")
print("Sender: Execute with "
"TLS-------------------------------------------------------")
send.tls = True
send.execute("RR17", sendSet)
print("Sender: "
"-----------------------------------------------------------------------")
| 1,801
| 30.068966
| 80
|
py
|
parameter-exchange
|
parameter-exchange-master/cython/psi/Tests/config.py
|
#!/usr/bin/env python3
"""
Copyright (c) 2020.
Author: Chris Dax
E-mail: dax@comsys.rwth-aachen.de
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import os
def parent(path: str):
return os.path.dirname(path)
hostname = "127.0.0.1"
port = 12345
numThreads = 1
current_dir = os.path.dirname(os.path.abspath(__file__))
certPath = parent(parent(parent(current_dir))) + "/data/certs/"
setSize = 100
psi_name = "KKRT16"
| 444
| 18.347826
| 63
|
py
|
parameter-exchange
|
parameter-exchange-master/cython/psi/Tests/__init__.py
| 0
| 0
| 0
|
py
|
|
parameter-exchange
|
parameter-exchange-master/cython/psi/Tests/recvMain.py
|
#!/usr/bin/env python3
"""
Copyright (c) 2020.
Author: Chris Dax
E-mail: dax@comsys.rwth-aachen.de
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
# noinspection PyUnresolvedReferences
from cPSIInterface import PyPSIReceiver
import os
import sys
from config import (
hostname, port, numThreads, certPath, setSize, psi_name
)
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(parent_dir)
print("Receiver: Starting receiver for: ", psi_name)
recv = PyPSIReceiver()
recv.hostName = hostname
recv.port = port
recv.numThreads = numThreads
recv.setSize = setSize
recv.rootCA = certPath + "rootCA.crt"
recvSet = []
for x in range(setSize):
recvSet.append(x + x % 2 * setSize)
print("HbC Receiver [KKRT16]: Execute without "
"TLS----------------------------------------------------")
recv.tls = False
intersection: list = recv.execute("KKRT16", recvSet)
intersection.sort()
print("Receiver: Found " + str(len(intersection)))
print("Receiver: Intersection: ", intersection)
print("Receiver: "
"-----------------------------------------------------------------------")
print("HbC Receiver [KKRT16]: Execute with "
"TLS-------------------------------------------------------")
recv.tls = True
intersection = recv.execute("KKRT16", recvSet)
intersection.sort()
print("Receiver: Found " + str(len(intersection)))
print("Receiver: Intersection: ", intersection)
print("Receiver: "
"-----------------------------------------------------------------------")
print("Malicious Receiver [RR17]: Execute without "
"TLS----------------------------------------------------")
recv.tls = False
intersection: list = recv.execute("RR17", recvSet)
intersection.sort()
print("Receiver: Found " + str(len(intersection)))
print("Receiver: Intersection: ", intersection)
print("Receiver: "
"-----------------------------------------------------------------------")
print("Malicious Receiver [RR17]: Execute with "
"TLS-------------------------------------------------------")
recv.tls = True
intersection = recv.execute("RR17", recvSet)
intersection.sort()
print("Receiver: Found " + str(len(intersection)))
print("Receiver: Intersection: ", intersection)
print("Receiver: "
"-----------------------------------------------------------------------")
| 2,340
| 32.927536
| 80
|
py
|
parameter-exchange
|
parameter-exchange-master/cython/ot/cython_setup.py
|
#!/usr/bin/env python3
"""
Copyright (c) 2020.
Author: Chris Dax
E-mail: dax@comsys.rwth-aachen.de
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import os
import sys
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
# os.path.dirname moves one directory up
cur_dir = os.path.dirname(os.path.abspath(__file__))
cython_dir = os.path.dirname(cur_dir)
repo_base_dir = os.path.dirname(cython_dir)
lib_dirs = [
f"{cython_dir}/lib",
f"{repo_base_dir}/libraries/libPSI/lib",
f"{repo_base_dir}/libraries/libOTe/lib",
f"{repo_base_dir}/libraries/libOTe/cryptoTools/thirdparty/linux"
"/miracl/miracl/source/"
]
inc_dirs = [
cur_dir,
f"{cython_dir}/ot_interface",
f"{repo_base_dir}/libraries/libOTe/cryptoTools",
f"{repo_base_dir}/libraries/libOTe/cryptoTools/thirdparty/linux/boost",
f"{repo_base_dir}/libraries/libPSI",
f"{repo_base_dir}/libraries/libOTe"
]
home = os.path.expanduser("~")
wolfssl_filename = "libwolfssl.so"
if sys.platform == "darwin":
# MAC OS
wolfssl_filename = "libwolfssl.dylib"
inc_dirs.append("/usr/local/include/")
lib_dirs.append("/usr/local/lib/")
wolfssl_path = home + "/lib/" + wolfssl_filename
if os.path.exists(wolfssl_path):
# WOLFSSL was installed into home instead of /usr/local/
# Done on passion for example
lib_dirs.append(home + "/lib/")
inc_dirs.append(home + "/include/")
RunMain_extension = Extension(
name="cOTInterface",
sources=[f"{cur_dir}/cOTInterface.pyx"],
libraries=[
"OTInterface", "libPSI", "libOTe", "cryptoTools", "miracl", "wolfssl"
],
library_dirs=lib_dirs,
include_dirs=inc_dirs,
language="c++",
extra_compile_args=["-w", "-fPIC", "-pthread", "-std=c++14"]
# extra_link_args = []
)
setup(
name="cOTInterface",
ext_modules=cythonize([RunMain_extension])
)
| 1,918
| 26.414286
| 77
|
py
|
parameter-exchange
|
parameter-exchange-master/cython/ot/Test/sendMain.py
|
#!/usr/bin/env python3
"""
Copyright (c) 2020.
Author: Chris Dax
E-mail: dax@comsys.rwth-aachen.de
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
from cOTInterface import PyOTSender
import os
import sys
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
cython_dir = os.path.dirname(parent_dir)
sys.path.append(parent_dir)
print("Sender: Starting.")
send = PyOTSender()
send.totalOTs = 10
send.numChosenMsgs = 2 ** 10 # max 2^25 60+s
send.hostName = "127.0.0.1"
send.port = 1213
send.serverCert = f"{cython_dir}/certs/keyserver.crt"
send.serverKey = f"{cython_dir}/certs/keyserver.key"
print("numChosenMsgs: " + str(send.numChosenMsgs))
print("totalOTs: " + str(send.totalOTs))
messages = []
for y in range(send.numChosenMsgs):
messages.append(y)
print("Sender (KKRT16): Execute without "
"TLS------------------------------------------------------")
send.executeSame(messages, False)
print("Sender (KKRT16): Execute with "
"TLS---------------------------------------------------------")
send.executeSame(messages, True)
send.maliciousSecure = True
send.inputBitCount = 76
print("Sender (OOS16): Execute without "
"TLS------------------------------------------------------")
send.executeSame(messages, False)
print("Sender (OOS16): Execute with "
"TLS---------------------------------------------------------")
send.executeSame(messages, True)
| 1,422
| 28.645833
| 72
|
py
|
parameter-exchange
|
parameter-exchange-master/cython/ot/Test/recvMain.py
|
#!/usr/bin/env python3
"""
Copyright (c) 2020.
Author: Chris Dax
E-mail: dax@comsys.rwth-aachen.de
Maintainer: Erik Buchholz
E-mail: buchholz@comsys.rwth-aachen.de
"""
import datetime
import os
import sys
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
cython_dir = os.path.dirname(parent_dir)
sys.path.append(parent_dir)
# Import after sys.path is extended so the compiled module can be resolved.
# noinspection PyUnresolvedReferences
from cOTInterface import PyOTReceiver  # noqa: E402
print("Receiver: Starting.")
recv = PyOTReceiver()
recv.totalOTs = 10
recv.numChosenMsgs = 2 ** 10  # maximum 2 ** 25 (takes 60+ s)
recv.hostName = "127.0.0.1"
recv.port = 1213
recv.rootCA = f"{cython_dir}/certs/rootCA.crt"
choices = []
for x in range(recv.totalOTs):
choices.append(x)
print("Receiver (KKRT): Execute without "
"TLS------------------------------------------------------")
time1 = datetime.datetime.now().timestamp()
result = recv.execute(choices, False)
time2 = datetime.datetime.now().timestamp()
print(f"Receiver: Took: {time2 - time1}s")
print(f"Receiver: Result: {result}")
print("Receiver (KKRT): Execute with "
"TLS---------------------------------------------------------")
time1 = datetime.datetime.now().timestamp()
result = recv.execute(choices, True)
time2 = datetime.datetime.now().timestamp()
print(f"Receiver: Took: {time2 - time1}s")
print(f"Receiver: Result: {result}")
recv.maliciousSecure = True
recv.inputBitCount = 76
print("Receiver (OOS): Execute without "
"TLS------------------------------------------------------")
time1 = datetime.datetime.now().timestamp()
result = recv.execute(choices, False)
time2 = datetime.datetime.now().timestamp()
print(f"Receiver: Took: {time2 - time1}s")
print(f"Receiver: Result: {result}")
print("Receiver (OOS): Execute with "
"TLS---------------------------------------------------------")
time1 = datetime.datetime.now().timestamp()
result = recv.execute(choices, True)
time2 = datetime.datetime.now().timestamp()
print(f"Receiver: Took: {time2 - time1}s")
print(f"Receiver: Result: {result}")
| 2,011
| 30.936508
| 72
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/MGANet_test_LD37.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from numpy import *
import math  # needed by PSNR below; not reliably provided by the star import
# from scipy.misc import imresize
from skimage.measure import compare_ssim
import cv2
import glob
import time
import os
import argparse
import Net.MGANet as MGANet
import torch
import copy
def yuv_import(filename, dims, startfrm, numframe):
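    # Planar YUV 4:2:0: each frame stores a full-resolution Y plane followed by
    # half-resolution U and V planes (w*h*3/2 bytes per frame). Note that the
    # byte-wise ord(fp.read(1)) loops below are very slow; reading a whole plane
    # with np.frombuffer would be a faster (untested here) drop-in.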
fp = open(filename, 'rb')
frame_size = np.prod(dims) * 3 / 2
fp.seek(0, 2)
ps = fp.tell()
totalfrm = int(ps // frame_size)
d00 = dims[0] // 2
d01 = dims[1] // 2
assert startfrm+numframe<=totalfrm
Y = np.zeros(shape=(numframe, 1,dims[0], dims[1]), dtype=np.uint8, order='C')
U = np.zeros(shape=(numframe, 1,d00, d01),dtype= np.uint8, order='C')
V = np.zeros(shape=(numframe, 1,d00, d01),dtype= np.uint8, order='C')
fp.seek(int(frame_size * startfrm), 0)
for i in range(startfrm,startfrm+numframe):
for m in range(dims[0]):
for n in range(dims[1]):
Y[i-startfrm,0, m, n] = ord(fp.read(1))
for m in range(d00):
for n in range(d01):
U[i-startfrm,0, m, n] = ord(fp.read(1))
for m in range(d00):
for n in range(d01):
V[i-startfrm,0, m, n] = ord(fp.read(1))
fp.close()
Y = Y.astype(np.float32)
U = U.astype(np.float32)
V = V.astype(np.float32)
return Y, U, V
def get_w_h(filename):
width = int((filename.split('x')[0]).split('_')[-1])
height = int((filename.split('x')[1]).split('_')[0])
return (height,width)
def get_data(one_filename,video_index,num_frame,startfrm_position):
one_filename_length = len(one_filename)
data_Y = []
for i in range(one_filename_length+1):
if i == 0:
data_37_filename = np.sort(glob.glob(one_filename[i]+'/*.yuv'))
data_37_filename_length = len(data_37_filename )
for i_0 in range(video_index,video_index+1):
file_name = data_37_filename[i_0]
dims = get_w_h(filename=file_name)
data_37_filename_Y,data_37_filename_U,data_37_filename_V = yuv_import(filename=file_name, dims=dims ,startfrm=startfrm_position,numframe=num_frame)
data_Y.append(data_37_filename_Y)
if i == 1:
mask_37_filename = np.sort(glob.glob(one_filename[i] + '/*.yuv'))
mask_37_filename_length = len(mask_37_filename)
for i_1 in range(video_index,video_index+1):
file_name = mask_37_filename[i_1]
dims = get_w_h(filename=file_name)
mask_37_filename_Y, mask_37_filename_U, mask_37_filename_V = yuv_import(filename=file_name, dims=dims,startfrm=startfrm_position, numframe=num_frame)
data_Y.append(mask_37_filename_Y)
if i == 2:
label_37_filename = np.sort(glob.glob('../test_yuv/label/' + '*.yuv'))
label_37_filename_length = len(label_37_filename)
for i_2 in range(video_index,video_index+1):
file_name = label_37_filename[i_2]
dims = get_w_h(filename=file_name)
label_37_filename_Y, label_37_filename_U, label_37_filename_V = yuv_import(filename=file_name, dims=dims,startfrm=startfrm_position, numframe=num_frame)
data_Y.append(label_37_filename_Y)
return data_Y
def test_batch_key(data_Y, start, batch_size=1):
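    # Key-frame variant: temporal neighbors are taken two frames away
    # (start-2 / start+2) instead of the adjacent frames used in test_batch.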
data_pre = (data_Y[0][start-2:start-1,...])/255.0
data_cur = data_Y[0][start:start+1,...]/255.0
data_aft = data_Y[0][start+2:start+3,...]/255.0
mask = data_Y[1][start:start+1,...]/255.0
label = data_Y[2][start:start+1,...]
start+=1
return data_pre,data_cur,data_aft,mask,label,start
def test_batch(data_Y, start, batch_size=1):
data_pre = (data_Y[0][start-1:start,...])/255.0
data_cur = data_Y[0][start:start+1,...]/255.0
data_aft = data_Y[0][start+1:start+2,...]/255.0
mask = data_Y[1][start:start+1,...]/255.0
label = data_Y[2][start:start+1,...]
start+=1
return data_pre,data_cur,data_aft,mask,label,start
def PSNR(img1, img2):
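    # PSNR = 20 * log10(255 / sqrt(MSE)); identical images (MSE == 0) return a
    # 100 dB sentinel instead of infinity.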
mse = np.mean( (img1.astype(np.float32) - img2.astype(np.float32)) ** 2 ).astype(np.float32)
if mse == 0:
return 100
PIXEL_MAX = 255.0
return 20 * math.log10(PIXEL_MAX / math.sqrt(mse))
def image_test(one_filename,net_G,patch_size=[128,128],f_txt=None,opt=None):
ave_diff_psnr =0.
ave_psnr_pre_gt =0.
ave_psnr_data_gt =0.
video_num=opt.video_nums
for video_index in range(video_num):
data_37_filename = np.sort(glob.glob(one_filename[0]+'/*.yuv'))
data_Y = get_data(one_filename,video_index=video_index,num_frame=92,startfrm_position=opt.startfrm_position)
start =1
psnr_diff_sum = 0
psnr_pre_gt_sum=0
psnr_data_gt_sum=0
nums =opt.frame_nums
for itr in range(0, nums):
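            # Low-delay GOP structure: frames where (start - 2) % 4 == 0 are
            # treated as key frames and draw neighbors from two frames away.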
if (start - 2) % 4 == 0:
data_pre, data_cur, data_aft, mask, label, start = test_batch_key(data_Y=data_Y, start=start, batch_size=1)
else:
data_pre, data_cur, data_aft, mask, label, start = test_batch(data_Y=data_Y, start=start, batch_size=1)
height = data_pre.shape[2]
width = data_pre.shape[3]
data_pre_value_patch = torch.from_numpy(data_pre).float().cuda()
data_cur_value_patch = torch.from_numpy(data_cur).float().cuda()
data_aft_value_patch = torch.from_numpy(data_aft).float().cuda()
data_mask_value_patch = torch.from_numpy(mask).float().cuda()
start_time = time.time()
fake_image = net_G(data_pre_value_patch,data_cur_value_patch,data_aft_value_patch,data_mask_value_patch)
end_time=time.time()
fake_image_numpy = fake_image.detach().cpu().numpy()
fake_image_numpy = np.squeeze(fake_image_numpy)*255.0
finally_image=np.squeeze(fake_image_numpy)
mask_image = np.squeeze(mask)*255.
os.makedirs(opt.result_path+'/result_enhanced_data/%02d'%(video_index+1),exist_ok = True)
os.makedirs(opt.result_path+'/result_mask/%02d'%(video_index+1),exist_ok = True)
os.makedirs(opt.result_path+'/result_label/%02d'%(video_index+1),exist_ok = True)
os.makedirs(opt.result_path+'/result_compression_data/%02d'%(video_index+1),exist_ok = True)
cv2.imwrite(opt.result_path+'/result_enhanced_data/%02d/%02d.png'%(video_index+1,itr+2),finally_image.astype(np.uint8))
cv2.imwrite(opt.result_path+'/result_mask/%02d/%02d.png'%(video_index+1,itr+2),mask_image.astype(np.uint8))
data_cur_image = (np.squeeze(data_cur)*255.0).astype(np.float32)
label = np.squeeze(label).astype(np.float32)
cv2.imwrite(opt.result_path+'/result_label/%02d/%02d.png'%(video_index+1,itr+2),label.astype(np.uint8))
cv2.imwrite(opt.result_path+'/result_compression_data/%02d/%02d.png'%(video_index+1,itr+2),data_cur_image.astype(np.uint8))
psnr_pre_gt = PSNR(finally_image, label)
psnr_data_gt = PSNR(data_cur_image, label)
psnr_diff = psnr_pre_gt - psnr_data_gt
psnr_diff_sum +=psnr_diff
psnr_pre_gt_sum+=psnr_pre_gt
psnr_data_gt_sum+=psnr_data_gt
print('psnr_gain:%.05f'%(psnr_diff))
print('psnr_predict:{:.04f} psnr_anchor:{:.04f} psnr_gain:{:.04f}'.format(psnr_pre_gt,psnr_data_gt,psnr_diff),file=f_txt)
print('video_index:{:2d} psnr_predict_average:{:.04f} psnr_anchor_average:{:.04f} psnr_gain_average:{:.04f}'.format(video_index,psnr_pre_gt_sum/nums,psnr_data_gt_sum/nums,psnr_diff_sum/nums),file=f_txt)
print('{}'.format(data_37_filename[video_index]),file=f_txt)
f_txt.write('\r\n')
ave_diff_psnr+=psnr_diff_sum/nums
ave_psnr_pre_gt +=psnr_pre_gt_sum/nums
ave_psnr_data_gt +=psnr_data_gt_sum/nums
print(' average_psnr_predict:{:.04f} average_psnr_anchor:{:.04f} average_psnr_gain:{:0.4f}'.format(ave_psnr_pre_gt / video_num, ave_psnr_data_gt / video_num, ave_diff_psnr / video_num))
print(' average_psnr_predict:{:.04f} average_psnr_anchor:{:.04f} average_psnr_gain:{:0.4f}'.format(ave_psnr_pre_gt / video_num, ave_psnr_data_gt / video_num, ave_diff_psnr / video_num), file=f_txt)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="MGANet_test")
parser.add_argument('--net_G', default='../model/model_epoch_LD37.pth',help="add checkpoint")
parser.add_argument("--gpu_id", default=0, type=int, help="gpu ids (default: 0)")
parser.add_argument("--video_nums", default=1, type=int, help="video number (default: 0)")
parser.add_argument("--frame_nums", default=19, type=int, help="frame number of one video to test (default: 90)")
parser.add_argument("--startfrm_position", default=0, type=int, help="start frame position (default: 0)")
parser.add_argument("--is_training", default=False, type=bool, help="train or test mode")
parser.add_argument("--result_path", default='./result_LD37/', type=str, help="store results")
opts = parser.parse_args()
torch.cuda.set_device(opts.gpu_id)
txt_name = './MGANet_test_data_LD37.txt'
    # open(..., 'w+') creates the file if it does not exist, so no mknod branch is needed
    f = open(txt_name, 'w+')
one_filename = np.sort(glob.glob('../test_yuv/LD37/' + '*'))
print(one_filename)
patch_size =[240,416]
net_G = MGANet.Gen_Guided_UNet(batchNorm=False,input_size=patch_size,is_training=opts.is_training)
net_G.eval()
net_G.load_state_dict(torch.load(opts.net_G,map_location=lambda storage, loc: storage.cuda(opts.gpu_id)))
    print('Model loaded.')
net_G.cuda()
image_test(one_filename=one_filename,net_G=net_G,patch_size=patch_size,f_txt = f,opt = opts)
f.close()
| 9,952
| 41.900862
| 211
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/MGANet_test_AI37.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
from numpy import *
import math  # needed by PSNR below; not reliably provided by the star import
# from scipy.misc import imresize
from skimage.measure import compare_ssim
import cv2
import glob
import time
import os
import argparse
import Net.MGANet as MGANet
import torch
import copy
def yuv_import(filename, dims, startfrm, numframe):
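    # Same planar YUV 4:2:0 reader as in MGANet_test_LD37.py (full-resolution Y
    # plane followed by half-resolution U and V planes per frame).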
fp = open(filename, 'rb')
frame_size = np.prod(dims) * 3 / 2
fp.seek(0, 2)
ps = fp.tell()
totalfrm = int(ps // frame_size)
d00 = dims[0] // 2
d01 = dims[1] // 2
assert startfrm+numframe<=totalfrm
Y = np.zeros(shape=(numframe, 1,dims[0], dims[1]), dtype=np.uint8, order='C')
U = np.zeros(shape=(numframe, 1,d00, d01),dtype= np.uint8, order='C')
V = np.zeros(shape=(numframe, 1,d00, d01),dtype= np.uint8, order='C')
fp.seek(int(frame_size * startfrm), 0)
for i in range(startfrm,startfrm+numframe):
for m in range(dims[0]):
for n in range(dims[1]):
Y[i-startfrm,0, m, n] = ord(fp.read(1))
for m in range(d00):
for n in range(d01):
U[i-startfrm,0, m, n] = ord(fp.read(1))
for m in range(d00):
for n in range(d01):
V[i-startfrm,0, m, n] = ord(fp.read(1))
fp.close()
Y = Y.astype(np.float32)
U = U.astype(np.float32)
V = V.astype(np.float32)
return Y, U, V
def get_w_h(filename):
width = int((filename.split('x')[0]).split('_')[-1])
height = int((filename.split('x')[1]).split('_')[0])
return (height,width)
def get_data(one_filename,video_index,num_frame,startfrm_position):
one_filename_length = len(one_filename)
data_Y = []
for i in range(one_filename_length+1):
if i == 0:
data_37_filename = np.sort(glob.glob(one_filename[i]+'/*.yuv'))
data_37_filename_length = len(data_37_filename )
for i_0 in range(video_index,video_index+1):
file_name = data_37_filename[i_0]
dims = get_w_h(filename=file_name)
data_37_filename_Y,data_37_filename_U,data_37_filename_V = yuv_import(filename=file_name, dims=dims ,startfrm=startfrm_position,numframe=num_frame)
data_Y.append(data_37_filename_Y)
if i == 1:
mask_37_filename = np.sort(glob.glob(one_filename[i] + '/*.yuv'))
mask_37_filename_length = len(mask_37_filename)
for i_1 in range(video_index,video_index+1):
file_name = mask_37_filename[i_1]
dims = get_w_h(filename=file_name)
mask_37_filename_Y, mask_37_filename_U, mask_37_filename_V = yuv_import(filename=file_name, dims=dims,startfrm=startfrm_position, numframe=num_frame)
data_Y.append(mask_37_filename_Y)
if i == 2:
label_37_filename = np.sort(glob.glob('../test_yuv/label/' + '*.yuv'))
label_37_filename_length = len(label_37_filename)
for i_2 in range(video_index,video_index+1):
file_name = label_37_filename[i_2]
dims = get_w_h(filename=file_name)
label_37_filename_Y, label_37_filename_U, label_37_filename_V = yuv_import(filename=file_name, dims=dims,startfrm=startfrm_position, numframe=num_frame)
data_Y.append(label_37_filename_Y)
return data_Y
def test_batch(data_Y, start, batch_size=1):
data_pre = (data_Y[0][start-1:start,...])/255.0
data_cur = data_Y[0][start:start+1,...]/255.0
data_aft = data_Y[0][start+1:start+2,...]/255.0
mask = data_Y[1][start:start+1,...]/255.0
label = data_Y[2][start:start+1,...]
start+=1
return data_pre,data_cur,data_aft,mask,label,start
def PSNR(img1, img2):
mse = np.mean( (img1.astype(np.float32) - img2.astype(np.float32)) ** 2 ).astype(np.float32)
if mse == 0:
return 100
PIXEL_MAX = 255.0
return 20 * math.log10(PIXEL_MAX / math.sqrt(mse))
def image_test(one_filename,net_G,patch_size=[128,128],f_txt=None,opt=None):
ave_gain_psnr =0.
ave_psnr_predict =0.
ave_psnr_data =0.
video_num=opt.video_nums
for video_index in range(video_num):
data_37_filename = np.sort(glob.glob(one_filename[0]+'/*.yuv'))
data_Y = get_data(one_filename,video_index=video_index,num_frame=opt.frame_nums+5,startfrm_position=opt.startfrm_position)
start =1
psnr_gain_sum = 0
psnr_pre_gt_sum=0
psnr_data_gt_sum=0
nums =opt.frame_nums
for itr in range(0, nums):
data_pre, data_cur, data_aft, mask, label, start = test_batch(data_Y=data_Y, start=start, batch_size=1)
height = data_pre.shape[2]
width = data_pre.shape[3]
data_pre_value_patch = torch.from_numpy(data_pre).float().cuda()
data_cur_value_patch = torch.from_numpy(data_cur).float().cuda()
data_aft_value_patch = torch.from_numpy(data_aft).float().cuda()
data_mask_value_patch = torch.from_numpy(mask).float().cuda()
start_time = time.time()
fake_image = net_G(data_pre_value_patch,data_cur_value_patch,data_aft_value_patch,data_mask_value_patch)
end_time=time.time()
fake_image_numpy = fake_image.detach().cpu().numpy()
fake_image_numpy = np.squeeze(fake_image_numpy)*255.0
finally_image=np.squeeze(fake_image_numpy)
mask_image = np.squeeze(mask)*255.
os.makedirs(opt.result_path+'/result_enhanced_data/%02d'%(video_index+1),exist_ok = True)
os.makedirs(opt.result_path+'/result_mask/%02d'%(video_index+1),exist_ok = True)
os.makedirs(opt.result_path+'/result_label/%02d'%(video_index+1),exist_ok = True)
os.makedirs(opt.result_path+'/result_compression_data/%02d'%(video_index+1),exist_ok = True)
cv2.imwrite(opt.result_path+'/result_enhanced_data/%02d/%02d.png'%(video_index+1,itr+2),finally_image.astype(np.uint8))
cv2.imwrite(opt.result_path+'/result_mask/%02d/%02d.png'%(video_index+1,itr+2),mask_image.astype(np.uint8))
data_cur_image = (np.squeeze(data_cur)*255.0).astype(np.float32)
label = np.squeeze(label).astype(np.float32)
cv2.imwrite(opt.result_path+'/result_label/%02d/%02d.png'%(video_index+1,itr+2),label.astype(np.uint8))
cv2.imwrite(opt.result_path+'/result_compression_data/%02d/%02d.png'%(video_index+1,itr+2),data_cur_image.astype(np.uint8))
psnr_pre_gt = PSNR(finally_image, label)
psnr_data_gt = PSNR(data_cur_image, label)
psnr_gain = psnr_pre_gt - psnr_data_gt
psnr_gain_sum +=psnr_gain
psnr_pre_gt_sum+=psnr_pre_gt
psnr_data_gt_sum+=psnr_data_gt
print('psnr_gain:%.05f'%(psnr_gain))
print('psnr_predict:{:.04f} psnr_anchor:{:.04f} psnr_gain:{:.04f}'.format(psnr_pre_gt,psnr_data_gt,psnr_gain),file=f_txt)
print( data_37_filename[video_index])
print('video_index:{:2d} psnr_predict_average:{:.04f} psnr_data_average:{:.04f} psnr_gain_average:{:.04f}'.format(video_index,psnr_pre_gt_sum/nums,psnr_data_gt_sum/nums,psnr_gain_sum/nums),file=f_txt)
print('{}'.format(data_37_filename[video_index]),file=f_txt)
f_txt.write('\r\n')
ave_gain_psnr+=psnr_gain_sum/nums
ave_psnr_predict +=psnr_pre_gt_sum/nums
ave_psnr_data +=psnr_data_gt_sum/nums
print(' average_psnr_predict:{:.04f} average_psnr_anchor:{:.04f} average_psnr_gain:{:0.4f}'.format(ave_psnr_predict/video_num,ave_psnr_data/video_num,ave_gain_psnr/video_num))
print(' average_psnr_predict:{:.04f} average_psnr_anchor:{:.04f} average_psnr_gain:{:0.4f}'.format(ave_psnr_predict/video_num,ave_psnr_data/video_num,ave_gain_psnr/video_num), file=f_txt)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="MGANet_test")
parser.add_argument('--net_G', default='../model/model_epoch_AI37.pth',help="add checkpoint")
parser.add_argument("--gpu_id", default=0, type=int, help="gpu ids (default: 0)")
parser.add_argument("--video_nums", default=1, type=int, help="Videos number (default: 0)")
parser.add_argument("--frame_nums", default=29, type=int, help="frame number of the video to test (default: 90)")
parser.add_argument("--startfrm_position", default=9, type=int, help="start frame position in one video (default: 0)")
parser.add_argument("--is_training", default=False, type=bool, help="train or test mode")
parser.add_argument("--result_path", default='./result_AI37/', type=str, help="store results")
opts = parser.parse_args()
torch.cuda.set_device(opts.gpu_id)
txt_name = './MGANet_test_data_AI37.txt'
    # open(..., 'w+') creates the file if it does not exist, so no mknod branch is needed
    f = open(txt_name, 'w+')
one_filename = np.sort(glob.glob('../test_yuv/AI37/' + '*'))
print(one_filename)
patch_size =[240,416]
net_G = MGANet.Gen_Guided_UNet(batchNorm=False,input_size=patch_size,is_training=opts.is_training)
net_G.eval()
net_G.load_state_dict(torch.load(opts.net_G,map_location=lambda storage, loc: storage.cuda(opts.gpu_id)))
    print('Model loaded.')
net_G.cuda()
image_test(one_filename=one_filename,net_G=net_G,patch_size=patch_size,f_txt = f,opt = opts)
f.close()
| 9,464
| 41.443946
| 209
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/LSTM/functional.py
|
from functools import partial
import torch
import torch.nn.functional as F
from torch.nn._functions.thnn import rnnFusedPointwise as fusedBackend
from .utils import _single, _pair, _triple
def RNNReLUCell(input, hidden, w_ih, w_hh, b_ih=None, b_hh=None, linear_func=None):
""" Copied from torch.nn._functions.rnn and modified """
if linear_func is None:
linear_func = F.linear
hy = F.relu(linear_func(input, w_ih, b_ih) + linear_func(hidden, w_hh, b_hh))
return hy
def RNNTanhCell(input, hidden, w_ih, w_hh, b_ih=None, b_hh=None, linear_func=None):
""" Copied from torch.nn._functions.rnn and modified """
if linear_func is None:
linear_func = F.linear
hy = F.tanh(linear_func(input, w_ih, b_ih) + linear_func(hidden, w_hh, b_hh))
return hy
def LSTMCell(input, hidden, w_ih, w_hh, b_ih=None, b_hh=None, linear_func=None):
""" Copied from torch.nn._functions.rnn and modified """
if linear_func is None:
linear_func = F.linear
if input.is_cuda and linear_func is F.linear:
igates = linear_func(input, w_ih)
hgates = linear_func(hidden[0], w_hh)
state = fusedBackend.LSTMFused.apply
return state(igates, hgates, hidden[1]) if b_ih is None else state(igates, hgates, hidden[1], b_ih, b_hh)
hx, cx = hidden
gates = linear_func(input, w_ih, b_ih) + linear_func(hx, w_hh, b_hh)
ingate, forgetgate, cellgate, outgate = gates.chunk(4, 1)
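    # The 4*hidden gate activations are split in (input, forget, cell, output)
    # order, the same packing convention torch.nn.LSTM uses for its weights.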
ingate = F.sigmoid(ingate)
forgetgate = F.sigmoid(forgetgate)
cellgate = F.tanh(cellgate)
outgate = F.sigmoid(outgate)
cy = (forgetgate * cx) + (ingate * cellgate)
hy = outgate * F.tanh(cy)
return hy, cy
def PeepholeLSTMCell(input, hidden, w_ih, w_hh, w_pi, w_pf, w_po,
b_ih=None, b_hh=None, linear_func=None):
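    # Peephole LSTM (Gers & Schmidhuber): the input/forget gates additionally
    # see the previous cell state via w_pi/w_pf, and the output gate sees the
    # updated cell state via w_po, before their sigmoids are applied.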
if linear_func is None:
linear_func = F.linear
hx, cx = hidden
gates = linear_func(input, w_ih, b_ih) + linear_func(hx, w_hh, b_hh)
ingate, forgetgate, cellgate, outgate = gates.chunk(4, 1)
ingate += linear_func(cx, w_pi)
forgetgate += linear_func(cx, w_pf)
ingate = F.sigmoid(ingate)
forgetgate = F.sigmoid(forgetgate)
cellgate = F.tanh(cellgate)
cy = (forgetgate * cx) + (ingate * cellgate)
outgate += linear_func(cy, w_po)
outgate = F.sigmoid(outgate)
hy = outgate * F.tanh(cy)
return hy, cy
def GRUCell(input, hidden, w_ih, w_hh, b_ih=None, b_hh=None, linear_func=None):
""" Copied from torch.nn._functions.rnn and modified """
if linear_func is None:
linear_func = F.linear
if input.is_cuda and linear_func is F.linear:
gi = linear_func(input, w_ih)
gh = linear_func(hidden, w_hh)
state = fusedBackend.GRUFused.apply
return state(gi, gh, hidden) if b_ih is None else state(gi, gh, hidden, b_ih, b_hh)
gi = linear_func(input, w_ih, b_ih)
gh = linear_func(hidden, w_hh, b_hh)
i_r, i_i, i_n = gi.chunk(3, 1)
h_r, h_i, h_n = gh.chunk(3, 1)
resetgate = F.sigmoid(i_r + h_r)
inputgate = F.sigmoid(i_i + h_i)
newgate = F.tanh(i_n + resetgate * h_n)
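    # Standard GRU update h' = (1 - z) * n + z * h, rewritten as n + z * (h - n).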
hy = newgate + inputgate * (hidden - newgate)
return hy
def StackedRNN(inners, num_layers, lstm=False, dropout=0, train=True):
""" Copied from torch.nn._functions.rnn and modified """
num_directions = len(inners)
total_layers = num_layers * num_directions
def forward(input, hidden, weight, batch_sizes):
assert(len(weight) == total_layers)
next_hidden = []
ch_dim = input.dim() - weight[0][0].dim() + 1
if lstm:
hidden = list(zip(*hidden))
for i in range(num_layers):
all_output = []
for j, inner in enumerate(inners):
l = i * num_directions + j
hy, output = inner(input, hidden[l], weight[l], batch_sizes)
next_hidden.append(hy)
all_output.append(output)
input = torch.cat(all_output, ch_dim)
if dropout != 0 and i < num_layers - 1:
input = F.dropout(input, p=dropout, training=train, inplace=False)
if lstm:
next_h, next_c = zip(*next_hidden)
next_hidden = (
torch.cat(next_h, 0).view(total_layers, *next_h[0].size()),
torch.cat(next_c, 0).view(total_layers, *next_c[0].size())
)
else:
next_hidden = torch.cat(next_hidden, 0).view(
total_layers, *next_hidden[0].size())
return next_hidden, input
return forward
def Recurrent(inner, reverse=False):
""" Copied from torch.nn._functions.rnn without any modification """
def forward(input, hidden, weight, batch_sizes):
output = []
steps = range(input.size(0) - 1, -1, -1) if reverse else range(input.size(0))
for i in steps:
hidden = inner(input[i], hidden, *weight)
# hack to handle LSTM
output.append(hidden[0] if isinstance(hidden, tuple) else hidden)
if reverse:
output.reverse()
output = torch.cat(output, 0).view(input.size(0), *output[0].size())
return hidden, output
return forward
def variable_recurrent_factory(inner, reverse=False):
""" Copied from torch.nn._functions.rnn without any modification """
if reverse:
return VariableRecurrentReverse(inner)
else:
return VariableRecurrent(inner)
def VariableRecurrent(inner):
""" Copied from torch.nn._functions.rnn without any modification """
def forward(input, hidden, weight, batch_sizes):
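        # Packed-sequence recurrence: batch_sizes is non-increasing over time,
        # so hidden states of finished sequences are peeled off (and saved) as
        # the effective batch shrinks, then re-assembled at the end.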
output = []
input_offset = 0
last_batch_size = batch_sizes[0]
hiddens = []
flat_hidden = not isinstance(hidden, tuple)
if flat_hidden:
hidden = (hidden,)
for batch_size in batch_sizes:
step_input = input[input_offset:input_offset + batch_size]
input_offset += batch_size
dec = last_batch_size - batch_size
if dec > 0:
hiddens.append(tuple(h[-dec:] for h in hidden))
hidden = tuple(h[:-dec] for h in hidden)
last_batch_size = batch_size
if flat_hidden:
hidden = (inner(step_input, hidden[0], *weight),)
else:
hidden = inner(step_input, hidden, *weight)
output.append(hidden[0])
hiddens.append(hidden)
hiddens.reverse()
hidden = tuple(torch.cat(h, 0) for h in zip(*hiddens))
assert hidden[0].size(0) == batch_sizes[0]
if flat_hidden:
hidden = hidden[0]
output = torch.cat(output, 0)
return hidden, output
return forward
def VariableRecurrentReverse(inner):
""" Copied from torch.nn._functions.rnn without any modification """
def forward(input, hidden, weight, batch_sizes):
output = []
input_offset = input.size(0)
last_batch_size = batch_sizes[-1]
initial_hidden = hidden
flat_hidden = not isinstance(hidden, tuple)
if flat_hidden:
hidden = (hidden,)
initial_hidden = (initial_hidden,)
hidden = tuple(h[:batch_sizes[-1]] for h in hidden)
for i in reversed(range(len(batch_sizes))):
batch_size = batch_sizes[i]
inc = batch_size - last_batch_size
if inc > 0:
hidden = tuple(torch.cat((h, ih[last_batch_size:batch_size]), 0)
for h, ih in zip(hidden, initial_hidden))
last_batch_size = batch_size
step_input = input[input_offset - batch_size:input_offset]
input_offset -= batch_size
if flat_hidden:
hidden = (inner(step_input, hidden[0], *weight),)
else:
hidden = inner(step_input, hidden, *weight)
output.append(hidden[0])
output.reverse()
output = torch.cat(output, 0)
if flat_hidden:
hidden = hidden[0]
return hidden, output
return forward
def ConvNdWithSamePadding(convndim=2, stride=1, dilation=1, groups=1):
def forward(input, w, b=None):
if convndim == 1:
ntuple = _single
elif convndim == 2:
ntuple = _pair
elif convndim == 3:
ntuple = _triple
else:
raise ValueError('convndim must be 1, 2, or 3, but got {}'.format(convndim))
        if input.dim() != convndim + 2:
            raise RuntimeError('Input dim must be {}, but got {}'.format(convndim + 2, input.dim()))
        if w.dim() != convndim + 2:
            raise RuntimeError('w dim must be {}, but got {}'.format(convndim + 2, w.dim()))
insize = input.shape[2:]
kernel_size = w.shape[2:]
_stride = ntuple(stride)
_dilation = ntuple(dilation)
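        # Compute 'same' padding per spatial dim so the output size equals the
        # input size at the given stride/dilation; F.pad expects (left, right)
        # pairs starting from the last dimension, hence the reversed [::-1].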
ps = [(i + 1 - h + s * (h - 1) + d * (k - 1)) // 2
for h, k, s, d in list(zip(insize, kernel_size, _stride, _dilation))[::-1] for i in range(2)]
# Padding to make the output shape to have the same shape as the input
input = F.pad(input, ps, 'constant', 0)
return getattr(F, 'conv{}d'.format(convndim))(
input, w, b, stride=_stride, padding=ntuple(0), dilation=_dilation, groups=groups)
return forward
def _conv_cell_helper(mode, convndim=2, stride=1, dilation=1, groups=1):
linear_func = ConvNdWithSamePadding(convndim=convndim, stride=stride, dilation=dilation, groups=groups)
if mode == 'RNN_RELU':
cell = partial(RNNReLUCell, linear_func=linear_func)
elif mode == 'RNN_TANH':
cell = partial(RNNTanhCell, linear_func=linear_func)
elif mode == 'LSTM':
cell = partial(LSTMCell, linear_func=linear_func)
elif mode == 'GRU':
cell = partial(GRUCell, linear_func=linear_func)
elif mode == 'PeepholeLSTM':
cell = partial(PeepholeLSTMCell, linear_func=linear_func)
else:
raise Exception('Unknown mode: {}'.format(mode))
return cell
def AutogradConvRNN(
mode, num_layers=1, batch_first=False,
dropout=0, train=True, bidirectional=False, variable_length=False,
convndim=2, stride=1, dilation=1, groups=1):
""" Copied from torch.nn._functions.rnn and modified """
cell = _conv_cell_helper(mode, convndim=convndim, stride=stride, dilation=dilation, groups=groups)
rec_factory = variable_recurrent_factory if variable_length else Recurrent
if bidirectional:
layer = (rec_factory(cell), rec_factory(cell, reverse=True))
else:
layer = (rec_factory(cell),)
func = StackedRNN(layer, num_layers, (mode in ('LSTM', 'PeepholeLSTM')), dropout=dropout, train=train)
def forward(input, weight, hidden, batch_sizes):
if batch_first and batch_sizes is None:
input = input.transpose(0, 1)
nexth, output = func(input, hidden, weight, batch_sizes)
if batch_first and batch_sizes is None:
output = output.transpose(0, 1)
return output, nexth
return forward
| 11,086
| 33.755486
| 113
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/LSTM/utils.py
|
import collections.abc
from itertools import repeat
""" Copied from torch.nn.modules.utils """
def _ntuple(n):
    def parse(x):
        if isinstance(x, collections.abc.Iterable):
return x
return tuple(repeat(x, n))
return parse
_single = _ntuple(1)
_pair = _ntuple(2)
_triple = _ntuple(3)
_quadruple = _ntuple(4)
| 337
| 15.9
| 47
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/LSTM/module.py
|
import math
from typing import Union, Sequence
import torch
from torch.nn import Parameter
from torch.nn.utils.rnn import PackedSequence
from .functional import AutogradConvRNN, _conv_cell_helper
from .utils import _single, _pair, _triple
class ConvNdRNNBase(torch.nn.Module):
def __init__(self,
mode: str,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
convndim: int=2,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__()
self.mode = mode
self.in_channels = in_channels
self.out_channels = out_channels
self.num_layers = num_layers
self.bias = bias
self.batch_first = batch_first
self.dropout = dropout
self.bidirectional = bidirectional
self.convndim = convndim
if convndim == 1:
ntuple = _single
elif convndim == 2:
ntuple = _pair
elif convndim == 3:
ntuple = _triple
else:
raise ValueError('convndim must be 1, 2, or 3, but got {}'.format(convndim))
self.kernel_size = ntuple(kernel_size)
self.stride = ntuple(stride)
self.dilation = ntuple(dilation)
self.groups = groups
num_directions = 2 if bidirectional else 1
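        # Gate channel multiplicity: LSTM variants stack 4 gates per output
        # channel, GRU stacks 3, and plain RNN cells use a single transform.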
if mode in ('LSTM', 'PeepholeLSTM'):
gate_size = 4 * out_channels
elif mode == 'GRU':
gate_size = 3 * out_channels
else:
gate_size = out_channels
self._all_weights = []
for layer in range(num_layers):
for direction in range(num_directions):
layer_input_size = in_channels if layer == 0 else out_channels * num_directions
w_ih = Parameter(torch.Tensor(gate_size, layer_input_size // groups, *self.kernel_size))
w_hh = Parameter(torch.Tensor(gate_size, out_channels // groups, *self.kernel_size))
b_ih = Parameter(torch.Tensor(gate_size))
b_hh = Parameter(torch.Tensor(gate_size))
if mode == 'PeepholeLSTM':
w_pi = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
w_pf = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
w_po = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
layer_params = (w_ih, w_hh, w_pi, w_pf, w_po, b_ih, b_hh)
param_names = ['weight_ih_l{}{}', 'weight_hh_l{}{}',
'weight_pi_l{}{}', 'weight_pf_l{}{}', 'weight_po_l{}{}']
else:
layer_params = (w_ih, w_hh, b_ih, b_hh)
param_names = ['weight_ih_l{}{}', 'weight_hh_l{}{}']
if bias:
param_names += ['bias_ih_l{}{}', 'bias_hh_l{}{}']
suffix = '_reverse' if direction == 1 else ''
param_names = [x.format(layer, suffix) for x in param_names]
for name, param in zip(param_names, layer_params):
setattr(self, name, param)
self._all_weights.append(param_names)
self.reset_parameters()
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.out_channels)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def check_forward_args(self, input, hidden, batch_sizes):
is_input_packed = batch_sizes is not None
expected_input_dim = (2 if is_input_packed else 3) + self.convndim
if input.dim() != expected_input_dim:
raise RuntimeError(
'input must have {} dimensions, got {}'.format(
expected_input_dim, input.dim()))
ch_dim = 1 if is_input_packed else 2
if self.in_channels != input.size(ch_dim):
raise RuntimeError(
                'input.size({}) must be equal to in_channels. Expected {}, got {}'.format(
ch_dim, self.in_channels, input.size(ch_dim)))
if is_input_packed:
mini_batch = int(batch_sizes[0])
else:
mini_batch = input.size(0) if self.batch_first else input.size(1)
num_directions = 2 if self.bidirectional else 1
expected_hidden_size = (self.num_layers * num_directions,
mini_batch, self.out_channels) + input.shape[ch_dim + 1:]
def check_hidden_size(hx, expected_hidden_size, msg='Expected hidden size {}, got {}'):
if tuple(hx.size()) != expected_hidden_size:
raise RuntimeError(msg.format(expected_hidden_size, tuple(hx.size())))
if self.mode in ('LSTM', 'PeepholeLSTM'):
check_hidden_size(hidden[0], expected_hidden_size,
'Expected hidden[0] size {}, got {}')
check_hidden_size(hidden[1], expected_hidden_size,
'Expected hidden[1] size {}, got {}')
else:
check_hidden_size(hidden, expected_hidden_size)
def forward(self, input, hx=None):
is_packed = isinstance(input, PackedSequence)
if is_packed:
input, batch_sizes = input
max_batch_size = batch_sizes[0]
insize = input.shape[2:]
else:
batch_sizes = None
max_batch_size = input.size(0) if self.batch_first else input.size(1)
insize = input.shape[3:]
if hx is None:
num_directions = 2 if self.bidirectional else 1
hx = input.new_zeros(self.num_layers * num_directions, max_batch_size, self.out_channels,
*insize, requires_grad=False)
if self.mode in ('LSTM', 'PeepholeLSTM'):
hx = (hx, hx)
self.check_forward_args(input, hx, batch_sizes)
func = AutogradConvRNN(
self.mode,
num_layers=self.num_layers,
batch_first=self.batch_first,
dropout=self.dropout,
train=self.training,
bidirectional=self.bidirectional,
variable_length=batch_sizes is not None,
convndim=self.convndim,
stride=self.stride,
dilation=self.dilation,
groups=self.groups
)
output, hidden = func(input, self.all_weights, hx, batch_sizes)
if is_packed:
output = PackedSequence(output, batch_sizes)
return output, hidden
def extra_repr(self):
s = ('{in_channels}, {out_channels}, kernel_size={kernel_size}'
', stride={stride}')
if self.dilation != (1,) * len(self.dilation):
s += ', dilation={dilation}'
if self.groups != 1:
s += ', groups={groups}'
if self.num_layers != 1:
s += ', num_layers={num_layers}'
if self.bias is not True:
s += ', bias={bias}'
if self.batch_first is not False:
s += ', batch_first={batch_first}'
if self.dropout != 0:
s += ', dropout={dropout}'
if self.bidirectional is not False:
s += ', bidirectional={bidirectional}'
return s.format(**self.__dict__)
def __setstate__(self, d):
super(ConvNdRNNBase, self).__setstate__(d)
if 'all_weights' in d:
self._all_weights = d['all_weights']
if isinstance(self._all_weights[0][0], str):
return
num_layers = self.num_layers
num_directions = 2 if self.bidirectional else 1
self._all_weights = []
for layer in range(num_layers):
for direction in range(num_directions):
suffix = '_reverse' if direction == 1 else ''
if self.mode == 'PeepholeLSTM':
weights = ['weight_ih_l{}{}', 'weight_hh_l{}{}',
'weight_pi_l{}{}', 'weight_pf_l{}{}', 'weight_po_l{}{}',
'bias_ih_l{}{}', 'bias_hh_l{}{}']
else:
weights = ['weight_ih_l{}{}', 'weight_hh_l{}{}',
'bias_ih_l{}{}', 'bias_hh_l{}{}']
weights = [x.format(layer, suffix) for x in weights]
if self.bias:
self._all_weights += [weights]
else:
self._all_weights += [weights[:len(weights) // 2]]
@property
def all_weights(self):
return [[getattr(self, weight) for weight in weights] for weights in self._all_weights]
class Conv1dRNN(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
nonlinearity: str='tanh',
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
if nonlinearity == 'tanh':
mode = 'RNN_TANH'
elif nonlinearity == 'relu':
mode = 'RNN_RELU'
else:
raise ValueError("Unknown nonlinearity '{}'".format(nonlinearity))
super().__init__(
mode=mode,
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=1,
stride=stride,
dilation=dilation,
groups=groups)
class Conv1dPeepholeLSTM(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='PeepholeLSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=1,
stride=stride,
dilation=dilation,
groups=groups)
class Conv1dLSTM(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='LSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=1,
stride=stride,
dilation=dilation,
groups=groups)
class Conv1dGRU(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='GRU',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=1,
stride=stride,
dilation=dilation,
groups=groups)
class Conv2dRNN(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
nonlinearity: str='tanh',
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
if nonlinearity == 'tanh':
mode = 'RNN_TANH'
elif nonlinearity == 'relu':
mode = 'RNN_RELU'
else:
raise ValueError("Unknown nonlinearity '{}'".format(nonlinearity))
super().__init__(
mode=mode,
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=2,
stride=stride,
dilation=dilation,
groups=groups)
class Conv2dLSTM(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='LSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=2,
stride=stride,
dilation=dilation,
groups=groups)
class Conv2dPeepholeLSTM(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='PeepholeLSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=2,
stride=stride,
dilation=dilation,
groups=groups)
class Conv2dGRU(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='GRU',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=2,
stride=stride,
dilation=dilation,
groups=groups)
class Conv3dRNN(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
nonlinearity: str='tanh',
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
if nonlinearity == 'tanh':
mode = 'RNN_TANH'
elif nonlinearity == 'relu':
mode = 'RNN_RELU'
else:
raise ValueError("Unknown nonlinearity '{}'".format(nonlinearity))
super().__init__(
mode=mode,
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=3,
stride=stride,
dilation=dilation,
groups=groups)
class Conv3dLSTM(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='LSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=3,
stride=stride,
dilation=dilation,
groups=groups)
class Conv3dPeepholeLSTM(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='PeepholeLSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=3,
stride=stride,
dilation=dilation,
groups=groups)
class Conv3dGRU(ConvNdRNNBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
num_layers: int=1,
bias: bool=True,
batch_first: bool=False,
dropout: float=0.,
bidirectional: bool=False,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1):
super().__init__(
mode='GRU',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
num_layers=num_layers,
bias=bias,
batch_first=batch_first,
dropout=dropout,
bidirectional=bidirectional,
convndim=3,
stride=stride,
dilation=dilation,
groups=groups)
class ConvRNNCellBase(torch.nn.Module):
def __init__(self,
mode: str,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
convndim: int=2,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__()
self.mode = mode
self.in_channels = in_channels
self.out_channels = out_channels
self.bias = bias
self.convndim = convndim
if convndim == 1:
ntuple = _single
elif convndim == 2:
ntuple = _pair
elif convndim == 3:
ntuple = _triple
else:
raise ValueError('convndim must be 1, 2, or 3, but got {}'.format(convndim))
self.kernel_size = ntuple(kernel_size)
self.stride = ntuple(stride)
self.dilation = ntuple(dilation)
self.groups = groups
if mode in ('LSTM', 'PeepholeLSTM'):
gate_size = 4 * out_channels
elif mode == 'GRU':
gate_size = 3 * out_channels
else:
gate_size = out_channels
self.weight_ih = Parameter(torch.Tensor(gate_size, in_channels // groups, *self.kernel_size))
self.weight_hh = Parameter(torch.Tensor(gate_size, out_channels // groups, *self.kernel_size))
if bias:
self.bias_ih = Parameter(torch.Tensor(gate_size))
self.bias_hh = Parameter(torch.Tensor(gate_size))
else:
self.register_parameter('bias_ih', None)
self.register_parameter('bias_hh', None)
if mode == 'PeepholeLSTM':
self.weight_pi = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
self.weight_pf = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
self.weight_po = Parameter(torch.Tensor(out_channels, out_channels // groups, *self.kernel_size))
self.reset_parameters()
def extra_repr(self):
s = ('{in_channels}, {out_channels}, kernel_size={kernel_size}'
', stride={stride}')
if self.dilation != (1,) * len(self.dilation):
s += ', dilation={dilation}'
if self.groups != 1:
s += ', groups={groups}'
if self.bias is not True:
s += ', bias={bias}'
        # Cells have no `bidirectional` attribute, so it is not reported here.
        return s.format(**self.__dict__)
def check_forward_input(self, input):
if input.size(1) != self.in_channels:
raise RuntimeError(
"input has inconsistent channels: got {}, expected {}".format(
input.size(1), self.in_channels))
def check_forward_hidden(self, input, hx, hidden_label=''):
if input.size(0) != hx.size(0):
raise RuntimeError(
"Input batch size {} doesn't match hidden{} batch size {}".format(
input.size(0), hidden_label, hx.size(0)))
if hx.size(1) != self.out_channels:
raise RuntimeError(
"hidden{} has inconsistent hidden_size: got {}, expected {}".format(
hidden_label, hx.size(1), self.out_channels))
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.out_channels)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def forward(self, input, hx=None):
self.check_forward_input(input)
if hx is None:
batch_size = input.size(0)
insize = input.shape[2:]
hx = input.new_zeros(batch_size, self.out_channels, *insize, requires_grad=False)
if self.mode in ('LSTM', 'PeepholeLSTM'):
hx = (hx, hx)
if self.mode in ('LSTM', 'PeepholeLSTM'):
self.check_forward_hidden(input, hx[0])
self.check_forward_hidden(input, hx[1])
else:
self.check_forward_hidden(input, hx)
cell = _conv_cell_helper(
self.mode,
convndim=self.convndim,
stride=self.stride,
dilation=self.dilation,
groups=self.groups)
if self.mode == 'PeepholeLSTM':
return cell(
input, hx,
self.weight_ih, self.weight_hh, self.weight_pi, self.weight_pf, self.weight_po,
self.bias_ih, self.bias_hh
)
else:
return cell(
input, hx,
self.weight_ih, self.weight_hh,
self.bias_ih, self.bias_hh,
)
class Conv1dRNNCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
nonlinearity: str='tanh',
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
if nonlinearity == 'tanh':
mode = 'RNN_TANH'
elif nonlinearity == 'relu':
mode = 'RNN_RELU'
else:
raise ValueError("Unknown nonlinearity '{}'".format(nonlinearity))
super().__init__(
mode=mode,
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=1,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv1dLSTMCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='LSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=1,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv1dPeepholeLSTMCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='PeepholeLSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=1,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv1dGRUCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='GRU',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=1,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv2dRNNCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
nonlinearity: str='tanh',
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
if nonlinearity == 'tanh':
mode = 'RNN_TANH'
elif nonlinearity == 'relu':
mode = 'RNN_RELU'
else:
raise ValueError("Unknown nonlinearity '{}'".format(nonlinearity))
super().__init__(
mode=mode,
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=2,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv2dLSTMCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='LSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=2,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv2dPeepholeLSTMCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='PeepholeLSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=2,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv2dGRUCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='GRU',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=2,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv3dRNNCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
nonlinearity: str='tanh',
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
if nonlinearity == 'tanh':
mode = 'RNN_TANH'
elif nonlinearity == 'relu':
mode = 'RNN_RELU'
else:
raise ValueError("Unknown nonlinearity '{}'".format(nonlinearity))
super().__init__(
mode=mode,
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=3,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv3dLSTMCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='LSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=3,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv3dPeepholeLSTMCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='PeepholeLSTM',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=3,
stride=stride,
dilation=dilation,
groups=groups
)
class Conv3dGRUCell(ConvRNNCellBase):
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Union[int, Sequence[int]],
bias: bool=True,
stride: Union[int, Sequence[int]]=1,
dilation: Union[int, Sequence[int]]=1,
groups: int=1
):
super().__init__(
mode='GRU',
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
bias=bias,
convndim=3,
stride=stride,
dilation=dilation,
groups=groups
)
| 35,171
| 33.789318
| 109
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/LSTM/BiConvLSTM.py
|
import torch.nn as nn
from torch.autograd import Variable
import torch
torch.cuda.set_device(0)
class BiConvLSTMCell(nn.Module):
def __init__(self, input_size, input_dim, hidden_dim, kernel_size, bias):
"""
Initialize ConvLSTM cell.
Parameters
----------
input_size: (int, int)
Height and width of input tensor as (height, width).
input_dim: int
Number of channels of input tensor.
hidden_dim: int
Number of channels of hidden state.
kernel_size: (int, int)
Size of the convolutional kernel.
bias: bool
Whether or not to add the bias.
"""
super(BiConvLSTMCell, self).__init__()
self.height, self.width = input_size
self.input_dim = input_dim
self.hidden_dim = hidden_dim
self.kernel_size = kernel_size
# NOTE: This keeps height and width the same
self.padding = kernel_size[0] // 2, kernel_size[1] // 2
self.bias = bias
self.conv = nn.Conv2d(in_channels=self.input_dim + self.hidden_dim,
out_channels=4 * self.hidden_dim,
kernel_size=self.kernel_size,
padding=self.padding,
bias=self.bias)
# TODO: we may want this to be different than the conv we use inside each cell
self.conv_concat = nn.Conv2d(in_channels=self.input_dim + self.hidden_dim,
out_channels=self.hidden_dim,
kernel_size=self.kernel_size,
padding=self.padding,
bias=self.bias)
def forward(self, input_tensor, cur_state):
h_cur, c_cur = cur_state
# print(input_tensor.shape,h_cur.shape)
combined = torch.cat([input_tensor, h_cur], dim=1) # concatenate along channel axis
# print('...',combined.shape)
combined_conv = self.conv(combined)
cc_i, cc_f, cc_o, cc_g = torch.split(combined_conv, self.hidden_dim, dim=1)
i = torch.sigmoid(cc_i)
f = torch.sigmoid(cc_f)
o = torch.sigmoid(cc_o)
g = torch.tanh(cc_g)
c_next = f * c_cur + i * g
h_next = o * torch.tanh(c_next)
return h_next, c_next
class BiConvLSTM(nn.Module):
def __init__(self, input_size, input_dim, hidden_dim, kernel_size, num_layers,
bias=True, return_all_layers=False):
super(BiConvLSTM, self).__init__()
self._check_kernel_size_consistency(kernel_size)
# Make sure that both `kernel_size` and `hidden_dim` are lists having len == num_layers
kernel_size = self._extend_for_multilayer(kernel_size, num_layers)
hidden_dim = self._extend_for_multilayer(hidden_dim, num_layers)
if not len(kernel_size) == len(hidden_dim) == num_layers:
raise ValueError('Inconsistent list length.')
self.height, self.width = input_size
self.input_dim = input_dim
self.hidden_dim = hidden_dim
self.kernel_size = kernel_size
self.num_layers = num_layers
self.bias = bias
self.return_all_layers = return_all_layers
cell_list = []
for i in range(0, self.num_layers):
cur_input_dim = self.input_dim if i == 0 else self.hidden_dim[i - 1]
cell_list.append(BiConvLSTMCell(input_size=(self.height, self.width),
input_dim=cur_input_dim,
hidden_dim=self.hidden_dim[i],
kernel_size=self.kernel_size[i],
bias=self.bias))
self.cell_list = nn.ModuleList(cell_list)
def forward(self, input_tensor):
hidden_state = self._init_hidden(batch_size=input_tensor.size(0), cuda=input_tensor.is_cuda)
layer_output_list = []
seq_len = input_tensor.size(1)
cur_layer_input = input_tensor
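        # For each layer: run the cell backward in time, then forward, and fuse
        # the time-aligned forward/backward hidden states with conv_concat.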
for layer_idx in range(self.num_layers):
backward_states = []
forward_states = []
output_inner = []
hb, cb = hidden_state[layer_idx]
# print('hb,cb',hb.shape,cb.shape)
for t in range(seq_len):
hb, cb = self.cell_list[layer_idx](input_tensor=cur_layer_input[:, seq_len - t - 1, :, :, :], cur_state=[hb, cb])
backward_states.append(hb)
hf, cf = hidden_state[layer_idx]
for t in range(seq_len):
hf, cf = self.cell_list[layer_idx](input_tensor=cur_layer_input[:, t, :, :, :], cur_state=[hf, cf])
# print('hf:',hf.shape)
forward_states.append(hf)
for t in range(seq_len):
h = self.cell_list[layer_idx].conv_concat(torch.cat((forward_states[t], backward_states[seq_len - t - 1]), dim=1))
# print('h',h.shape)
output_inner.append(h)
layer_output = torch.stack(output_inner, dim=1)
cur_layer_input = layer_output
layer_output_list.append(layer_output)
if not self.return_all_layers:
return layer_output_list[-1]
return layer_output_list
    def _init_hidden(self, batch_size, cuda):
        init_states = []
        for i in range(self.num_layers):
            h = torch.zeros(batch_size, self.hidden_dim[i], self.height, self.width)
            c = torch.zeros(batch_size, self.hidden_dim[i], self.height, self.width)
            if cuda:
                # Only move the initial states to the GPU when the input is there.
                h, c = h.cuda(), c.cuda()
            init_states.append((Variable(h), Variable(c)))
        return init_states
@staticmethod
def _check_kernel_size_consistency(kernel_size):
if not (isinstance(kernel_size, tuple) or
(isinstance(kernel_size, list) and all([isinstance(elem, tuple) for elem in kernel_size]))):
raise ValueError('`kernel_size` must be tuple or list of tuples')
@staticmethod
def _extend_for_multilayer(param, num_layers):
if not isinstance(param, list):
param = [param] * num_layers
return param
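# --- Minimal usage sketch (illustrative, not part of the original file). ---
# Shapes below are hypothetical: a batch of 2 sequences of 3 frames, each
# with 8 channels at 16x16 resolution.
if __name__ == '__main__':
    model = BiConvLSTM(input_size=(16, 16), input_dim=8, hidden_dim=8,
                       kernel_size=(3, 3), num_layers=1)
    x = torch.randn(2, 3, 8, 16, 16)  # (batch, seq_len, channels, height, width)
    out = model(x)
    print(out.shape)  # torch.Size([2, 3, 8, 16, 16])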
| 6,521
| 39.259259
| 130
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/LSTM/__init__.py
|
from .module import Conv1dRNN
from .module import Conv1dLSTM
from .module import Conv1dPeepholeLSTM
from .module import Conv1dGRU
from .module import Conv2dRNN
from .module import Conv2dLSTM
from .module import Conv2dPeepholeLSTM
from .module import Conv2dGRU
from .module import Conv3dRNN
from .module import Conv3dLSTM
from .module import Conv3dPeepholeLSTM
from .module import Conv3dGRU
from .module import Conv1dRNNCell
from .module import Conv1dLSTMCell
from .module import Conv1dPeepholeLSTMCell
from .module import Conv1dGRUCell
from .module import Conv2dRNNCell
from .module import Conv2dLSTMCell
from .module import Conv2dPeepholeLSTMCell
from .module import Conv2dGRUCell
from .module import Conv3dRNNCell
from .module import Conv3dLSTMCell
from .module import Conv3dPeepholeLSTMCell
from .module import Conv3dGRUCell
| 833
| 26.8
| 42
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/Net/net_view.py
|
from graphviz import Digraph
from torch.autograd import Variable
import torch
def make_dot(var, params=None):
""" Produces Graphviz representation of PyTorch autograd graph
Blue nodes are the Variables that require grad, orange are Tensors
saved for backward in torch.autograd.Function
Args:
var: output Variable
params: dict of (name, Variable) to add names to node that
require grad (TODO: make optional)
"""
    if params is not None:
        # dict views are not indexable in Python 3
        assert isinstance(list(params.values())[0], Variable)
param_map = {id(v): k for k, v in params.items()}
node_attr = dict(style='filled',
shape='box',
align='left',
fontsize='12',
ranksep='0.1',
height='0.2')
dot = Digraph(node_attr=node_attr, graph_attr=dict(size="12,12"))
seen = set()
def size_to_str(size):
return '('+(', ').join(['%d' % v for v in size])+')'
def add_nodes(var):
if var not in seen:
if torch.is_tensor(var):
dot.node(str(id(var)), size_to_str(var.size()), fillcolor='orange')
elif hasattr(var, 'variable'):
u = var.variable
name = param_map[id(u)] if params is not None else ''
node_name = '%s\n %s' % (name, size_to_str(u.size()))
dot.node(str(id(var)), node_name, fillcolor='lightblue')
else:
dot.node(str(id(var)), str(type(var).__name__))
seen.add(var)
if hasattr(var, 'next_functions'):
for u in var.next_functions:
if u[0] is not None:
dot.edge(str(id(u[0])), str(id(var)))
add_nodes(u[0])
if hasattr(var, 'saved_tensors'):
for t in var.saved_tensors:
dot.edge(str(id(t)), str(id(var)))
add_nodes(t)
add_nodes(var.grad_fn)
return dot
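# --- Minimal usage sketch (illustrative, not part of the original file). ---
# Renders the autograd graph of a tiny linear model; the final render step
# additionally requires the graphviz system binary to be installed.
if __name__ == '__main__':
    import torch.nn as nn
    model = nn.Linear(4, 2)
    y = model(torch.randn(1, 4))
    dot = make_dot(y)
    dot.format = 'pdf'
    dot.render('net_graph')  # writes net_graph.pdf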
| 2,016
| 37.788462
| 83
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/Net/multiscaleloss.py
|
import torch
import torch.nn as nn
def EPE(input_image, target_image,L_model=None):
loss_L2 = L_model(input_image,target_image)
return loss_L2
# EPE_map = torch.norm(target_image-input_image,2,1)
# batch_size = EPE_map.size(0)
#
# if mean:
# return EPE_map.mean()
# else:
# return EPE_map.sum()/batch_size
def sparse_max_pool(input, size):
positive = (input > 0).float()
negative = (input < 0).float()
output = nn.functional.adaptive_max_pool2d(input * positive, size) - nn.functional.adaptive_max_pool2d(-input * negative, size)
return output
def multiscaleEPE(network_output, target_image, weights=None, L_model=None):
def one_scale(output, target, L_model):
b, _, h, w = output.size()
target_scaled = nn.functional.adaptive_avg_pool2d(target, (h, w))
return EPE(output, target_scaled, L_model)
if type(network_output) not in [tuple, list]:
network_output = [network_output]
if weights is None:
        weights = [1.0/32, 1.0/16, 1.0/8, 1.0/4, 1.0/2]
assert(len(weights) == len(network_output))
loss = 0
for output, weight in zip(network_output, weights):
loss += weight * one_scale(output, target_image,L_model)
return loss
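# --- Minimal usage sketch (illustrative, not part of the original file). ---
# L_model can be any pixel-wise loss module; nn.MSELoss is used here as a
# stand-in, and all tensor sizes are made up (coarsest to finest outputs).
if __name__ == '__main__':
    target = torch.rand(2, 1, 64, 64)
    outputs = [torch.rand(2, 1, 64 >> k, 64 >> k) for k in (4, 3, 2, 1, 0)]
    loss = multiscaleEPE(outputs, target, L_model=nn.MSELoss())
    print(loss.item())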
| 1,280
| 26.255319
| 131
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/Net/MGANet.py
|
import torch
import torch.nn as nn
from torch.nn.init import kaiming_normal_  # kaiming_normal was removed in recent PyTorch
from LSTM.BiConvLSTM import BiConvLSTM
def conv(batchNorm, in_planes, out_planes, kernel_size=3, stride=1):
if batchNorm:
return nn.Sequential(
nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=(kernel_size-1)//2, bias=False),
nn.BatchNorm2d(out_planes),
nn.LeakyReLU(0.05,inplace=True)
)
else:
return nn.Sequential(
nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=(kernel_size-1)//2, bias=True),
nn.LeakyReLU(0.05,inplace=True)
)
def conv_no_lrelu(batchNorm, in_planes, out_planes, kernel_size=3, stride=1):
if batchNorm:
return nn.Sequential(
nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=(kernel_size-1)//2, bias=False),
nn.BatchNorm2d(out_planes)
)
else:
return nn.Sequential(
nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=(kernel_size-1)//2, bias=True),
)
def predict_image(in_planes):
return nn.Conv2d(in_planes,1,kernel_size=3,stride=1,padding=1,bias=False)
def deconv(in_planes, out_planes):
return nn.Sequential(
nn.ConvTranspose2d(in_planes, out_planes, kernel_size=4, stride=2, padding=1, bias=False),
nn.LeakyReLU(0.05,inplace=True)
)
def crop_like(input, target):
if input.size()[2:] == target.size()[2:]:
return input
else:
return input[:, :, :target.size(2), :target.size(3)]
class Gen_Guided_UNet(nn.Module):
expansion = 1
def __init__(self,batchNorm=True,input_size=[240,416],is_training=True):
super(Gen_Guided_UNet,self).__init__()
self.batchNorm = batchNorm
self.is_training = is_training
self.pre_conv1 = conv(self.batchNorm, 1, 64, kernel_size=3, stride=1)
self.pre_conv1_1 = conv(self.batchNorm, 64, 64, kernel_size=3, stride=1)
self.pre_conv2 = conv(self.batchNorm, 1, 64, kernel_size=3, stride=1)
self.pre_conv2_1 = conv(self.batchNorm, 64, 64, kernel_size=3, stride=1)
self.pre_conv3 = conv(self.batchNorm, 1, 64, kernel_size=3, stride=1)
self.pre_conv3_1 = conv(self.batchNorm, 64, 64, kernel_size=3, stride=1)
self.biconvlstm = BiConvLSTM(input_size=(input_size[0], input_size[1]), input_dim=64, hidden_dim=64,kernel_size=(3, 3), num_layers=1)
self.LSTM_out = conv(self.batchNorm,128,64, kernel_size=1, stride=1)
self.conv1_mask = conv(self.batchNorm, 1, 64, kernel_size=3, stride=1)
self.conv2_mask = conv(self.batchNorm, 64, 64, kernel_size=3, stride=1)
self.conv1 = conv(self.batchNorm, 64, 128, kernel_size=7, stride=2)#64
self.conv1_1 = conv(self.batchNorm, 128,128) # 128*128 ->64*64
self.conv2 = conv(self.batchNorm, 128,256, kernel_size=3, stride=2)#64 ->32
self.conv2_1 = conv(self.batchNorm, 256,256) # 128*128 ->64*64
self.conv3 = conv(self.batchNorm, 256,512, kernel_size=3, stride=2)#32->16
self.conv3_1 = conv(self.batchNorm, 512,512)
self.conv4 = conv(self.batchNorm, 512,1024, kernel_size=3, stride=2)#16->8
self.conv4_1 = conv(self.batchNorm, 1024,1024)
self.deconv4 = deconv(1024,512)
self.deconv3 = deconv(1025,256)
self.deconv2 = deconv(513,128)
self.deconv1 = deconv(257,64)
self.predict_image4 = predict_image(1024)
self.predict_image3 = predict_image(1025)
self.predict_image2 = predict_image(513)
self.predict_image1 = predict_image(257)
self.upsampled_image4_to_3 = nn.ConvTranspose2d(1,1, 4, 2, 1, bias=False)#8_16
self.upsampled_image3_to_2 = nn.ConvTranspose2d(1,1, 4, 2, 1, bias=False)#16-32
self.upsampled_image2_to_1 = nn.ConvTranspose2d(1,1, 4, 2, 1, bias=False)#32-64
self.upsampled_image1_to_finally = nn.ConvTranspose2d(1, 1, 4, 2, 1, bias=False) # 64-128
self.output1 = conv(self.batchNorm,129,64,kernel_size=3,stride=1)
self.output2 = conv(self.batchNorm, 64, 64, kernel_size=3, stride=1)
self.output3 = conv_no_lrelu(self.batchNorm,64,1,kernel_size=3,stride=1)
for m in self.modules():
if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
                kaiming_normal_(m.weight.data, a=0.05)
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def forward(self, data1,data2,data3,mask):
CNN_seq = []
pre_conv1 = self.pre_conv1(data1)
pre_conv1_1 = self.pre_conv1_1(pre_conv1)
CNN_seq.append(pre_conv1_1)
pre_conv2 = self.pre_conv2(data2)
pre_conv2_1 = self.pre_conv2_1(pre_conv2)
CNN_seq.append(pre_conv2_1)
pre_conv3 = self.pre_conv3(data3)
pre_conv3_1 = self.pre_conv3_1(pre_conv3)
CNN_seq.append(pre_conv3_1)
CNN_seq_out = torch.stack(CNN_seq, dim=1)
CNN_seq_feature_maps = self.biconvlstm(CNN_seq_out)
# CNN_concat_input = CNN_seq_out[:, 1, ...]+CNN_seq_feature_maps[:, 1, ...]
CNN_concat_input = torch.cat([CNN_seq_out[:, 1, ...],CNN_seq_feature_maps[:, 1, ...]],dim=1)
LSTM_out = self.LSTM_out(CNN_concat_input)#128*128*64
conv1_mask = self.conv1_mask(mask)
conv2_mask = self.conv2_mask(conv1_mask)#128*128*64
out_conv1 = self.conv1_1(self.conv1(LSTM_out))
out_conv2 = self.conv2_1(self.conv2(out_conv1))
out_conv3 = self.conv3_1(self.conv3(out_conv2))
out_conv4 = self.conv4_1(self.conv4(out_conv3))
out_conv1_mask = self.conv1_1(self.conv1(conv2_mask))
out_conv2_mask = self.conv2_1(self.conv2(out_conv1_mask))
out_conv3_mask = self.conv3_1(self.conv3(out_conv2_mask))
out_conv4_mask = self.conv4_1(self.conv4(out_conv3_mask))
sum4 = out_conv4+out_conv4_mask
image_4 = self.predict_image4(sum4)
image_4_up = crop_like(self.upsampled_image4_to_3(image_4), out_conv3)
out_deconv3 = crop_like(self.deconv4(sum4), out_conv3)
sum3 = out_conv3 + out_conv3_mask
concat3 = torch.cat((sum3,out_deconv3,image_4_up),dim=1)
image_3 = self.predict_image3(concat3)
image_3_up = crop_like(self.upsampled_image3_to_2(image_3), out_conv2)
out_deconv2 = crop_like(self.deconv3(concat3), out_conv2)
sum2 = out_conv2+out_conv2_mask
concat2 = torch.cat((sum2,out_deconv2,image_3_up),dim=1)
image_2 = self.predict_image2(concat2)
image_2_up = crop_like(self.upsampled_image2_to_1(image_2), out_conv1)
out_deconv2 = crop_like(self.deconv2(concat2), out_conv1)
sum1 = out_conv1 + out_conv1_mask
concat1 = torch.cat((sum1,out_deconv2,image_2_up),dim=1)
image_1 = self.predict_image1(concat1)
image_1_up = crop_like(self.upsampled_image1_to_finally(image_1), LSTM_out)
# print(image_1_up.shape)
out_deconv1 = crop_like(self.deconv1(concat1), LSTM_out)
sum0 = LSTM_out + conv2_mask
concat0 = torch.cat([sum0,out_deconv1,image_1_up],dim=1)
image_out = self.output1(concat0)
image_out2 = self.output2(image_out)
image_finally = self.output3(image_out2)
image_finally = torch.clamp(image_finally,0.,1.)
# print('image_1',image_finally.shape)
if self.is_training:
return image_4,image_3,image_2,image_1,image_finally
else:
return image_finally
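# --- Minimal smoke test (illustrative, not part of the original file). ---
# The 32x64 resolution is hypothetical and chosen small; any size that
# survives four stride-2 downsamplings works. eval() keeps BatchNorm happy
# with a batch of one.
if __name__ == '__main__':
    net = Gen_Guided_UNet(batchNorm=True, input_size=[32, 64], is_training=False)
    net.eval()
    d1, d2, d3, mask = (torch.rand(1, 1, 32, 64) for _ in range(4))
    with torch.no_grad():
        out = net(d1, d2, d3, mask)
    print(out.shape)  # torch.Size([1, 1, 32, 64])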
| 7,816
| 41.483696
| 142
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/dataloader/read_h5.py
|
import numpy as np
import cv2
import torch.multiprocessing as mp
mp.set_start_method('spawn')
import h5py
f = h5py.File('../../train_b8_LD37.h5','r')
for key in f.keys():
print(f[key].name,f[key].shape)
# for i in range(1,100):
# cv2.imshow('1.jpg',f[key][i,0,...])
# cv2.waitKey(0)
| 301
| 20.571429
| 43
|
py
|
MGANet-DCC2020
|
MGANet-DCC2020-master/codes/dataloader/h5_dataset_T.py
|
import torch.utils.data as data
import torch
import numpy as np
from torchvision import transforms, datasets
import h5py
def data_augmentation(image, mode):
if mode == 0:
# original
return image
elif mode == 1:
# flip up and down
return np.flipud(image)
elif mode == 2:
# rotate counterwise 90 degree
return np.rot90(image)
elif mode == 3:
# rotate 90 degree and flip up and down
image = np.rot90(image)
return np.flipud(image)
elif mode == 4:
# rotate 180 degree
return np.rot90(image, k=2)
elif mode == 5:
# rotate 180 degree and flip
image = np.rot90(image, k=2)
return np.flipud(image)
elif mode == 6:
# rotate 270 degree
return np.rot90(image, k=3)
elif mode == 7:
# rotate 270 degree and flip
image = np.rot90(image, k=3)
return np.flipud(image)
class DatasetFromHdf5(data.Dataset):
def __init__(self, file_path):
super(DatasetFromHdf5, self).__init__()
f = h5py.File(file_path,'r')
self.data_pre = f.get('data_pre')
self.data_cur = f.get('data_cur')
self.data_aft = f.get('data_aft')
self.data_mask = f.get('mask')
self.label = f.get('label')
self.data_transform = transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.ToTensor()
])
def __getitem__(self, index):
return torch.from_numpy(self.data_pre[index, :, :, :].transpose(0,2,1)).float(), \
torch.from_numpy(self.data_cur[index, :, :, :].transpose(0,2,1)).float(),\
torch.from_numpy(self.data_aft[index, :, :, :].transpose(0,2,1)).float(),\
torch.from_numpy(self.data_mask[index, :, :, :].transpose(0,2,1)).float(),\
torch.from_numpy(self.label[index, :, :, :].transpose(0,2,1)).float()
def __len__(self):
assert self.label.shape[0]==self.data_aft.shape[0]
return self.label.shape[0]
class DatasetFromHdf5_2_data(data.Dataset):
def __init__(self,data_root_1,data_root_2,transforms=None):
super(DatasetFromHdf5_2_data, self).__init__()
f1 = h5py.File(data_root_1,'r')
f2 = h5py.File(data_root_2,'r')
self.data_pre = f1.get('data_pre')
self.data_cur = f1.get('data_cur')
self.data_aft = f1.get('data_aft')
self.data_mask = f1.get('mask')
self.label = f1.get('label')
self.data_pre_2 = f2.get('data_pre')
self.data_cur_2 = f2.get('data_cur')
self.data_aft_2 = f2.get('data_aft')
self.data_mask_2 = f2.get('mask')
self.label_2 = f2.get('label')
def __getitem__(self, index):
# print(index)
if index%2==0:
index = index//2
return torch.from_numpy(self.data_pre[index, :, :, :].transpose(0,2,1)), torch.from_numpy(self.data_cur[index, :, :, :].transpose(0,2,1)),\
torch.from_numpy(self.data_aft[index, :, :, :].transpose(0,2,1)),torch.from_numpy(self.data_mask[index, :, :, :].transpose(0,2,1)),torch.from_numpy(self.label[index, :, :, :].transpose(0,2,1))
else:
index = index // 2
return torch.from_numpy(self.data_pre_2[index, :, :, :].transpose(0,2,1)), torch.from_numpy(self.data_cur_2[index, :, :, :].transpose(0,2,1)), \
torch.from_numpy(self.data_aft_2[index, :, :, :].transpose(0,2,1)), torch.from_numpy(self.data_mask_2[index, :, :, :].transpose(0,2,1)), torch.from_numpy(self.label_2[index, :, :, :].transpose(0,2,1))
def __len__(self):
return self.data_pre.shape[0]+self.data_pre_2.shape[0]
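# --- Minimal usage sketch (illustrative, not part of the original file). ---
# 'train.h5' is a hypothetical file with datasets data_pre/data_cur/data_aft/
# mask/label shaped (N, C, W, H), like the one inspected by read_h5.py.
if __name__ == '__main__':
    dataset = DatasetFromHdf5('train.h5')
    loader = data.DataLoader(dataset, batch_size=8, shuffle=True)
    pre, cur, aft, mask, label = next(iter(loader))
    print(pre.shape, label.shape)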
| 3,692
| 38.287234
| 219
|
py
|
gwsky
|
gwsky-master/examples/GWMC.py
|
import argparse
from datetime import datetime
import numpy as np
from bilby.gw import WaveformGenerator
from bilby.gw.source import lal_binary_black_hole
from bilby.gw.detector import InterferometerList, load_interferometer
from bilby.core.prior import PriorDict, Uniform, Sine
from bilby.gw.prior import UniformSourceFrame
from astropy.cosmology import Planck15
from gwsky import BilbyEvaluatorRotated, Sampler, \
O3aMassSampler, uniform_sky_sampler, PriorDictSampler
parser = argparse.ArgumentParser()
parser.add_argument('--nsamples', type=float, help='number of samples') # float: handle inputs like --nsamples 1e6
parser.add_argument('--nprocess', type=int, help='number of process used')
parser.add_argument('--resultpath', type=str, help='result file pathname')
parser.add_argument('--catalogpath', type=str, help='catalog file pathname')
args = parser.parse_args()
nsamples: int = int(args.nsamples)
nprocess: int = args.nprocess
result_path: str = args.resultpath
catalog_path : str = args.catalogpath
if not result_path.endswith('.csv'):
result_path += '.csv'
if not catalog_path.endswith('.csv'):
catalog_path += '.csv'
waveform_generator = WaveformGenerator(
duration=4, sampling_frequency=2048,
frequency_domain_source_model=lal_binary_black_hole,
waveform_arguments={
'waveform_approximant': 'TaylorF2',
'minimum_frequency': 1e-2, 'reference_frequency': 0})
network : InterferometerList = load_interferometer('ifo/ET.ifo')
ifo_CE = load_interferometer('ifo/CE.ifo')
network.append(ifo_CE)
evaluator = BilbyEvaluatorRotated(
network=network, waveform_generator=waveform_generator,
fisher_parameters=['mass_1', 'mass_2', 'luminosity_distance', 'geocent_time', 'phase', 'theta_jn', 'ra', 'dec', 'psi'])
default_parameters = dict(
phase=0, psi=np.pi/6,
a_1=0, a_2=0, tilt_1=0, tilt_2=0, phi_12=0, phi_jl=0)
z_min, z_max = 0.1, 2
priors = {
'luminosity_distance': UniformSourceFrame(
name='luminosity_distance',
minimum=Planck15.luminosity_distance(z_min).value,
maximum=Planck15.luminosity_distance(z_max).value),
'theta_jn': Sine(name='iota'),
'geocent_time': Uniform(minimum=0, maximum=24*3600, name='geocent_time')
}
priors = PriorDict(priors)
para_samplers = [
O3aMassSampler(), uniform_sky_sampler(), PriorDictSampler(priors)]
sampler = Sampler(
para_samplers=para_samplers, default_parameters=default_parameters)
result = sampler.sample_fisher(
nsamples=nsamples, evaluator=evaluator,
result_path=result_path, nprocess=nprocess)
start = datetime.now()
catalog = result.generate_catalog(
converter_from_parameter=evaluator.converter_from_parameter)
end = datetime.now()
print(f'generate catalog used time: {end-start}')
catalog.save_csv(catalog_path)
| 2,796
| 33.530864
| 123
|
py
|
gwsky
|
gwsky-master/gwsky/sampler.py
|
import numpy as np
from pathos import multiprocessing
from datetime import datetime
from .evaluator import BaseEvaluator
from .result import Result
from .para_sampler import ParameterSampler
from typing import List, Optional
from .typing import ParameterVector, ParameterDict, SampleResult
class Sampler:
def __init__(self, para_samplers: List[ParameterSampler], default_parameters: ParameterDict):
self.para_samplers = para_samplers
self.keys = sum([sampler.keys for sampler in para_samplers], [])
self.default_parameters = default_parameters
def sample_points(self) -> ParameterVector:
samples = sum([sampler.sample() for sampler in self.para_samplers], [])
return samples
def get_parameter_from_samples(self, samples: ParameterVector) -> ParameterDict:
parameters = self.default_parameters.copy()
parameters.update({name: value for name, value in zip(self.keys, samples)})
return parameters
def sample_parameter(self) -> ParameterDict:
return self.get_parameter_from_samples(self.sample_points())
def _real_sample(self, evaluator: BaseEvaluator, nsamples: int, njobs: int) -> List[SampleResult]:
def _sample_wrapper(*args) -> SampleResult:
np.random.seed()
samples = self.sample_points()
parameters = self.get_parameter_from_samples(samples)
snr = evaluator.snr(parameters)
fisher = evaluator.fisher(parameters)
return samples, snr, fisher
pool = multiprocessing.ProcessPool(nodes=njobs)
return pool.map(_sample_wrapper, range(nsamples))
def sample_fisher(self, nsamples: int, evaluator: BaseEvaluator, result_path: str,
nprocess: int = 1, max_memory_sample: Optional[int] = None) -> Result:
"""
run the MC sample, calculate SNR and fisher matrix for sample points
Args:
nsamples (int): number of sample points
evaluator (BaseEvaluator): evaluator instance to calculate SNR and fisher matrix
result_path (str): path to result file
nprocess (int, optional): number of processes to use. Defaults to 1.
max_memory_sample (Optional[int]):
Max numbers of samples that can be saved in memory.
All samples are saved to disk and result will be cleared when
number of samples in result exceeds this value.
Defaults to None, when no memory limit is presented.
Returns:
Result: result instance, containing the last `nsamples`%`max_memory_sample` samples.
"""
if max_memory_sample is None:
max_memory_sample = nsamples + 1 # act as infinity
start_time = datetime.now()
result = Result(keys=self.keys, fisher_parameters=evaluator.fisher_parameters)
for _ in range(nsamples//max_memory_sample):
samples = self._real_sample(evaluator, max_memory_sample, nprocess)
result.append_samples(samples)
result.save_csv(result_path)
result.clear()
samples = self._real_sample(evaluator, nsamples%max_memory_sample, nprocess)
result.append_samples(samples)
result.save_csv(result_path)
end_time = datetime.now()
print(f'Sampled {nsamples} points, used time {end_time-start_time}')
return result
| 3,437
| 40.421687
| 102
|
py
|
gwsky
|
gwsky-master/gwsky/typing.py
|
import numpy as np
from typing import List, Dict, Tuple, Union
ParameterDict = Dict[str, float]
ParameterVector = List[float]
ParaNameList = List[str]
Value = Union[float, complex, np.ndarray]
DerivateDict = Dict[str, Value]
FisherMatrix = np.ndarray
# a single sample (parameter values, SNR, Fisher matrix); samplers return List[SampleResult]
SampleResult = Tuple[ParameterVector, float, FisherMatrix]
SHModeLM = Tuple[int, int]
SHModes = Dict[SHModeLM, complex]
| 380
| 24.4
| 64
|
py
|
gwsky
|
gwsky-master/gwsky/utils.py
|
import numpy as np
import healpy as hp
from scipy.special import sph_harm
from functools import reduce
import quaternionic
import spherical
import matplotlib.pyplot as plt
from healpy.projaxes import HpxMollweideAxes
from typing import Tuple, Optional, List, Dict
from .typing import SHModes, Value
def ra_dec_to_theta_phi(ra: Value, dec: Value) -> Tuple[Value, Value]:
return np.pi/2-dec, ra
def theta_phi_to_ra_dec(theta: Value, phi: Value) -> Tuple[Value, Value]:
return phi, np.pi/2-theta
def catalog_delta_map(theta: np.ndarray, phi: np.ndarray,
nside: int = 64, ra_dec: bool = False) -> np.ndarray:
if ra_dec:
theta, phi = ra_dec_to_theta_phi(theta, phi)
hp_map = np.zeros(hp.nside2npix(nside))
points_ipix = hp.ang2pix(nside=nside, theta=theta, phi=phi)
ipix, counts = np.unique(points_ipix, return_counts=True)
hp_map[ipix] += counts
map_mean = theta.shape[0] / hp.nside2npix(nside)
return hp_map/map_mean - 1
def spherical_harmonic_modes(theta: np.ndarray, phi: np.ndarray, l: int, m: int,
weights: Optional[np.ndarray] = None, ra_dec: bool = False) -> complex:
if ra_dec:
theta, phi = ra_dec_to_theta_phi(theta, phi)
if weights is None:
weights = np.ones(theta.shape)
normalization = theta.shape[0] / (4*np.pi)
# sph_harm(m, n, theta, phi)
# m,n: harmonic mode, |m|<=n
# theta, phi: spherical coordinate, 0<theta<2*pi, 0<phi<pi
coefficient = np.sum(sph_harm(m, l, phi, theta) * weights).conjugate() / normalization
return coefficient
def sh_normal_coeff(l, m):
fact_item = reduce(
lambda x, y: x*y, range(l-np.abs(m)+1, l+np.abs(m)+1), 1)
return ((2*l+1)/(4*np.pi) / fact_item)**0.5
def rotation_matrix_from_vec(orig_vec, dest_vec) -> np.ndarray:
# see https://math.stackexchange.com/a/476311
v = np.cross(orig_vec, dest_vec)
c = np.inner(orig_vec, dest_vec)
v_cross = np.array(
[[0, -v[2], v[1]],
[v[2], 0, -v[0]],
[-v[1], v[0], 0]])
rot = np.eye(3) + v_cross + np.matmul(v_cross, v_cross)/(1+c)
return rot
def dipole_modes(amplitude: float, theta: float, phi: float) -> SHModes:
a10 = amplitude / sh_normal_coeff(1, 0)
dipole_mode = spherical.Modes(
np.array([0, 0, a10, 0], dtype=complex),
spin_weight=0)
rot_mat = rotation_matrix_from_vec(
orig_vec=hp.dir2vec(theta, phi),
        dest_vec=np.array([0, 0, 1]))  # inverse of the rotation that takes Y_{10} onto a dipole field along the given (theta, phi)
rotation = quaternionic.array.from_rotation_matrix(rot_mat)
wigner = spherical.Wigner(ell_max=2)
    # The modes returned by wigner.rotate correspond to the inverse of the
    # coordinate rotation: for a field f with spherical-harmonic coefficients
    # a_lm, acting with the coordinate rotation R gives a new field
    # f'(r) = f(R^{-1} r) whose coefficients are `wigner.rotate(modes=a_lm, R=1/R)`
rot_mode_sph: spherical.Modes = wigner.rotate(modes=dipole_mode, R=rotation)
rot_mode = {(1, m): rot_mode_sph[spherical.LM_index(1, m)] for m in range(-1, 2)}
return rot_mode
def plot_hp_map(hp_map: np.ndarray, detectors: Optional[List[Dict]] = None,
fig: Optional[plt.Figure] = None, label: str = '',
grid_on: bool = True, grid_kwargs: Optional[Dict] = None,
detector_kwargs: Optional[Dict] = None, **kwargs):
if detectors is None:
detectors = []
plot_kwargs = {'flip': 'geo'}
plot_kwargs.update(kwargs)
hp.mollview(hp_map, fig=fig, **plot_kwargs)
fig = plt.gcf()
skymap_ax: HpxMollweideAxes = fig.get_axes()[0]
det_kwargs = {'color': 'orange', 'markersize': 10}
if detector_kwargs is not None:
det_kwargs.update(detector_kwargs)
for detector in detectors:
skymap_ax.projplot(
detector['longitude'], detector['latitude'], lonlat=True,
marker=detector['marker'], **det_kwargs)
    if grid_on:
        grid_kwargs_real = {'dpar': 30, 'dmer': 30}
        if grid_kwargs is not None:  # BUGFIX: the default grid_kwargs=None is not a dict
            grid_kwargs_real.update(grid_kwargs)
        skymap_ax.graticule(**grid_kwargs_real)
cb_ax: plt.Axes = fig.get_axes()[1]
cb_ax.set_xlabel(label)
return fig
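# --- Minimal usage sketch (illustrative, not part of the original file). ---
# Draws an isotropic catalog and checks that its recovered (l=1, m=0) mode is
# close to zero; the sample size is arbitrary.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    n = 10000
    theta = np.arccos(rng.uniform(-1, 1, n))  # polar angle
    phi = rng.uniform(0, 2 * np.pi, n)        # azimuth
    print(abs(spherical_harmonic_modes(theta, phi, l=1, m=0)))  # ~0 for isotropy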
| 4,056
| 32.254098
| 100
|
py
|
gwsky
|
gwsky-master/gwsky/converter.py
|
from abc import ABCMeta, abstractmethod
import numpy as np
import healpy as hp
from typing import Callable
from .typing import ParameterDict, ParaNameList
class ParameterConverter(metaclass=ABCMeta):
def __init__(self) -> None:
pass
@abstractmethod
def __call__(self, parameters: ParameterDict) -> ParameterDict:
pass
@abstractmethod
def reverse_convert(self, converted: ParameterDict) -> ParameterDict:
pass
@abstractmethod
def name_convert(self, name_list: ParaNameList) -> ParaNameList:
pass
@abstractmethod
def reverse_name_convert(self, converted_name_list: ParaNameList) -> ParaNameList:
pass
def __mul__(self, other: 'ParameterConverter')->'ParameterConverter':
class TmpConverter(ParameterConverter):
def __call__(self_, parameters: ParameterDict) -> ParameterDict:
return other(self(parameters))
def reverse_convert(self_, converted: ParameterDict) -> ParameterDict:
return self.reverse_convert(other.reverse_convert(converted))
def name_convert(self_, name_list: ParaNameList) -> ParaNameList:
return other.name_convert(self.name_convert(name_list))
def reverse_name_convert(self_, converted_name_list: ParaNameList) -> ParaNameList:
return self.reverse_name_convert(other.reverse_name_convert(converted_name_list))
return TmpConverter()
class NoConvert(ParameterConverter):
def __call__(self, parameters: ParameterDict) -> ParameterDict:
return parameters.copy()
def reverse_convert(self, converted: ParameterDict) -> ParameterDict:
return converted.copy()
def name_convert(self, name_list: ParaNameList) -> ParaNameList:
return name_list.copy()
def reverse_name_convert(self, converted_name_list: ParaNameList) -> ParaNameList:
return converted_name_list.copy()
class FuncConverter(ParameterConverter):
def __init__(self, convert_para: ParaNameList, convert_name: str,
convert_func: Callable[[float], float],
revert_convert_func: Callable[[float], float]) -> None:
super().__init__()
self.convert_para = convert_para
self.convert_name = convert_name
self.convert_func = convert_func
self.revert_convert_func = revert_convert_func
def _para_name_convert(self, para):
return f'{self.convert_name}_{para}'
def __call__(self, parameters: ParameterDict) -> ParameterDict:
converted = parameters.copy()
for para in self.convert_para:
converted[self._para_name_convert(
para)] = self.convert_func(converted.pop(para))
return converted
def reverse_convert(self, converted: ParameterDict) -> ParameterDict:
parameters = converted.copy()
for para in self.convert_para:
parameters[para] = self.revert_convert_func(
parameters.pop(self._para_name_convert(para)))
return parameters
def name_convert(self, name_list: ParaNameList) -> ParaNameList:
converted_name_list = name_list.copy()
for para in self.convert_para:
converted_name_list.remove(para)
converted_name_list.append(self._para_name_convert(para))
return converted_name_list
def reverse_name_convert(self, converted_name_list: ParaNameList) -> ParaNameList:
name_list = converted_name_list.copy()
for para in self.convert_para:
name_list.remove(self._para_name_convert(para))
name_list.append(para)
return name_list
class RotationConverter(ParameterConverter):
def __init__(self, rotation_matrix: np.ndarray, ra_dec: bool = True) -> None:
self.rot_mat = rotation_matrix
self.ra_dec = ra_dec
self._inv_rot_mat = None
@property
def inv_rot_mat(self):
if self._inv_rot_mat is None:
self._inv_rot_mat = np.linalg.inv(self.rot_mat)
return self._inv_rot_mat
def rotate_from_parameters(self, rot_mat: np.ndarray, parameters: ParameterDict) -> ParameterDict:
parameters = parameters.copy()
if self.ra_dec:
theta, phi = np.pi/2-parameters['dec'], parameters['ra']
else:
theta, phi = parameters['theta'], parameters['phi']
rot_theta, rot_phi = hp.rotator.rotateDirection(rot_mat, theta, phi)
if self.ra_dec:
parameters['ra'], parameters['dec'] = rot_phi, np.pi/2-rot_theta
else:
parameters['theta'], parameters['phi'] = rot_theta, rot_phi
return parameters
def __call__(self, parameters: ParameterDict) -> ParameterDict:
return self.rotate_from_parameters(self.rot_mat, parameters)
def name_convert(self, name_list: ParaNameList) -> ParaNameList:
return name_list.copy()
def reverse_convert(self, converted: ParameterDict) -> ParameterDict:
return self.rotate_from_parameters(self.inv_rot_mat, converted)
def reverse_name_convert(self, converted_name_list: ParaNameList) -> ParaNameList:
return converted_name_list.copy()
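# --- Minimal usage sketch (illustrative, not part of the original file). ---
# Builds a hypothetical log-distance converter and checks that
# reverse_convert undoes __call__.
if __name__ == '__main__':
    log_dl = FuncConverter(convert_para=['luminosity_distance'], convert_name='log',
                           convert_func=np.log, revert_convert_func=np.exp)
    params = {'luminosity_distance': 1000.0, 'ra': 0.3}
    converted = log_dl(params)
    print(converted)                          # {'ra': 0.3, 'log_luminosity_distance': 6.907...}
    print(log_dl.reverse_convert(converted))  # round-trips to the original parameters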
| 5,201
| 35.633803
| 102
|
py
|
gwsky
|
gwsky-master/gwsky/__init__.py
|
from .evaluator import BaseEvaluator, BilbyEvaluator, BilbyEvaluatorRotated, GWBEvaluator
from .result import Result
from .catalog import GWCatalog
from .sampler import Sampler
from .para_sampler import ParameterSampler, PriorDictSampler, \
O3aMassSampler, uniform_sky_sampler, SHSkySampler, DipoleSkySampler
from .converter import ParameterConverter, NoConvert, FuncConverter, RotationConverter
| 400
| 49.125
| 89
|
py
|
gwsky
|
gwsky-master/gwsky/catalog.py
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from .utils import spherical_harmonic_modes, catalog_delta_map, plot_hp_map
from typing import Dict, List, Optional, Tuple, Union, Iterable
from .typing import ParameterVector, ParaNameList, SHModes, SHModeLM
class GWCatalog:
def __init__(self, events: Optional[pd.DataFrame] = None, keys: Optional[ParaNameList] = None) -> None:
if events is None:
assert keys is not None
self.events = events
self.keys = keys
if self.events is not None:
self._fix_dec()
def _fix_dec(self):
dec = self.events.loc[:, 'dec']
self.events.loc[dec<-np.pi/2, 'dec'] = -np.pi/2
self.events.loc[dec>np.pi/2, 'dec'] = np.pi/2
def append_events(self, new_events: List[ParameterVector]) -> None:
if self.events is None:
self.events = pd.DataFrame(new_events, columns=self.keys)
else:
new_df = pd.DataFrame(new_events, columns=self.events.columns)
self.events = pd.concat([self.events, new_df], ignore_index=True)
self._fix_dec()
    def get_parameters(self, parameter: Union[str, List[str]], indexes: Optional[pd.Index] = None) -> np.ndarray:
if indexes is None:
indexes = self.events.index
return self.events.loc[indexes, parameter].to_numpy()
def get_snr(self, indexes: Optional[pd.Index] = None) -> np.ndarray:
if indexes is None:
indexes = self.events.index
return self.events.loc[indexes, 'snr'].to_numpy()
def get_source_position(self, snr_threshold: float = 0, other_cols: Optional[Iterable[str]] = None) -> Tuple[np.ndarray, ...]:
snr = self.get_snr()
index = snr > snr_threshold
if other_cols is None:
other_cols = []
paras = self.get_parameters(['ra', 'dec']+list(other_cols), index)
return tuple(paras[:, i] for i in range(paras.shape[1]))
def clear(self):
self.events.drop(index=self.events.index, inplace=True)
def spherical_harmonic_modes(self, lmax: Optional[int] = None, lmin: int = 1, mmax: Optional[int] = None, mmin: int = 0,
lms: Optional[Iterable[SHModeLM]] = None, weights_col: Optional[str] = None, snr_threshold: float = 0) -> SHModes:
if lms is None:
if lmax is None:
raise ValueError('lmax should be passed when not passing lms')
if mmax is None:
mmax = lmax + 1
lms = [(l, m) for l in range(lmin, lmax+1) for m in range(mmin, min(l, mmax)+1)]
if weights_col is None:
ra, dec = self.get_source_position(snr_threshold)
weights = None
else:
ra, dec, weights = self.get_source_position(snr_threshold, [weights_col])
calc_lms = {(l, abs(m)) for l, m in lms}
modes: SHModes = {(l, m): spherical_harmonic_modes(
ra, dec, l=l, m=m, weights=weights, ra_dec=True) for l, m in calc_lms}
for l, m in lms:
if m < 0:
a_l_absm = modes[(l, -m)] if (l, -m) in lms else modes.pop((l, -m))
modes[(l, m)] = -a_l_absm.conjugate()
return modes
def save_csv(self, path: str, append: bool = False):
if append:
self.events.to_csv(path, index=False, mode='a', header=False)
else:
self.events.to_csv(path, index=False)
@classmethod
def load_csv(cls, path: str):
events = pd.read_csv(path, index_col=False)
return cls(events=events)
def plot_skymap(self, snr_threshold: float = 0, detectors: Optional[List[Dict]] = None,
nside=64, **kwargs) -> plt.Figure:
ra, dec = self.get_source_position(snr_threshold)
delta_map = catalog_delta_map(ra, dec, nside, ra_dec=True)
fig = plot_hp_map(delta_map, detectors=detectors, **kwargs)
return fig
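# --- Minimal usage sketch (illustrative, not part of the original file). ---
# The three events below are made up; only those above the SNR threshold
# enter the mode estimate.
if __name__ == '__main__':
    catalog = GWCatalog(keys=['ra', 'dec', 'snr'])
    catalog.append_events([[0.1, 0.2, 12.0], [1.5, -0.3, 8.0], [3.0, 0.7, 20.0]])
    print(catalog.spherical_harmonic_modes(lmax=1, snr_threshold=10.0))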
| 3,929
| 39.515464
| 147
|
py
|
gwsky
|
gwsky-master/gwsky/result.py
|
import json
import numpy as np
import pandas as pd
from .catalog import GWCatalog
from .converter import ParameterConverter, NoConvert
from typing import Optional, Union, Callable, List, Dict
from .typing import ParameterDict, ParaNameList, FisherMatrix, ParameterVector, SampleResult
ConverterFunc = Callable[[ParameterDict], ParameterConverter]
ConverterGen = Union[ParameterConverter, ConverterFunc]
class Result:
def __init__(self, keys: ParaNameList, fisher_parameters: ParaNameList):
self.keys = keys
self.fisher_parameters = fisher_parameters
self.last_save_path = ''
self.samples = None
@property
def fisher_parameters(self):
return self._fisher_parameters
@fisher_parameters.setter
def fisher_parameters(self, fisher_parameters: ParaNameList):
self._fisher_parameters = fisher_parameters
self._fisher_tril_indices = np.tril_indices(len(fisher_parameters))
self.fisher_columns = [f'fisher_{i}{j}' for i, j in zip(*self._fisher_tril_indices)]
def set_samples(self, samples: pd.DataFrame, fisher_parameters: Optional[ParaNameList] = None):
self.samples = samples
self.keys = list(samples.columns[:samples.columns.get_loc('snr')])
if fisher_parameters is not None:
self.fisher_parameters = fisher_parameters
def _subpara_fisher_columns(self, sub_parameters: ParaNameList) -> List:
para_to_index = {para: i for i, para in enumerate(self.fisher_parameters)}
def sub_element_to_main(sub_i, sub_j):
main_i = para_to_index[sub_parameters[sub_i]]
main_j = para_to_index[sub_parameters[sub_j]]
if main_i >= main_j:
return main_i, main_j
else:
return main_j, main_i
columns = []
for i, j in zip(*np.tril_indices(len(sub_parameters))):
main_i, main_j = sub_element_to_main(i, j)
columns.append(f'fisher_{main_i}{main_j}')
return columns
def _fisher_matrix_to_columns(self, fisher: FisherMatrix) -> List:
return list(fisher[self._fisher_tril_indices])
def _fisher_columns_to_matrix(self, columns: List, ndim: Optional[int] = None) -> FisherMatrix:
if ndim is None:
ndim = len(self.fisher_parameters)
fisher = np.zeros((ndim, ndim))
fisher[np.tril_indices(ndim)] = columns
return fisher + np.triu(fisher.T, k=1) # symmetrize
def _sample_result_to_df_item(self, sample_points: ParameterVector, snr: float, fisher: FisherMatrix) -> List:
return sample_points+[snr]+self._fisher_matrix_to_columns(fisher)
def append_samples(self, sample_results: List[SampleResult]):
df_items = [self._sample_result_to_df_item(
*result) for result in sample_results]
new_samples = pd.DataFrame(
df_items, columns=self.keys+['snr']+self.fisher_columns)
if self.samples is None:
self.samples = new_samples
else:
self.samples = pd.concat([self.samples, new_samples], ignore_index=True)
def get_sample_points(self, indexes: Optional[pd.Index] = None, keys: Optional[ParaNameList] = None) -> np.ndarray:
if indexes is None:
indexes = self.samples.index
if keys is None:
keys = self.keys
return self.samples.loc[indexes, keys].to_numpy()
def get_sample_parameter_dict(self, indexes: Optional[pd.Index] = None, keys: Optional[ParaNameList] = None) -> List[Dict]:
if indexes is None:
indexes = self.samples.index
if keys is None:
keys = self.keys
return [{key: self.samples.loc[i, key] for key in keys}
for i in indexes]
def get_snr(self, indexes: Optional[pd.Index] = None) -> np.ndarray:
if indexes is None:
indexes = self.samples.index
return np.array(self.samples.loc[indexes, 'snr'])
def get_fisher_matrix(self, indexes: Optional[pd.Index] = None,
fisher_parameters: Optional[ParaNameList] = None) -> np.ndarray:
if indexes is None:
indexes = self.samples.index
if fisher_parameters is None:
fisher_columns = self.fisher_columns
ndim = len(self.fisher_parameters)
else:
fisher_columns = self._subpara_fisher_columns(fisher_parameters)
ndim = len(fisher_parameters)
fisher_value = np.array(self.samples.reindex(index=indexes, columns=fisher_columns))
return np.array([self._fisher_columns_to_matrix(f, ndim=ndim) for f in fisher_value])
def clear(self):
self.samples.drop(index=self.samples.index, inplace=True)
@property
def metadata(self):
return {'keys': self.keys, 'fisher_parameters': self.fisher_parameters}
def save_csv(self, path: str, append_mode: Optional[bool] = None):
if append_mode is None:
append_mode = (path == self.last_save_path)
if append_mode:
self.samples.to_csv(path, index=False, mode='a', header=False)
else:
self.samples.to_csv(path, index=False)
self.last_save_path = path
metadata_path = path + '.metadata.json'
with open(metadata_path, 'w') as f:
json.dump(self.metadata, f)
@classmethod
def load_csv(cls, path: str):
metadata_path = path + '.metadata.json'
with open(metadata_path, 'r') as f:
metadata = json.load(f)
keys = metadata['keys']
fisher_parameters = metadata['fisher_parameters']
result = cls(keys=keys, fisher_parameters=fisher_parameters)
samples = pd.read_csv(path, index_col=False)
result.samples = samples
return result
def regenerate_event(self, real_parameter: ParameterDict, fisher: FisherMatrix,
fisher_parameter: ParaNameList, num: Optional[int] = None) -> Union[ParameterDict, List[ParameterDict]]:
calc_num = 1 if num is None else num
cov_para_names = list(filter(lambda p: p in fisher_parameter, real_parameter.keys()))
cov_para_list = [real_parameter[para] for para in cov_para_names]
no_cov_para = {para: value for para, value in real_parameter.items() if para not in fisher_parameter}
cov_full = np.linalg.inv(fisher)
fisher_index_map = {para: i for i, para in enumerate(fisher_parameter)}
catalog_index = [fisher_index_map[para] for para in cov_para_names]
cov = cov_full[catalog_index, :][:, catalog_index]
detected_para_lists = np.random.multivariate_normal(mean=cov_para_list, cov=cov, size=calc_num)
detected_params = []
for params in detected_para_lists:
detected = {para: value for para, value in zip(cov_para_names, params)}
detected.update(no_cov_para)
detected_params.append(detected)
if num is None:
return detected_params[0]
else:
return detected_params
def regenerate_converted_event(self, parameters: ParameterDict, fisher: FisherMatrix, snr: float,
converter_func: ConverterFunc, paras: ParaNameList,
num: Optional[int] = None) -> Union[ParameterVector, List[ParameterVector]]:
calc_num = 1 if num is None else num
converter = converter_func(parameters)
converted = converter(parameters)
try:
event_para_converted = self.regenerate_event(
real_parameter=converted, fisher=fisher,
fisher_parameter=self.fisher_parameters, num=calc_num)
except np.linalg.LinAlgError:
event_para_converted = [parameters] * calc_num
snr = -1
event_params = [converter.reverse_convert(converted) for converted in event_para_converted]
catalog_vectors = [[params[para] for para in paras]+[snr] for params in event_params]
if num is None:
return catalog_vectors[0]
else:
return catalog_vectors
def _get_converter_func(self, converter_gen: Optional[ConverterGen] = None) -> ConverterFunc:
if converter_gen is None:
converter_gen = NoConvert()
if isinstance(converter_gen, ParameterConverter):
def converter_func(p: ParameterDict):
return converter_gen
else:
converter_func = converter_gen
return converter_func
def generate_catalog(self, converter_from_parameter: Optional[ConverterGen] = None,
indexes: Optional[pd.Index] = None, paras: Optional[ParaNameList] = None) -> GWCatalog:
"""
generate observed catalog using parameters and fisher matrix value
Args:
converter (Union[ParameterConverter, Callable[[ParameterDict], ParameterConverter]]):
converter that convert stored parameters to parameters that corresponding to fisher matrix
or a function that generate converter from parameters (can vary for different parameters)
indexes (Iterable[pandas.Index]):
choose indexes in result samples to generate catalog. Default to choose all samples
Returns:
GWCatalog: GW catalog, containing observed parameter value and SNR
"""
if paras is None:
paras = self.keys
converter_func = self._get_converter_func(converter_from_parameter)
fishers = self.get_fisher_matrix(indexes=indexes)
parameters = self.get_sample_parameter_dict(indexes=indexes, keys=paras)
snrs = self.get_snr(indexes=indexes)
catalog_list = [self.regenerate_converted_event(
parameter_i, fisher_i, snr_i, converter_func=converter_func, paras=paras)
for parameter_i, fisher_i, snr_i in zip(parameters, fishers, snrs)]
catalog_df = pd.DataFrame(catalog_list, columns=paras+['snr'])
catalog = GWCatalog(events=catalog_df)
return catalog
def bootstrap_catalog(self, n_resample: int, converter_from_parameter: Optional[ConverterGen] = None,
indexes: Optional[pd.Index] = None, paras: Optional[ParaNameList] = None,
memory_batch: Optional[int] = None, save_path: Optional[str] = None) -> GWCatalog:
if indexes is None:
indexes = self.samples.index
if paras is None:
paras = self.keys
if memory_batch is None:
memory_batch = n_resample + 1
else:
assert save_path is not None
converter_func = self._get_converter_func(converter_from_parameter)
resample_j = np.random.randint(0, len(indexes), size=n_resample) # j is the index of `indexes` parameter
j_value, j_count = np.unique(resample_j, return_counts=True)
fishers = self.get_fisher_matrix(indexes=indexes)
parameters = self.get_sample_parameter_dict(indexes=indexes, keys=paras)
snrs = self.get_snr(indexes=indexes)
catalog = GWCatalog(keys=paras+['snr'])
events = []
save_append = False
for j, n in zip(j_value, j_count):
events += self.regenerate_converted_event(
parameters[j], fishers[j], snrs[j],
converter_func=converter_func, paras=paras, num=n)
if len(events) >= memory_batch:
catalog.append_events(events)
events = []
catalog.save_csv(save_path, append=save_append)
catalog.clear()
save_append = True
catalog.append_events(events)
if save_path is not None:
catalog.save_csv(save_path, append=save_append)
return catalog
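# --- Minimal usage sketch (illustrative, not part of the original file). ---
# Appends one hand-made sample (parameter values, SNR, Fisher matrix) and
# reads the Fisher matrix back from its flattened column form.
if __name__ == '__main__':
    result = Result(keys=['ra', 'dec'], fisher_parameters=['ra', 'dec'])
    fisher = np.array([[4.0, 1.0], [1.0, 2.0]])
    result.append_samples([([0.3, -0.1], 15.0, fisher)])
    print(result.get_snr())               # [15.]
    print(result.get_fisher_matrix()[0])  # recovers the symmetric 2x2 matrix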
| 11,793
| 42.681481
| 129
|
py
|
gwsky
|
gwsky-master/gwsky/evaluator/utils.py
|
import numpy as np
from numdifftools import Gradient
from typing import Callable, Tuple, Union
from ..typing import ParaNameList, ParameterDict, ParameterVector, Value, DerivateDict
def derivate_central(func, parameters: ParameterDict, deriv_para: ParaNameList, step: float = 1e-7) -> DerivateDict:
deriv = {}
for para in deriv_para:
forward_parameters = parameters.copy()
backward_parameters = parameters.copy()
forward_parameters[para] += step
backward_parameters[para] -= step
deriv[para] = (func(forward_parameters) - func(backward_parameters)) / (2*step)
return deriv
def complex_to_amplitude_phase(z: Value) -> Tuple[Value, Value]:
return np.abs(z), np.unwrap(np.angle(np.atleast_1d(z)))
def amplitude_phase_to_complex(amplitude: Value, phase: Value) -> Value:
return amplitude*np.exp(1j*phase)
def derivative_from_amplitude_and_phase(amplitude: Value, phase: Value, d_amplitude: Value, d_phase: Value) -> Value:
return d_amplitude * np.exp(1j*phase) + amplitude * np.exp(1j*phase) * 1j * d_phase
def complex_gradient(func: Callable, parameter_vector: ParameterVector, return_value: bool,
step: float, **gradient_kwargs) -> Union[np.ndarray, Tuple[np.ndarray, Value]]:
"""
Calculate gradient for complex function by calculating derivative for amplitude and phase seperately.
Using numerical derivative package numdifftools.
Args:
func: function for gradient calculation
Return value of func should be a 1d or 2d array.
parameter_vector (ParameterVector): values of parameters(real)
return_value (bool): whether to return value of func at parameter_vector
is func is expensive to calculate, this parameter should be set to True,
since func(parameter_vector) is calculated once in this function,
it will be returned to the caller who may need to use this value.
step (float) : step of numerical derivative
Returns:
array, shape: len(parameter_vector) x shape of return value of func
gradient of func
value: value of func(parameter_vector). returns if `return_value` is True
"""
value = func(parameter_vector)
amplitude, phase = complex_to_amplitude_phase(value)
def amp_pha_func(parameter_vector):
        # always returns a 2d array, whether func returns a 1d or a 2d array
return np.vstack(complex_to_amplitude_phase(func(parameter_vector)))
d_amp_pha = Gradient(amp_pha_func, step=step, **gradient_kwargs)(parameter_vector)
# 3d array, first index matches parameter_vector
d_amp_pha = np.transpose(d_amp_pha, axes=(1, 0, 2))
d_amplitude = d_amp_pha[:, :d_amp_pha.shape[1]//2, :]
d_phase = d_amp_pha[:, d_amp_pha.shape[1]//2:, :]
gradient_shape = [len(parameter_vector)] + list(amplitude.shape)
gradient = derivative_from_amplitude_and_phase(
amplitude, phase, d_amplitude, d_phase).reshape(gradient_shape)
if return_value:
return gradient, value
else:
return gradient
def combine_product_derivate(deriv_parameters: ParaNameList, deriv_dict1: DerivateDict, deriv_dict2: DerivateDict,
value1: Value, value2: Value) -> DerivateDict:
product_deriv = {}
for para in deriv_parameters:
product_deriv[para] = deriv_dict1.get(para, 0) * value2 \
+ deriv_dict2.get(para, 0) * value1
return product_deriv
def convert_derivate_cos(value: Value, derivate: Value):
return -1/np.sin(value) * derivate
def convert_derivate_log(value: Value, derivate: Value):
return value * derivate
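# --- Minimal usage sketch (illustrative, not part of the original file). ---
# Central differences on a simple analytic function; the parameter names
# are made up.
if __name__ == '__main__':
    def f(p):
        return p['x'] ** 2 + 3 * p['y']
    print(derivate_central(f, {'x': 2.0, 'y': 1.0}, deriv_para=['x', 'y']))
    # ~{'x': 4.0, 'y': 3.0}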
| 3,690
| 40.011111
| 117
|
py
|
gwsky
|
gwsky-master/gwsky/evaluator/evaluator.py
|
from abc import ABCMeta, abstractmethod
from ..typing import ParameterDict, ParaNameList, FisherMatrix
class BaseEvaluator(metaclass=ABCMeta):
@abstractmethod
def snr(self, parameters: ParameterDict) -> float:
pass
@abstractmethod
def fisher(self, parameters: ParameterDict) -> FisherMatrix:
pass
@property
@abstractmethod
def fisher_parameters(self):
pass
| 414
| 20.842105
| 64
|
py
|
gwsky
|
gwsky-master/gwsky/evaluator/__init__.py
|
from .evaluator import BaseEvaluator
from .gwb import GWBEvaluator
from .bilby import BilbyEvaluator, BilbyEvaluatorRotated
| 123
| 40.333333
| 56
|
py
|
gwsky
|
gwsky-master/gwsky/evaluator/bilby.py
|
import numpy as np
import healpy as hp
from bilby.gw import WaveformGenerator
from bilby.gw.detector import Interferometer, InterferometerList
from bilby.gw.utils import noise_weighted_inner_product
from .evaluator import BaseEvaluator
from ..converter import RotationConverter
from .utils import derivate_central
from ..utils import rotation_matrix_from_vec
from ..typing import ParameterDict, FisherMatrix, ParaNameList, DerivateDict
class BilbyEvaluator(BaseEvaluator):
def __init__(self, network: InterferometerList, waveform_generator: WaveformGenerator,
fisher_parameters: ParaNameList, step: float = 1e-7) -> None:
self.network = network
self.waveform_generator = waveform_generator
self.network.set_strain_data_from_power_spectral_densities(
sampling_frequency=self.waveform_generator.sampling_frequency,
duration=self.waveform_generator.duration,
start_time=self.waveform_generator.start_time)
self._fisher_parameters = fisher_parameters.copy()
self._step = step
def _interferometer_response(self, parameters: ParameterDict, interferometer: Interferometer):
waveform = self.waveform_generator.frequency_domain_strain(parameters)
return interferometer.get_detector_response(
waveform_polarizations=waveform, parameters=parameters)
def snr(self, parameters: ParameterDict) -> float:
total_snr_square = sum([
ifo.optimal_snr_squared(
self._interferometer_response(parameters, ifo)).real
for ifo in self.network])
return np.sqrt(total_snr_square)
@property
def fisher_parameters(self):
return self._fisher_parameters
@fisher_parameters.setter
def fisher_parameters(self, value: ParaNameList):
self._fisher_parameters = value
def _interferometer_signal_derivate(self, interferometer: Interferometer, parameters: ParameterDict,
fisher_parameters: ParaNameList) -> DerivateDict:
"""
calculate derivate of masked waveform
Args:
interferometer (Interferometer)
parameters (ParameterDict): waveform parameters
fisher_parameters (ParaNameList): names of parameters to derivate on
Returns:
DerivateDict: derivate of frequency-masked waveform
"""
def waveform(parameters):
return self._interferometer_response(parameters, interferometer)
return derivate_central(waveform, parameters, deriv_para=fisher_parameters, step=self._step)
def _signal_derivate_to_fisher_matrix(self, interferometer: Interferometer, signal_derivate:DerivateDict,
fisher_parameters: ParaNameList) -> FisherMatrix:
signal_array_len = signal_derivate[fisher_parameters[0]].shape[0]
if signal_array_len == interferometer.frequency_array.shape[0]:
masked_derivate = {
para: deriv[interferometer.frequency_mask] for para, deriv in signal_derivate.items()}
elif signal_array_len == np.sum(interferometer.frequency_mask):
masked_derivate = signal_derivate
else:
            raise ValueError('signal derivative shape does not match the frequency array of the interferometer.')
psd = interferometer.power_spectral_density_array[interferometer.frequency_mask]
fisher = np.zeros([len(fisher_parameters)]*2)
for i, para_i in enumerate(fisher_parameters):
for j in range(i+1):
para_j = fisher_parameters[j]
fisher[i, j] = noise_weighted_inner_product(
masked_derivate[para_i], masked_derivate[para_j],
power_spectral_density=psd, duration=interferometer.duration).real
fisher[j, i] = fisher[i, j]
return fisher
def _interferometer_fisher(self, interferometer: Interferometer, parameters: ParameterDict,
fisher_parameters: ParaNameList):
signal_derivate = self._interferometer_signal_derivate(
interferometer, parameters, fisher_parameters)
return self._signal_derivate_to_fisher_matrix(interferometer, signal_derivate, fisher_parameters)
def fisher(self, parameters: ParameterDict) -> FisherMatrix:
return sum([
self._interferometer_fisher(interferometer, parameters, self.fisher_parameters)
for interferometer in self.network])
class BilbyEvaluatorRotated(BilbyEvaluator):
def __init__(self, network: InterferometerList, waveform_generator: WaveformGenerator, fisher_parameters: ParaNameList, step: float = 1e-7) -> None:
super().__init__(network, waveform_generator, fisher_parameters, step)
def converter_from_parameter(self, parameters: ParameterDict) -> RotationConverter:
theta, phi = np.pi/2-parameters['dec'], parameters['ra']
rot_mat = rotation_matrix_from_vec(
orig_vec=hp.dir2vec(theta, phi), dest_vec=[1, 0, 0])
return RotationConverter(rot_mat)
def _interferometer_signal_derivate(self, interferometer: Interferometer, parameters: ParameterDict, fisher_parameters: ParaNameList) -> DerivateDict:
converter = self.converter_from_parameter(parameters)
parameters = converter(parameters)
def waveform(parameters):
return self._interferometer_response(converter.reverse_convert(parameters), interferometer)
return derivate_central(waveform, parameters, deriv_para=fisher_parameters, step=self._step)
| 5,630
| 46.319328
| 154
|
py
|
gwsky
|
gwsky-master/gwsky/evaluator/gwb.py
|
import numpy as np
from gwbench.network import Network
from .evaluator import BaseEvaluator
from typing import List, Optional
from ..typing import ParameterDict, ParaNameList, FisherMatrix
class GWBEvaluator(BaseEvaluator):
def __init__(self, network_specs: List[str], frequency_array: np.ndarray, waveform_approximant: str,
deriv_parameters: ParaNameList, convert_cos: Optional[ParaNameList] = None, convert_log: Optional[ParaNameList] = None,
rotate: bool = False):
self.network = Network(network_specs)
self.network.set_wf_vars(
wf_model_name='lal_bbh',
wf_other_var_dic=dict(approximant=waveform_approximant))
self.network.set_net_vars(
f=frequency_array, deriv_symbs_string=' '.join(deriv_parameters),
conv_cos=convert_cos, conv_log=convert_log, use_rot=rotate)
self.network.setup_psds()
self._fisher_parameters = deriv_parameters.copy()
for convert_func, convert_para in {'cos':convert_cos, 'log':convert_log}.items():
if convert_para is not None:
for para in convert_para:
self._fisher_parameters[self._fisher_parameters.index(para)]=f'{convert_func}_{para}'
def snr(self, parameters: ParameterDict) -> float:
self.network.set_net_vars(inj_params=parameters)
self.network.setup_ant_pat_lpf()
self.network.calc_det_responses()
self.network.calc_snrs_det_responses(only_net=True)
return self.network.snr
def fisher(self, parameters: ParameterDict) -> FisherMatrix:
self.network.set_net_vars(inj_params=parameters)
self.network.setup_ant_pat_lpf()
self.network.calc_det_responses_derivs_num()
self.network.calc_errors(only_net=True)
return self.network.fisher
@property
def fisher_parameters(self):
return self._fisher_parameters
| 1,933
| 40.148936
| 136
|
py
|
gwsky
|
gwsky-master/gwsky/para_sampler/mass.py
|
import numpy as np
from bilby.core.prior import Prior, Interped
from .para_sampler import ParameterSampler
from typing import Optional
from ..typing import ParaNameList, ParameterVector
class PowerLawPeakMass(Interped):
'''
Power Law + Peak model in 2010.14533
'''
def __init__(self, m_min, m_max, delta_m, alpha, mu_m, sigma_m, lambda_peak):
self.m_min = m_min
self.m_max = m_max
self.delta_m = delta_m
self.alpha = alpha
self.mu_m = mu_m
self.sigma_m = sigma_m
self.lambda_peak = lambda_peak
m = np.linspace(m_min, m_max, 1000)
power_law = self.smooth(m**(-self.alpha), m)
gaussian = self.smooth(
np.exp(-(m-self.mu_m)**2 / self.sigma_m**2 / 2), m)
yy = (1-self.lambda_peak) * self.normalize(power_law, m) + \
self.lambda_peak * self.normalize(gaussian, m)
super().__init__(xx=m, yy=yy, minimum=m_min, maximum=m_max, name='mass_1')
def smooth(self, y, m):
smooth_i = m < self.m_min+self.delta_m
mprime = m[smooth_i] - self.m_min
smooth = 1 / (np.exp(self.delta_m/mprime +
self.delta_m/(mprime-self.delta_m)) + 1)
y[smooth_i] *= smooth
return y
def normalize(self, y, x):
return y / np.trapz(y, x)
class PowerLawPeakMassRatio(Prior):
def __init__(self, beta_q, m_min, delta_m):
self.beta_q = beta_q
self.m_min = m_min
self.delta_m = delta_m
self._mass_1 = None
super().__init__(minimum=0, maximum=1)
@property
def mass_1(self):
return self._mass_1
@mass_1.setter
def mass_1(self, m:float):
self._mass_1=m
def smooth_function(self, m):
if m < self.m_min:
return 0
elif m < self.m_min+self.delta_m:
mprime = m - self.m_min
return 1 / (np.exp(self.delta_m/mprime + self.delta_m/(mprime-self.delta_m)) + 1)
else:
return 1
def rescale(self, val):
        # Second-kind rejection sampling, as introduced on p. 20 of Ma Wengan's
        # "Computational Physics": h(x) is the power-law distribution and g(x)
        # the smooth function (its normalization constant is absorbed into L)
while True:
q_eta = val**(1/(self.beta_q+1))
if np.random.rand() <= self.smooth_function(q_eta*self.mass_1):
return q_eta
val = np.random.rand()
class O3aMassSampler(ParameterSampler):
M1_PRIOR_CONFIG = dict(
m_min=4.59, m_max=86.22, delta_m=4.82, alpha=2.63,
mu_m=33.07, sigma_m=5.69, lambda_peak=0.1)
Q_PRIOR_CONFIG = dict(beta_q=1.26, m_min=4.59, delta_m=4.82)
def __init__(self,
m1_prior: Optional[PowerLawPeakMass] = None,
q_prior: Optional[PowerLawPeakMassRatio] = None) -> None:
if m1_prior is None:
m1_prior = PowerLawPeakMass(**self.M1_PRIOR_CONFIG)
if q_prior is None:
q_prior = PowerLawPeakMassRatio(**self.Q_PRIOR_CONFIG)
self.m1_prior = m1_prior
self.q_prior = q_prior
@property
def keys(self) -> ParaNameList:
return ['mass_1', 'mass_2']
def sample(self) -> ParameterVector:
mass_1 = self.m1_prior.sample()
self.q_prior.mass_1 = mass_1
mass_ratio = self.q_prior.sample()
mass_2 = mass_1 * mass_ratio
return [mass_1, mass_2]
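# --- Minimal usage sketch (illustrative, not part of the original file). ---
# Draws a few (mass_1, mass_2) pairs from the O3a Power Law + Peak population.
if __name__ == '__main__':
    sampler = O3aMassSampler()
    for _ in range(3):
        m1, m2 = sampler.sample()
        print(f'mass_1 = {m1:.2f}, mass_2 = {m2:.2f}')  # mass_2 <= mass_1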
| 3,309
| 29.648148
| 93
|
py
|
gwsky
|
gwsky-master/gwsky/para_sampler/para_sampler.py
|
from abc import ABCMeta, abstractmethod
from ..typing import ParaNameList, ParameterVector
class ParameterSampler(metaclass=ABCMeta):
def __init__(self) -> None:
pass
@property
@abstractmethod
def keys(self) -> ParaNameList:
pass
@abstractmethod
def sample(self) -> ParameterVector:
pass
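# --- Minimal sketch of a concrete subclass (illustrative, not part of the
# original file): samples an isotropic inclination angle. The parameter name
# follows the bilby convention used elsewhere in this package.
import numpy as np

class IotaSampler(ParameterSampler):
    @property
    def keys(self) -> ParaNameList:
        return ['theta_jn']

    def sample(self) -> ParameterVector:
        # cos(iota) uniform in [-1, 1] gives an isotropic orientation
        return [float(np.arccos(np.random.uniform(-1, 1)))]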
| 341
| 18
| 50
|
py
|
gwsky
|
gwsky-master/gwsky/para_sampler/prior_dict.py
|
from bilby.core.prior import PriorDict
from .para_sampler import ParameterSampler
from ..typing import ParaNameList, ParameterVector
class PriorDictSampler(ParameterSampler):
def __init__(self, priors: PriorDict) -> None:
self.priors = priors
self._keys = list(priors.keys())
@property
def keys(self) -> ParaNameList:
return self._keys
def sample(self) -> ParameterVector:
para_dict = self.priors.sample()
return [para_dict[key] for key in self.keys]
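# --- Minimal usage sketch (illustrative, not part of the original file). ---
if __name__ == '__main__':
    from bilby.core.prior import Uniform
    sampler = PriorDictSampler(PriorDict(
        {'geocent_time': Uniform(minimum=0, maximum=86400, name='geocent_time')}))
    print(sampler.keys, sampler.sample())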
| 513
| 24.7
| 52
|
py
|