index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
43,435 | MarcoRLK/mw_scraper | refs/heads/master | /scraping_test.py | from bs4 import BeautifulSoup as soup
from urllib2 import urlopen as uReq
class Scraper:
    """Scrape the course flow table for one degree from matriculaweb.unb.br.

    Pipeline: page_grabber() downloads the HTML, html_parser() builds the
    soup, get_courses_tables() selects the per-semester tables, and
    get_courses_datas() extracts (name, credits) per course.
    """

    # Course-flow page for degree code 6360; parameterized here so other
    # degree codes can be scraped by overriding the attribute.
    URL = "https://matriculaweb.unb.br/graduacao/fluxo.aspx?cod=6360"

    def page_grabber(self):
        """Download and return the raw HTML of the course-flow page."""
        uClient = uReq(self.URL)
        try:
            # try/finally so the connection is closed even if read() fails
            return uClient.read()
        finally:
            uClient.close()

    def html_parser(self, page_html):
        """Parse raw HTML bytes into a BeautifulSoup tree."""
        return soup(page_html, 'html.parser')

    def get_courses_tables(self, page_soup):
        """Return the list of per-semester table <div>s.

        BUG FIX: the attrs argument to findAll must be a dict mapping
        attribute name to value; the original passed the set
        {'class', 'table-responsive'}, which does not filter by class.
        """
        courses_tables = page_soup.findAll('div', {'class': 'table-responsive'})
        courses_tables.pop(0)  # first matching div is not a semester table
        return courses_tables

    def get_courses_datas(self, courses_tables, course_names=None, course_credits=None):
        """Print each course's name and credits; optionally collect them.

        If `course_names` / `course_credits` lists are given, the extracted
        values are appended to them (this is what pandas_are_cool.py expects).
        """
        for semester in courses_tables:
            print('--------')
            semester_courses = semester.findAll('tr')
            # the first two <tr> rows are table headers, not courses
            del semester_courses[:2]
            for course in semester_courses:
                course_attributes = course.findAll('td')
                # getting only the text from the course's <a> tag
                course_name = course_attributes[4].find('a').contents[0]
                course_credits_value = course_attributes[5].contents[0]
                if course_names is not None:
                    course_names.append(course_name)
                if course_credits is not None:
                    course_credits.append(course_credits_value)
                print(course_name + ' ' + course_credits_value)

    def mw_scrapper(self, course_names=None, course_credits=None):
        """Run the full scrape; optionally fill the given output lists.

        The optional parameters make the call in pandas_are_cool.py
        (`scraper.mw_scrapper(course_names, course_credits)`) work while
        remaining backward compatible with argument-free calls.
        """
        page_html = self.page_grabber()
        page_soup = self.html_parser(page_html)
        courses_tables = self.get_courses_tables(page_soup)
        self.get_courses_datas(courses_tables, course_names, course_credits)
| {"/pandas_are_cool.py": ["/scraping_test.py"]} |
43,436 | MarcoRLK/mw_scraper | refs/heads/master | /pandas_are_cool.py | import pandas as pd
import numpy as np
from scraping_test import Scraper
# Build a DataFrame of course credits indexed by course name, using the
# lists filled in by the scraper.
scraper = Scraper()
# NOTE(review): `dates` is never used below — leftover from an experiment?
dates = pd.date_range('2018-01-01', periods=70, freq='M')
course_names = []
course_credits = []
# NOTE(review): Scraper.mw_scrapper in scraping_test.py is defined as
# mw_scrapper(self) with no further parameters — this call would raise
# TypeError; confirm the scraper API before running.
scraper.mw_scrapper(course_names, course_credits)
print (len(course_names))
# df = pd.DataFrame(np.random.randn(70,4), columns=['Data*', 'B', 'C', 'D'])
df = pd.DataFrame( course_credits, columns=['Creditos'], index=course_names )
print(df)
| {"/pandas_are_cool.py": ["/scraping_test.py"]} |
43,471 | alisha17/truthsystem | refs/heads/master | /main.py | import requests
import json
import pprint
import tweepy
import datetime
import mongoengine
import textblob
import sklearn
from collections import Counter
from mongoengine import connect
from models import Tweet, Article
from textblob import TextBlob
from similar import cosine_sim
from config import auth, key
# TO DO:
# make this work
# store in the table
# write function to check link from one user to another user
def news():
    """
    Fetch articles from every newsapi.org source, compare each description
    against the module-level query text, and persist the articles as
    Article documents.

    Relies on module globals set up in the __main__ block: ``url`` (the
    sources endpoint), ``key`` (API key from config), ``text`` (the query)
    and ``list_words`` (query keywords).
    """
    r = requests.get(url)
    new_dict = json.loads(r.text)
    pp = pprint.PrettyPrinter(indent=4)  # kept for interactive debugging
    for item in new_dict['sources']:
        newsSource = item['id']
        newsApiData = "https://newsapi.org/v1/articles?source={0}&apiKey={1}".format(newsSource, key)
        respons = requests.get(newsApiData)
        response_dict = json.loads(respons.text)
        name_of_source = newsSource
        for article in response_dict['articles']:
            description = article['description']
            if description is None:
                # NOTE(review): `break` abandons ALL remaining articles of
                # this source as soon as one lacks a description; `continue`
                # may be the real intent — kept as-is to preserve behavior.
                break
            # Counter over the description's words; `w in wordFrequency`
            # is equivalent to the original `wordFrequency.get(w) != None`.
            # The original `any(...) or all(...)` was redundant: for the
            # non-empty list_words built in __main__, all() implies any().
            wordFrequency = Counter(description.split(' '))
            if any(w in wordFrequency for w in list_words):
                similarity = cosine_sim(text, description)
                print(similarity)
                # if similarity >= 0.2:
                #     print (similarity, item2['description'])
            dict_to_pass = {
                "source": name_of_source,
                "title": article["title"],
                "publishedAt": article["publishedAt"],
                "description": article["description"]
            }
            # BUG FIX: mongoengine Documents take field values as keyword
            # arguments; passing a single positional dict raises TypeError.
            # NOTE(review): every article with a description is saved, not
            # only keyword matches — the commented similarity threshold
            # suggests filtering may have been intended; confirm.
            article_obj = Article(**dict_to_pass)
            article_obj.save()
class StdOutListener(tweepy.StreamListener):
    """
    Tweepy class for streaming data
    """
    def __init__(self, api=None):
        super(StdOutListener, self).__init__()
        # number of (non-retweet) statuses processed so far
        self.num_tweets = 0

    def on_status(self, status):
        """Persist up to 20 non-retweet statuses that resemble the query.

        Uses the module-level ``text`` (the query) via cosine_sim.
        Returning False tells tweepy to stop the stream.
        """
        # skip retweets entirely
        if hasattr (status, 'retweeted_status'):
            return
        self.num_tweets += 1
        if self.num_tweets < 20:
            # only store the tweet if it has nonzero similarity to the query
            if cosine_sim(text, status.text) != 0.0:
                status1 = status.text.lower()
                tweet_test = Tweet(status = status1)
                tweet_test.created_at = status.created_at
                tweet_test.user = status.user.screen_name
                tweet_test.user_id = status.user.id_str
                tweet_test.followers = status.user.followers_count
                tweet_test.friends = status.user.friends_count
                tweet_test.verified = status.user.verified
                tweet_test.geo = status.geo
                # NOTE(review): `ip` actually stores the tweet coordinates,
                # not an IP address — confirm the intended field meaning.
                tweet_test.ip = status.coordinates
                tweet_test.similarity = cosine_sim(text, status.text)
                tweet_test.save()
                print (self.num_tweets)
            return True
        else:
            # reached the 20-tweet budget: stop streaming
            return False

    def on_error(self, status):
        # 420 = rate limited by Twitter; stop the stream instead of retrying
        if status == 420:
            return False
# 19773456
# 19773464
# charts and tables
if __name__ == '__main__':
    # Build the keyword list for the query text, connect to MongoDB and
    # stream matching tweets via StdOutListener.
    list_words = []
    list_sentence = []   # NOTE(review): never used below
    list_nouns = []      # NOTE(review): never used below
    count = 0            # NOTE(review): never used below
    url = "https://newsapi.org/v1/sources?language=en"
    text = "Ivana Trump I am first lady".lower()
    stopwords = ["the", "in", "a", "i", "am"]
    blob = TextBlob(text)
    # start from the individual words of the query...
    list_words = blob.words
    # ...then add whole sentences...
    b = blob.sentences
    for i in b:
        list_words.append(str(i))
    # ...and noun phrases, so the stream filter matches broader variants
    c = blob.noun_phrases
    for j in c:
        list_words.append(str(j))
    list_words = [word for word in list_words if word not in stopwords]
    connect(db = 'twitter_data')
    # news()
    l = StdOutListener()
    stream = tweepy.Stream(auth, l)
    # blocks here until the listener returns False (20 tweets) or errors
    stream.filter(track= list_words)
    # NOTE(review): the sorted queryset is built but discarded — assign or
    # iterate it if ordering was meant to matter.
    Tweet.objects.order_by('-created_at')
    print ("I am done")
# for document in Tweet.objects:
# Real news spread far
# independant parties
| {"/main.py": ["/models.py", "/config.py"]} |
43,472 | alisha17/truthsystem | refs/heads/master | /user.py | # NOT COMPLETED/TESTED YET
# Module-level tweepy/Mongo handles shared by the helpers below.
# NOTE(review): this file has no imports — `tweepy`, `auth` and `client`
# are undefined here (file is marked NOT COMPLETED above); it needs
# `import tweepy`, the auth object from config, and a pymongo client.
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True, retry_count=3, retry_delay=60)
db = client.tweets
raw_tweets = db.raw_tweets
users = db.users
def get_followers(user_id):
    """Return the full follower User objects of `user_id`, page by page.

    Uses py3-compatible print() for consistency with main.py (the original
    used Python-2-only print statements).
    """
    users = []
    # enumerate replaces the unused page_count counter of the original
    for i, page in enumerate(tweepy.Cursor(api.followers, id=user_id, count=200).pages()):
        print('Getting page {} for followers'.format(i))
        users += page
    return users
def get_friends(user_id):
    """Return the full friend User objects of `user_id`, page by page.

    Uses py3-compatible print() for consistency with main.py (the original
    used Python-2-only print statements).
    """
    users = []
    # enumerate(..., 1) replaces the manual page_count counter
    for page_count, page in enumerate(tweepy.Cursor(api.friends, id=user_id, count=200).pages(), 1):
        print('Getting page {} for friends'.format(page_count))
        users.extend(page)
    return users
def get_followers_ids(user_id):
    """Return the numeric IDs of all followers of `user_id` (5000/page).

    Uses py3-compatible print() for consistency with main.py (the original
    used Python-2-only print statements).
    """
    ids = []
    # enumerate(..., 1) replaces the manual page_count counter
    for page_count, page in enumerate(tweepy.Cursor(api.followers_ids, id=user_id, count=5000).pages(), 1):
        print('Getting page {} for followers ids'.format(page_count))
        ids.extend(page)
    return ids
def get_friends_ids(user_id):
    """Return the numeric IDs of all friends of `user_id` (5000/page).

    Uses py3-compatible print() for consistency with main.py (the original
    used Python-2-only print statements).
    """
    ids = []
    # enumerate(..., 1) replaces the manual page_count counter
    for page_count, page in enumerate(tweepy.Cursor(api.friends_ids, id=user_id, count=5000).pages(), 1):
        print('Getting page {} for friends ids'.format(page_count))
        ids.extend(page)
    return ids
| {"/main.py": ["/models.py", "/config.py"]} |
43,473 | alisha17/truthsystem | refs/heads/master | /models.py | import mongoengine
import datetime
from mongoengine import *
class Tweet(Document):
    # mongoengine document for a streamed tweet, populated by
    # StdOutListener.on_status in main.py.
    status = StringField()        # lower-cased tweet text
    created_at = DateTimeField(default=datetime.datetime.now)
    user = StringField()          # author's screen name
    user_id = StringField()       # author's id_str
    followers = IntField()        # author's follower count at capture time
    friends = IntField()          # author's friend count at capture time
    verified = BooleanField()     # whether the author account is verified
    geo = StringField()           # status.geo as assigned in main.py
    # NOTE(review): main.py stores status.coordinates here, not an IP —
    # the field name is misleading; confirm intended meaning.
    ip = StringField()
    similarity = DecimalField()   # cosine similarity vs. the query text
class Article(Document):
    # mongoengine document for a news article fetched from newsapi.org
    # (saved by news() in main.py).
    source = DynamicField()       # source id string from the API
    title = StringField()
    description = StringField()
    publishedAt = DateTimeField(default= datetime.datetime.now)
    # NOTE(review): news() never sets similarity on saved articles —
    # it will stay unset unless assigned elsewhere.
    similarity = DecimalField()
| {"/main.py": ["/models.py", "/config.py"]} |
43,474 | alisha17/truthsystem | refs/heads/master | /config.py | import tweepy
#Consumer and Access code for twitter api
# Placeholders — replace with real credentials before running; keep the
# real values out of version control.
consumer_key = "XXXXXXXXXX"
consumer_secret = "XXXXXXXXXX"
access_token = "XXXXXXXXXX"
access_token_secret = "XXXXXXXXXX"
# Pre-built OAuth handler importable as `config.auth` (used by main.py)
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
#News Api key
key= "XXXXXXXXXX" | {"/main.py": ["/models.py", "/config.py"]} |
43,476 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/execution_trace.py | from collections import namedtuple
from json import JSONEncoder
from . import functional_unit, reservation_station
from .instruction import Instruction
# Immutable hazard records appended to ExecutionTrace.hazards.  Each one
# carries the simulation times when the hazard was detected and resolved,
# plus the resource/register involved where applicable.
RSUnavailableHazard = namedtuple(
    "RSUnavailableHazard", ["detected_at", "solved_at", "assigned_rs"])
FUUnavailableHazard = namedtuple(
    "FUUnavailableHazard", ["detected_at", "solved_at", "assigned_fu"])
CDBUnavailableHazard = namedtuple(
    "CDBUnavailableHazard", ["detected_at", "solved_at"])
MemQueueSlotUnavailableHazard = namedtuple(
    "MemQueueSlotUnavailableHazard", ["detected_at", "solved_at"])
RAWHazard = namedtuple(
    "RAWHazard", ["detected_at", "solved_at", "register", "source_rs"])
class ExecutionTrace:
    """Per-instruction execution record: issue/execute/write timestamps,
    hazards encountered, and the RS/FU the instruction used."""

    def __init__(self, instruction):
        super().__init__()
        self.instruction = instruction
        self.hazards = []            # hazard namedtuples, in detection order
        self.issued = None           # clock of issue, once known
        self.start_execution = None  # clock execution began
        self.write_result = None     # clock the result write started
        self.written_result = None   # clock the result was written
        self.rs = None               # reservation station used
        self.fu = None               # functional unit used

    def to_dict(self):
        """Return a plain-dict view, flattening RS/FU to ids and the
        instruction to its string form."""
        def _plain(value):
            if isinstance(value, (reservation_station.ReservationStation,
                                  functional_unit.FunctionalUnit)):
                return value.id
            if isinstance(value, Instruction):
                return str(value)
            return value

        return {key: _plain(value) for key, value in self.__dict__.items()}
class ExecutionTraceSerializer(JSONEncoder):
    """JSON encoder that falls back to repr() for objects json cannot
    serialize natively (RS/FU/instruction objects in trace dicts)."""

    def default(self, obj):
        return repr(obj)
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
def get_log_str(env, name, msg, *fmtargs):
    """Format one simulator log line: 'CLK nnn | [component |] message'.

    `env.now` supplies the clock; `name` (or None) is the component tag;
    `msg` is a str.format template applied to `fmtargs`.
    """
    body = msg.format(*fmtargs)
    if name is None:
        return "CLK {:>3n} | {}".format(env.now, body)
    return "CLK {:>3n} | {:>6s} | {}".format(env.now, name, body)
def log_with_time(env, name, msg, *fmtargs):
    """Format a simulator log line and print it immediately."""
    line = get_log_str(env, name, msg, *fmtargs)
    print(line)
def get_logger(env, name):
    """Return a logging closure bound to a fixed env and component name."""
    def log(msg, *fmtargs):
        log_with_time(env, name, msg, *fmtargs)
    return log
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,478 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/cpu.py | from copy import deepcopy
import simpy
from tomasulo_simulator import execution_trace as etrace
from tomasulo_simulator.cpu_config import CpuConfig
from tomasulo_simulator.cdb import CDB
from tomasulo_simulator.functional_unit import AluFU, MemFU
from tomasulo_simulator.instruction import (
HaltInstruction, BreakpointInstruction,
ControlFlowInstruction, MemInstruction,
AluInstruction, FloatingInstruction
)
from tomasulo_simulator.log_utils import get_logger
from tomasulo_simulator.memory import Memory
from tomasulo_simulator.registerfile import RegisterFile
from tomasulo_simulator.reservation_station import ALUReservationStation, MemReservationStation
class CPU:
    """
    Tomasulo-algorithm CPU model driven by a simpy environment.

    Wires together memory, register file, common data bus, functional-unit
    pools and reservation-station pools according to a CpuConfig, then
    fetches and dispatches `instructions` when run() is processed.
    """
    def __init__(self, env: simpy.Environment, instructions, config: CpuConfig, breakpoint_handler=None):
        self._instructions = instructions
        self.config = config
        self.memory = Memory(env, config)
        self.reg_file = RegisterFile(env, self, config.gp_registers, config.fp_registers)
        # Common data bus
        self.CDB = CDB(env, config.cdb_width)
        # TODO: distinguish ALU from FPALU
        # Functional-unit pools: simpy Stores pre-filled with the
        # config-specified number of units (list comprehension used purely
        # for its put() side effects).
        self.alu_FU = simpy.Store(env)
        [self.alu_FU.put(AluFU()) for _ in range(config.alu_fu)]
        self.fpalu_FU = simpy.Store(env)
        [self.fpalu_FU.put(AluFU()) for _ in range(config.fpalu_fu)]
        self.mem_FU = simpy.Store(env)
        [self.mem_FU.put(MemFU()) for _ in range(config.mem_fu)]
        # Reservation-station pools; each RS is handed the FU pool it draws
        # from and the store it returns itself to when freed.
        self.alu_RS = simpy.Store(env)
        [self.alu_RS.put(ALUReservationStation(env, self, self.alu_FU, self.alu_RS)) for _ in range(config.alu_rs)]
        self.fpalu_RS = simpy.Store(env)
        [self.fpalu_RS.put(ALUReservationStation(env, self, self.fpalu_FU, self.fpalu_RS)) for _ in range(config.fpalu_rs)]
        self.mem_RS = simpy.Store(env)
        [self.mem_RS.put(MemReservationStation(env, self, self.mem_FU, self.mem_RS)) for _ in range(config.mem_rs)]
        # This list will hold the executed instructions
        self.executed_instructions = []
        self.env = env
        if breakpoint_handler is None:
            self.breakpoint_handler = self._default_breakpoint_handler
        else:
            self.breakpoint_handler = breakpoint_handler
        self._log = get_logger(env, "CPU")

    def _dispatch(self):
        """simpy process: fetch/decode/issue loop; ends on HLT or PC overrun."""
        while True:
            self._log("Fetching instruction at PC {}", self.reg_file["PC"])
            yield self.env.timeout(self.config.fetch_latency)
            # TODO: find a better way to log execution traces without deepcopying the instruction object
            # Right now the object is copied so stats can be saved directly into it, and it works even for
            # tight loops where the same instruction may be executing many times simultaneously
            try:
                next_instruction = deepcopy(self._instructions[self.reg_file["PC"]])
            except IndexError:
                # PC ran past the program: warn and stop dispatching
                self._log("WARNING: PC {} is out of range", self.reg_file["PC"])
                self._log("Use HLT instructions")
                return
            self._log("Fetched {}", next_instruction)
            self.reg_file["PC"] += 1
            if isinstance(next_instruction, HaltInstruction):
                return
            if isinstance(next_instruction, BreakpointInstruction):
                # run the instruction's own handler (if any), then the
                # CPU-wide one, without issuing anything
                if next_instruction.handler is not None:
                    next_instruction.handler(self)
                self.breakpoint_handler(self)
                continue
            # Get an appropriate reservation station
            rs = yield self.env.process(self.get_reservation_station(next_instruction))
            # Loads and stores also need a spot in the memory access queue
            if isinstance(rs, MemReservationStation):
                yield self.env.process(self.memory.enqueue_memory_access(rs, next_instruction))
            # Issue the instruction to the reservation station
            self._log("Issuing {} to {}", next_instruction, rs)
            self.env.process(rs.issue(next_instruction))
            # TODO: log fetch stall (as conflict)
            # TODO: implement speculative execution
            if isinstance(next_instruction, ControlFlowInstruction):
                # no branch prediction: wait on the CDB for the branch's
                # resolved target before fetching anything else
                self._log("Stalling fetches until the new PC is available")
                self.reg_file.values["PC"] = yield self.CDB.snoop(rs)

    def get_reservation_station(self, instruction):
        """simpy process: acquire a free RS of the right kind, recording a
        structural hazard in the instruction's stats if it had to wait."""
        if isinstance(instruction, AluInstruction):
            if isinstance(instruction, FloatingInstruction):
                rs_store = self.fpalu_RS
            else:
                rs_store = self.alu_RS
        elif isinstance(instruction, MemInstruction):
            rs_store = self.mem_RS
        else:
            raise Exception("Unrecognized instruction: {}".format(instruction))
        # FIXME
        # this code sucks a bit, but it's necessary to detect
        # structural hazards immediately to print them in order.
        # Checking if rs_store.items is empty does not work, as
        # an RS may be be relased right after we yield so no conflict is happening
        # The idea is to check if the request is granted in zero time,
        # but any_of is not guaranteed to yield both the resource request and the timeout
        # at the same time (as in in a single call), even if they will trigger at the same
        # simulation time.
        # Possible definitive solutions:
        # 1) ignore the problem and not print conflicts in real time
        # 2) use a 0.1 clock-cycles timeout (so the resource request will be granted first if any is available)
        store_req_start = self.env.now
        rs_req = rs_store.get()
        res = yield self.env.any_of([rs_req, self.env.timeout(0)])
        if rs_req in res:
            # granted in zero time: no structural hazard
            hazard = False
            obtained_rs = rs_req.value
        else:
            self._log("Structural hazard: no RS available for {}", instruction)
            hazard = True
            obtained_rs = yield rs_req
        if hazard:
            self._log("Structural hazard solved: obtained {} for {}", obtained_rs, instruction)
            instruction.stats.hazards.append(etrace.RSUnavailableHazard(store_req_start, self.env.now, obtained_rs))
        return obtained_rs

    def run(self):
        """simpy process: run the dispatch loop to completion."""
        self._log("Starting instruction dispatch")
        yield self.env.process(self._dispatch())
        self._log("Stopped instruction dispatch")

    def dump_memory(self):
        """Print the whole memory, 4 bytes per line, as hex."""
        for addr in range(0, len(self.memory._memory), 4):
            contents = ["0x{:0>2x}".format(m) for m in self.memory._memory[addr:addr + 4]]
            print("0x{:x}: ".format(addr).ljust(6) + " ".join(contents))

    @staticmethod
    def _default_breakpoint_handler(cpu):
        # default action on BRK: just dump the register file
        cpu._log("Breakpoint hit, registers: {}", cpu.reg_file)

    def __repr__(self):
        return "CLK {:>3n} | CPU Registers: {}".format(self.env.now, self.reg_file)
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,479 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/reservation_station/__init__.py | from .alu_reservation_station import ALUReservationStation
from .mem_reservation_station import MemReservationStation
from .reservation_station import ReservationStation
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,480 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/alu_instructions.py | from abc import ABC, abstractmethod
from .instruction import Instruction
class AluInstruction(Instruction, ABC):
    """Abstract base for ALU-type instructions writing one destination register."""
    def __init__(self, dst_reg):
        super().__init__()
        # register written by this instruction's result
        self.dst_reg = dst_reg
    @property
    @abstractmethod
    def mnemonic(self):
        # subclasses supply the assembly mnemonic (concrete subclasses in
        # this package set it as a plain class attribute)
        raise NotImplementedError()
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,481 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/bitwise_instructions.py | from abc import ABC, abstractmethod
from .alu_instructions import AluInstruction
class BitwiseInstruction(AluInstruction, ABC):
    """Abstract base for two-operand bitwise ALU instructions (AND, OR, ...)."""

    def __init__(self, dst_reg, op1, op2):
        """Store the destination register and the two source operands."""
        super().__init__(dst_reg)
        self.OP1 = op1
        self.OP2 = op2

    @property
    def operands_str(self):
        """Render the operands as 'dst, op1, op2' for disassembly output."""
        return ", ".join(str(part) for part in (self.dst_reg, self.OP1, self.OP2))

    @staticmethod
    @abstractmethod
    def result(op1, op2):
        """Compute the result of the bitwise operation on the two operands."""
        raise NotImplementedError()

    @property
    @abstractmethod
    def mnemonic(self):
        """Assembly mnemonic of the concrete instruction."""
        raise NotImplementedError()
class AndInstruction(BitwiseInstruction):
    """Bitwise AND of two operands, truncated to 8 bits."""

    mnemonic = "AND"

    @staticmethod
    def result(op1, op2):
        """Return op1 AND op2, masked to one byte."""
        return op1 & op2 & 0xff
class OrInstruction(BitwiseInstruction):
    """Bitwise OR of two operands, truncated to 8 bits."""

    mnemonic = "OR"

    @staticmethod
    def result(op1, op2):
        """Return op1 OR op2, masked to one byte."""
        # OR distributes over the mask, so masking each operand first
        # yields the same byte as masking the combined result.
        return (op1 & 0xff) | (op2 & 0xff)
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,482 | fcremo/tomasulo-simulator | refs/heads/master | /simulation.py | #!/usr/bin/env python3
import argparse
import json
import time
import IPython
import pandas as pd
import simpy
from tomasulo_simulator import Parser
from tomasulo_simulator import assemble
from tomasulo_simulator import CpuConfig
from tomasulo_simulator import CPU
from tomasulo_simulator.execution_trace import ExecutionTraceSerializer
def main():
    """Parse the program, assemble it, run the simulation and report stats."""
    print_figlet()

    with open(args.program) as f:
        source = f.read()

    directives, code = Parser().parse_code(source)
    instructions = assemble(code)
    config = CpuConfig(directives)

    if args.dump_assembled_instructions:
        print("Assembled instructions:")
        print(dump_instructions(instructions))

    env = simpy.Environment()
    cpu = CPU(env, instructions, config)

    run_simulation(env, cpu)
    collect_statistics(cpu)
def print_figlet():
    """Print the ASCII-art program logo unless --quiet was given."""
    if args.quiet:
        return
    # Imported lazily so quiet runs never pay for pyfiglet.
    from pyfiglet import Figlet
    print(Figlet(font="nancyj-improved").renderText("Tomasulo simulator"))
def run_simulation(env, cpu):
    """Run the CPU process to completion, either all at once or step by step."""
    env.process(cpu.run())

    if not args.step_by_step:
        env.run()
        print(cpu)
        return

    # Step-by-step mode: advance one simulation event at a time, printing
    # the CPU state after every step and optionally dropping into IPython.
    while True:
        try:
            env.step()
        except simpy.core.EmptySchedule:
            # No more scheduled events: the simulation is over.
            break
        finally:
            # Printed on every step, including the final (empty) one.
            print(cpu)
        if args.interactive:
            try:
                spawn_ipython_handler(cpu)
            except Exception as e:
                print(e)
def collect_statistics(cpu):
    """Gather per-instruction execution traces, then print and/or dump them.

    Statistics are printed to stdout unless --no-stats was passed; when
    --output PATH was given, they are additionally serialized to that file
    as JSON.
    """
    stats = [i.stats.to_dict() for i in cpu.executed_instructions]
    if not args.no_stats:
        print_stats(stats)
    if args.output:
        # Fix: honor the path supplied with --output. Previously the value
        # was ignored and a timestamped file under a hard-coded ./outputs/
        # directory was always created (failing when the directory did not
        # exist). Mode "x" is kept so an existing file is never clobbered.
        with open(args.output, "x") as f:
            json.dump(stats, f, cls=ExecutionTraceSerializer)
def print_stats(stats):
    """Pretty-print the per-instruction execution trace as a table.

    Parameters:
        stats: list of dicts as produced by ExecutionTrace.to_dict(),
               one entry per retired instruction; each dict has the keys
               listed in col_order below.
    """
    # Display headers and the matching dict keys, in column order.
    columns = ["Instruction", "Issue", "Start exec.", "Write res.", "Written res.", "Hazards", "RS", "FU"]
    col_order = ["instruction", "issued", "start_execution", "write_result", "written_result", "hazards", "rs", "fu"]
    df = pd.DataFrame(stats).sort_values(by="issued")[col_order]
    # Fix: -1 is deprecated (and rejected by pandas >= 1.0); None means
    # "no column width limit", which is the intended behavior.
    pd.set_option('display.max_colwidth', None)
    formatters = {
        "hazards": lambda hazards: " ".join(hazard.__repr__() for hazard in hazards)
    }
    print("\n")
    print(df.to_string(header=columns, justify="end", formatters=formatters))

# TODO: print ClockPerInstruction/InstructionsPerClock
def spawn_ipython_handler(cpu):
    """Drop into an interactive IPython shell for inspecting the CPU state."""
    banner = ("The cpu variable contains a reference to the CPU instance.\n"
              "Use 'quit' to exit.")
    IPython.embed(header=banner)
def dump_instructions(instructions):
    """Return a newline-separated listing of instructions, ending in a newline."""
    lines = [str(ins) for ins in instructions]
    return "\n".join(lines) + "\n"
# Command-line interface: one positional program path, an optional output
# path, and a set of boolean flags declared table-style below.
argparser = argparse.ArgumentParser(description="Tomasulo algorithm simulator")
argparser.add_argument("program", help="the assembly file to execute")
argparser.add_argument("--output", "-o", help="Output statistics to json")
for flags, text in (
    (("--no-stats", "-n"), "Don't output statistics on STDOUT"),
    (("--interactive", "-i"), "Spawn IPython shell during the simulation"),
    (("--step-by-step", "-s"), "Execute the simulation step by step"),
    (("--dump-assembled-instructions", "-d"), "Print the assembled instructions"),
    (("--quiet", "-q"), "Don't print the program logo"),
):
    argparser.add_argument(*flags, help=text, action="store_true")

if __name__ == "__main__":
    args = argparser.parse_args()
    main()
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,483 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/assembler.py | from tomasulo_simulator import instruction
class Assembler:
    """Two-pass assembler: records label positions, then resolves jump targets."""

    def __init__(self):
        # Instructions in program order; labels maps a label id to the
        # index of the instruction that follows the label.
        self.code = []
        self.labels = {}

    def get_assembled_code(self):
        """Return the instruction list with control-flow targets resolved.

        Raises RuntimeError when a jump/branch references an undeclared label.
        """
        assembled = []
        for ins in self.code:
            if isinstance(ins, instruction.ControlFlowInstruction):
                try:
                    ins.address = self.labels[ins.label]
                except KeyError:
                    raise RuntimeError("Label {} is not declared".format(ins.label))
            assembled.append(ins)
        return assembled

    def set_code(self, code):
        """Feed a sequence of instructions and labels into the assembler."""
        for item in code:
            self + item

    def __add__(self, code):
        """Record a label position or append an instruction; returns self.

        Anything that is neither a Label nor an Instruction is ignored.
        """
        if isinstance(code, instruction.Label):
            self.labels[code.id] = len(self.code)
        elif isinstance(code, instruction.Instruction):
            self.code.append(code)
        return self
def assemble(instructions):
    """Assemble parsed instructions, resolving label references to addresses."""
    asm = Assembler()
    for item in instructions:
        asm = asm + item
    return asm.get_assembled_code()
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,484 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/reservation_station/alu_reservation_station.py | from ..instruction import AluInstruction
from ..instruction import BitwiseInstruction
from ..instruction import BranchInstruction
from ..instruction import ControlFlowInstruction
from ..instruction import FloatingInstruction
from ..instruction import IntegerInstruction
from ..instruction import JumpInstruction
from ..instruction import LogicInstruction
from .reservation_station import ReservationStation
class ALUReservationStation(ReservationStation):
    """Reservation station serving ALU and control-flow instructions.

    Decodes operands, waits on operand reads and a free functional unit,
    then executes the instruction and stores the outcome in self.result.
    """

    # Short name used when generating station ids / logs.
    kind = "AluRS"
    # Per-subclass counter for numbering stations of this kind.
    incremental_id = 1

    def __init__(self, env, cpu, fu_store, rs_store):
        super().__init__(env, cpu, fu_store, rs_store)
        # Pending operand-read events (None when the instruction kind
        # has no register operands) and the values they resolve to.
        self.OP1_read = None
        self.OP2_read = None
        self.OP1_val = None
        self.OP2_val = None

    def _decode_operands(self):
        """Start operand reads and claim the destination register in the RF."""
        # All two-operand instruction kinds read OP1/OP2 from the RF/CDB.
        if isinstance(self.instruction, (IntegerInstruction, LogicInstruction, BitwiseInstruction,
                                         BranchInstruction, FloatingInstruction)):
            self.OP1_read = self._read_operand(self.instruction.OP1)
            self.OP2_read = self._read_operand(self.instruction.OP2)
        # Associate destination register with this reservation station in the RF
        # (branches are excluded: their destination is the PC, not a data reg).
        if isinstance(self.instruction, (IntegerInstruction, LogicInstruction, BitwiseInstruction, FloatingInstruction)):
            self.cpu.reg_file.associate_rs_with_reg(self, self.instruction.dst_reg)

    def _wait_for_dependencies(self):
        """simpy process: wait for both operand values and a functional unit."""
        dependencies = []
        if self.OP1_read is not None:
            dependencies.append(self.OP1_read)
        if self.OP2_read is not None:
            dependencies.append(self.OP2_read)
        fu_request = self.env.process(self._get_functional_unit())
        dependencies.append(fu_request)
        # Wait for dependencies to be ready
        results = yield self.env.all_of(dependencies)
        if self.OP1_read in results:
            self.OP1_val = results[self.OP1_read]
        if self.OP2_read in results:
            self.OP2_val = results[self.OP2_read]
        # The acquired functional unit executing this instruction.
        self.FU = results[fu_request]

    def _execute(self):
        """Dispatch execution to the control-flow or plain-ALU path."""
        if isinstance(self.instruction, ControlFlowInstruction):
            yield self.env.process(self._execute_control_flow_instruction())
        elif isinstance(self.instruction, AluInstruction):
            yield self.env.process(self._execute_alu_instruction())
        else:
            raise ValueError("Instruction type unsupported: {}".format(type(self.instruction)))

    def _execute_alu_instruction(self):
        """Model the latency, then compute the instruction's data result."""
        yield self.env.timeout(self._execution_latency())
        self.result = self.instruction.result(self.OP1_val, self.OP2_val)

    def _execute_control_flow_instruction(self):
        """Dispatch between unconditional jumps and conditional branches."""
        if isinstance(self.instruction, JumpInstruction):
            yield self.env.process(self._execute_jump_instruction())
        elif isinstance(self.instruction, BranchInstruction):
            yield self.env.process(self._execute_branch_instruction())
        else:
            raise NotImplementedError()

    def _execute_jump_instruction(self):
        """Resolve an unconditional jump: the result is the target address."""
        # TODO: support indirect jumps like JMP R1
        yield self.env.timeout(self._execution_latency())
        self.result = self.instruction.address

    def _execute_branch_instruction(self):
        """Evaluate the branch condition; result is the next PC value."""
        yield self.env.timeout(self._execution_latency())
        self.result = self.instruction.result(self.OP1_val, self.OP2_val, self.cpu.reg_file["PC"])
        # A result different from the current PC means the branch was taken.
        if self.result != self.cpu.reg_file["PC"]:
            self._log("{} TAKEN, new PC is {}", self.instruction, self.result)
        else:
            self._log("{} NOT TAKEN", self.instruction)

    def _reset(self):
        """Clear per-instruction state so the station can be reused."""
        super()._reset()
        self.OP1_read = None
        self.OP2_read = None
        self.OP1_val = None
        self.OP2_val = None
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,485 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/control_flow_instructions.py | from abc import ABC, abstractmethod
from .alu_instructions import AluInstruction
class ControlFlowInstruction(AluInstruction, ABC):
    """Base class for instructions whose destination register is the PC.

    The target is given either symbolically (label) or numerically
    (address); the assembler resolves labels to addresses.
    """

    def __init__(self, label=None, address=None):
        # Every control-flow instruction writes the program counter.
        super().__init__("PC")
        self.label = label
        self.address = address

    @property
    @abstractmethod
    def mnemonic(self):
        """Assembly mnemonic of the concrete instruction."""
        raise NotImplementedError()
class JumpInstruction(ControlFlowInstruction):
    """Unconditional jump to a label, resolved to an address by the assembler."""

    mnemonic = "JMP"

    @property
    def operands_str(self):
        """Render as 'label(address)' for disassembly output."""
        return "{}({})".format(self.label, self.address)
class BranchInstruction(ControlFlowInstruction, ABC):
    """Base class for conditional branches comparing two operands.

    result() is an *instance* method: taken branches return the
    per-instruction self.address, not-taken branches return current_pc.
    """

    def __init__(self, op1, op2, label=None, address=None):
        """Store the two compared operands and the branch target."""
        super().__init__(label, address)
        self.OP1 = op1
        self.OP2 = op2

    # Fix: this was declared @staticmethod, but every concrete subclass
    # overrides it as an instance method (it must read self.address), so
    # the abstract stub's contract now matches the implementations.
    @abstractmethod
    def result(self, op1, op2, current_pc):
        """Return the next PC: self.address when taken, current_pc otherwise."""
        raise NotImplementedError()

    @property
    def operands_str(self):
        """Render as 'op1, op2, label(address)' for disassembly output."""
        return "{}, {}, {}({})".format(self.OP1, self.OP2, self.label, self.address)

    @property
    @abstractmethod
    def mnemonic(self):
        """Assembly mnemonic of the concrete branch instruction."""
        raise NotImplementedError()
class BEQInstruction(BranchInstruction):
    """Branch to the target when the operands are equal."""

    mnemonic = "BEQ"

    def result(self, op1, op2, current_pc):
        """Return the branch target if op1 == op2, else fall through."""
        return self.address if op1 == op2 else current_pc
class BNEInstruction(BranchInstruction):
    """Branch to the target when the operands differ."""

    mnemonic = "BNE"

    def result(self, op1, op2, current_pc):
        """Return the branch target if op1 != op2, else fall through."""
        return self.address if op1 != op2 else current_pc
class BLTInstruction(BranchInstruction):
    """Branch to the target when op1 is strictly less than op2."""

    mnemonic = "BLT"

    def result(self, op1, op2, current_pc):
        """Return the branch target if op1 < op2, else fall through."""
        return self.address if op1 < op2 else current_pc
class BLEInstruction(BranchInstruction):
    """Branch to the target when op1 is less than or equal to op2."""

    mnemonic = "BLE"

    def result(self, op1, op2, current_pc):
        """Return the branch target if op1 <= op2, else fall through."""
        return self.address if op1 <= op2 else current_pc
class BGTInstruction(BranchInstruction):
    """Branch to the target when op1 is strictly greater than op2."""

    mnemonic = "BGT"

    def result(self, op1, op2, current_pc):
        """Return the branch target if op1 > op2, else fall through."""
        return self.address if op1 > op2 else current_pc
class BGEInstruction(BranchInstruction):
    """Branch to the target when op1 is greater than or equal to op2."""

    mnemonic = "BGE"

    def result(self, op1, op2, current_pc):
        """Return the branch target if op1 >= op2, else fall through."""
        return self.address if op1 >= op2 else current_pc
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,486 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/instruction.py | from abc import ABC, abstractmethod
from ..cpu_config import CpuConfig
class Instruction(ABC):
    """Abstract base class for every simulated instruction.

    Assigns each instance a unique id and attaches an ExecutionTrace
    used for per-instruction statistics.
    """

    # Class-wide counter handing out unique, monotonically increasing ids.
    incremental_id = 1

    def __init__(self):
        # Imported lazily to avoid a circular import with execution_trace.
        from ..execution_trace import ExecutionTrace
        self.id = Instruction.incremental_id
        Instruction.incremental_id += 1
        # Variables used for execution tracing
        self.stats = ExecutionTrace(self)

    @property
    @abstractmethod
    def mnemonic(self):
        """Assembly mnemonic (e.g. "ADD"); must be provided by subclasses."""
        raise NotImplementedError()

    @property
    def operands_str(self):
        """Human-readable operand list; empty for operand-less instructions."""
        return ""

    @property
    def mnemonic_full(self):
        """Mnemonic followed by its rendered operands."""
        return "{} {}".format(self.mnemonic, self.operands_str)

    def latency(self, config: CpuConfig):
        """Look up this instruction's execution latency in the CPU config.

        Returns the configured "<mnemonic>_execution_latency" value, or
        None when nothing is configured before the Instruction base is
        reached in the MRO.

        NOTE(review): the loop reads self.mnemonic on every iteration, so
        walking the MRO never changes the config key being looked up —
        presumably c.mnemonic was intended to fall back to ancestor
        mnemonics; confirm intent before changing.
        """
        for c in self.__class__.__mro__:
            if c is Instruction:
                return None
            latency = config[self.mnemonic.lower() + "_execution_latency"]
            if latency is not None:
                return latency
        return None

    def __repr__(self):
        # Includes the unique id for debugging; __str__ omits it.
        return "INS #{}: {}".format(self.id, self.mnemonic_full)

    def __str__(self):
        return self.mnemonic_full
class HaltInstruction(Instruction):
    """Marks the end of the program: stops the simulated CPU when issued."""

    @property
    def mnemonic(self):
        """Assembly mnemonic for the halt instruction."""
        opcode = "HLT"
        return opcode
class BreakpointInstruction(Instruction):
    """Debug instruction that invokes an optional handler when reached."""

    def __init__(self, handler=None):
        """handler: optional callable invoked when the breakpoint fires."""
        super().__init__()
        self.handler = handler

    @property
    def mnemonic(self):
        """Assembly mnemonic for the breakpoint instruction."""
        return "BREAK"
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,487 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/functional_unit.py | from abc import ABC
class FunctionalUnit(ABC):
    """A hardware functional unit, identified by a unique string id."""

    def __init__(self, id):
        self.id = id

    def __str__(self):
        return self.id

    def __repr__(self):
        return self.id


class AluFU(FunctionalUnit):
    """Arithmetic/logic functional unit; instances are named ALU1, ALU2, ..."""

    # Class-wide counter: each new instance consumes the next number.
    id_autoincrement = 1

    def __init__(self):
        unit_id = "ALU{}".format(AluFU.id_autoincrement)
        AluFU.id_autoincrement += 1
        super().__init__(unit_id)


class MemFU(FunctionalUnit):
    """Memory functional unit; instances are named MEM1, MEM2, ..."""

    # Class-wide counter: each new instance consumes the next number.
    id_autoincrement = 1

    def __init__(self):
        unit_id = "MEM{}".format(MemFU.id_autoincrement)
        MemFU.id_autoincrement += 1
        super().__init__(unit_id)
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,488 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/parser/parser.py | from lark import Lark
from pkg_resources import resource_string
from ..instruction import HaltInstruction
from ..instruction import Label
from ..instruction.bitwise_instructions import AndInstruction, OrInstruction
from ..instruction import (
BEQInstruction, BNEInstruction,
BLTInstruction, BLEInstruction,
BGTInstruction, BGEInstruction,
JumpInstruction
)
from ..instruction.floating_instructions import FAddInstruction, FSubInstruction
from ..instruction.integer_instructions import AddInstruction, SubInstruction
from ..instruction.logic_instructions import AndlInstruction, OrlInstruction
from ..instruction import LoadInstruction, StoreInstruction
grammar = resource_string("tomasulo_simulator.parser", "grammar.lark").decode("utf-8")
class Parser:
    """Translate a parsed assembly program (a lark AST) into directives
    and Instruction objects.

    Constants declared in the source are collected in ``self.constants``
    and substituted wherever an operand or immediate refers to them by
    name.
    """

    def __init__(self):
        # Maps constant name -> integer value, filled by const declarations.
        self.constants = {}

    def parse_code(self, string):
        """Parse assembly source text.

        Returns a tuple ``(directives, instructions)``: directives maps
        each directive name (leading dot stripped) to an int; instructions
        is the list produced by parse_instruction for every instruction
        subtree.

        NOTE(review): const declarations yield None entries in the
        instructions list (parse_instruction returns nothing for them) --
        presumably filtered by the caller; verify.
        """
        parser = Lark(grammar, parser="lalr")
        ast = parser.parse(string)
        directives = self.ast_to_directives(list(ast.find_data("directives"))[0].children)
        instructions = self.ast_to_instructions(ast)
        return directives, instructions

    def parse_instruction(self, instruction):
        """Convert one instruction subtree into the matching Instruction.

        const declarations only update ``self.constants`` and return None.

        Raises:
            Exception: for an unrecognized subtree kind or mnemonic.
                (Previously an unknown ALU mnemonic fell through to a
                misleading error and an unknown FP mnemonic silently
                returned None; both now fail loudly.)
        """
        kind = instruction.data
        if kind == "alu_instruction":
            # Mnemonic -> instruction class; all ALU ops share (dst, op1, op2).
            alu_ops = {
                "ADD": AddInstruction, "SUB": SubInstruction,
                "AND": AndInstruction, "OR": OrInstruction,
                "ANDL": AndlInstruction, "ORL": OrlInstruction,
            }
            operation = self.get_alu_op(instruction)
            if operation not in alu_ops:
                raise Exception("Unrecognized ALU operation: {}".format(operation))
            return alu_ops[operation](self.get_dst(instruction),
                                      self.get_op1(instruction),
                                      self.get_op2(instruction))
        elif kind == "fp_alu_instruction":
            fp_ops = {"FADD": FAddInstruction, "FSUB": FSubInstruction}
            operation = self.get_fp_alu_op(instruction)
            if operation not in fp_ops:
                raise Exception("Unrecognized FP operation: {}".format(operation))
            return fp_ops[operation](self.get_fp_dst(instruction),
                                     self.get_fp_op1(instruction),
                                     self.get_fp_op2(instruction))
        elif kind == "jump_instruction":
            return JumpInstruction(self.get_label(instruction))
        elif kind == "branch_instruction":
            branch_ops = {
                "BEQ": BEQInstruction, "BNE": BNEInstruction,
                "BLT": BLTInstruction, "BLE": BLEInstruction,
                "BGT": BGTInstruction, "BGE": BGEInstruction,
            }
            branch_op = self.get_branch_op(instruction)
            if branch_op not in branch_ops:
                raise Exception("Unrecognized branch operation: {}".format(branch_op))
            return branch_ops[branch_op](self.get_op1(instruction),
                                         self.get_op2(instruction),
                                         self.get_label(instruction))
        elif kind == "load_instruction":
            dst = self.get_dst(instruction)
            offset_reg = self.get_register(instruction)
            if offset_reg is None:
                # No offset register given: default to R0 -- presumably a
                # hardwired-zero register; confirm against the register file.
                offset_reg = "R0"
            base = self.get_immediate(instruction)
            return LoadInstruction(dst, offset_reg, base)
        elif kind == "store_instruction":
            src = self.get_src(instruction)
            offset_reg = self.get_register(instruction)
            if offset_reg is None:
                offset_reg = "R0"
            base = self.get_immediate(instruction)
            return StoreInstruction(src, offset_reg, base)
        elif kind == "halt_instruction":
            return HaltInstruction()
        elif kind == "label_declaration":
            return Label(instruction.children[0])
        elif kind == "const_declaration":
            # Record the constant; no Instruction is produced.
            name = instruction.children[0]
            self.constants[name] = int(instruction.children[1])
        else:
            raise Exception("Unrecognized instruction: {}".format(kind))

    @staticmethod
    def ast_to_directives(ast):
        """Build a dict of directive name (without the leading '.') -> int value."""
        directives = {}
        for directive in ast:
            directives[directive.children[0].lstrip(".")] = int(directive.children[1])
        return directives

    def ast_to_instructions(self, ast):
        """Parse every instruction subtree under the 'instructions' node."""
        instructions = list(ast.find_data("instructions"))[0].children
        return [self.parse_instruction(i) for i in instructions]

    @staticmethod
    def get_first_child(tree, child_name, upper=False):
        """Return the first child of the first subtree named *child_name*,
        optionally upper-cased; None if no such subtree exists."""
        try:
            data = tree.find_data(child_name).__next__().children[0]
            if isinstance(data, str) and upper:
                return data.upper()
            else:
                return data
        except StopIteration:
            return None

    def _resolve_operand(self, operand):
        """Resolve an operand token: a declared constant's value, else an
        int if the token is numeric, else the token itself (a register name)."""
        if operand in self.constants:
            return self.constants[operand]
        try:
            return int(operand)
        except ValueError:
            return operand

    def get_alu_op(self, instruction):
        return self.get_first_child(instruction, "alu_op", upper=True)

    def get_dst(self, instruction):
        return self.get_first_child(instruction, "dst_register", upper=True)

    def get_src(self, instruction):
        # upper=True already upper-cases; the previous extra .upper() was redundant.
        return self.get_first_child(instruction, "src_register", upper=True)

    def get_immediate(self, instruction):
        """Return an immediate: a constant's value or the literal int.

        Raises ValueError if the token is neither a known constant nor numeric.
        """
        immediate = self.get_first_child(instruction, "immediate")
        if immediate in self.constants:
            return self.constants[immediate]
        return int(immediate)

    def get_register(self, instruction):
        return self.get_first_child(instruction, "register", upper=True)

    def get_op1(self, instruction):
        return self._resolve_operand(self.get_first_child(instruction, "operand1", upper=True))

    def get_op2(self, instruction):
        return self._resolve_operand(self.get_first_child(instruction, "operand2", upper=True))

    def get_fp_alu_op(self, instruction):
        return self.get_first_child(instruction, "fp_alu_op", upper=True)

    def get_fp_op1(self, instruction):
        return self._resolve_operand(self.get_first_child(instruction, "fp_operand1", upper=True))

    def get_fp_op2(self, instruction):
        return self._resolve_operand(self.get_first_child(instruction, "fp_operand2", upper=True))

    def get_fp_dst(self, instruction):
        return self.get_first_child(instruction, "fp_dst_register", upper=True)

    def get_branch_op(self, instruction):
        return self.get_first_child(instruction, "branch_op", upper=True)

    def get_label(self, instruction):
        return self.get_first_child(instruction, "label")
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,489 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/reservation_station/mem_reservation_station.py | from tomasulo_simulator.instruction import StoreInstruction, LoadInstruction
from .reservation_station import ReservationStation
class MemReservationStation(ReservationStation):
    """Reservation station for memory instructions (loads and stores).

    Extends the generic ReservationStation with operand/address state and
    with ordering enforced via the CPU memory unit (queue turn, address
    resolution, load/store ordering).
    """

    kind = "MemRS"
    incremental_id = 1

    def __init__(self, env, cpu, fu_store, rs_store):
        super().__init__(env, cpu, fu_store, rs_store)
        # Pending operand-read processes (set by _decode_operands).
        self.offset_read = None
        self.src_read = None  # only set for stores
        # Resolved operand values (filled in _wait_for_dependencies).
        self.offset_val = None
        self.src_val = None
        # Effective memory address: offset register value + immediate base.
        self.address = None

    def _decode_operands(self):
        """Start operand reads and claim the destination register (loads)."""
        if isinstance(self.instruction, StoreInstruction):
            # Stores additionally need the value to write.
            self.src_read = self._read_operand(self.instruction.src_reg)
        self.offset_read = self._read_operand(self.instruction.offset_reg)
        # Associate destination register with this reservation station in the RF
        if isinstance(self.instruction, LoadInstruction):
            self.cpu.reg_file.associate_rs_with_reg(self, self.instruction.dst_reg)

    def _wait_for_dependencies(self):
        """Wait for operands, a functional unit and memory ordering.

        Generator (simpy process): yields until every operand read, the
        functional-unit request and this station's turn in the memory queue
        have all completed, then resolves the address and honors memory
        ordering before execution may start.
        """
        dependencies = []
        if self.src_read is not None:
            dependencies.append(self.src_read)
        dependencies.append(self.offset_read)
        fu_request = self.env.process(self._get_functional_unit())
        dependencies.append(fu_request)
        dependencies.append(self.env.process(self.cpu.memory.wait_for_queue_turn(self)))
        results = yield self.env.all_of(dependencies)
        self.offset_val = results[self.offset_read]
        if self.src_read is not None:
            self.src_val = results[self.src_read]
        self.FU = results[fu_request]
        # Effective address = offset register value + immediate base.
        self.address = self.offset_val + self.instruction.base
        yield self.env.process(self.cpu.memory.address_resolution_complete(self))
        # Ordering: loads wait only for earlier stores; stores wait for all
        # earlier accesses.
        if isinstance(self.instruction, LoadInstruction):
            yield self.env.process(self.cpu.memory.wait_for_other_stores(self))
        else:
            yield self.env.process(self.cpu.memory.wait_for_other_accesses(self))

    def _execute(self):
        """Perform the access after the modeled latency (simpy process).

        Loads read the memory array into self.result; stores write the
        resolved source value to the computed address.
        """
        yield self.env.timeout(self._execution_latency())
        if isinstance(self.instruction, LoadInstruction):
            self.result = self.cpu.memory._memory[self.address]
        elif isinstance(self.instruction, StoreInstruction):
            self.cpu.memory._memory[self.address] = self.src_val
        else:
            raise ValueError("Unrecognized instruction")
        # Tell the memory unit this access is done so queued ones may proceed.
        yield self.env.process(self.cpu.memory.memory_access_complete(self))

    def _writeback(self):
        # No memory-specific writeback behavior; defer to the base station.
        return super()._writeback()

    def _reset(self):
        """Clear per-instruction state so the station can be reused."""
        super()._reset()
        self.offset_read = None
        self.src_read = None
        self.offset_val = None
        self.src_val = None
        self.address = None
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,490 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/cdb.py | from collections import namedtuple
import simpy
from tomasulo_simulator.execution_trace import CDBUnavailableHazard
from tomasulo_simulator.log_utils import get_logger
class CDB(simpy.Resource):
    """Common Data Bus modeled as a simpy Resource of fixed width.

    Writers broadcast (tag, value) pairs; snooping processes register an
    event under a tag and are woken when that tag is written.
    """

    def __init__(self, env, width):
        # width = number of simultaneous broadcasts the bus can carry.
        super().__init__(env, width)
        self.env = env
        # tag -> list of simpy Events to succeed when that tag is written.
        self.waiting = {}
        self.id = "CDB"
        self._log = get_logger(env, self.id)

    def write(self, tag, value):
        """Returns a Simpy process that writes a value to the CDB."""
        return self.env.process(self._write(tag, value))

    def write_with_conflict_detection(self, tag, value, rs):
        """Returns a Simpy process that writes a value to the CDB.
        It also detects and logs a conflict if the CDB is not immediately available."""
        return self.env.process(self._write(tag, value, rs))

    def _write(self, tag, value, rs=None):
        """Broadcast (tag, value); simpy process body.

        If *rs* is given and the bus is busy, the wait is recorded as a
        CDBUnavailableHazard on the instruction's stats.
        """
        req = self.request()
        # Race the request against a zero timeout to test whether the bus
        # is free *right now* without blocking.
        res = yield req | self.env.timeout(0)
        if req not in res:
            if rs is not None:
                self._log("Conflict: not immediately available for writing result of {} ({})", rs, rs.instruction)
            detected_at = self.env.now
            yield req
            if rs is not None:
                rs.instruction.stats.hazards.append(CDBUnavailableHazard(detected_at, self.env.now))
        self._log("Writing {}: {}", tag, value)
        # TODO: make CDB latency configurable
        yield self.env.timeout(1)
        # Wake every snooper registered for this tag, then clear the list.
        events = self.waiting.get(tag, [])
        for event in events:
            event.succeed(CDBWrite(tag, value))
        self.waiting[tag] = []
        self.release(req)

    def snoop(self, tag):
        """Returns a Simpy process that waits until a value with the given tag is written to the CDB.
        The returned process yields the value written to the CDB."""
        events = self.waiting.get(tag, [])
        event = simpy.Event(self.env)
        events.append(event)
        self.waiting[tag] = events

        def _snoop():
            write = yield event
            return write.value
        return self.env.process(_snoop())

    @property
    def busy(self):
        # True when every slot of the bus is taken.
        return self.count >= self.capacity
CDBWrite = namedtuple("CDBWrite", ["tag", "value"])
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,491 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/__init__.py | from .instruction import Instruction, BreakpointInstruction, HaltInstruction
from .alu_instructions import AluInstruction
from .bitwise_instructions import BitwiseInstruction, AndInstruction, OrInstruction
from .control_flow_instructions import (
ControlFlowInstruction, JumpInstruction, BranchInstruction,
BEQInstruction, BLEInstruction, BGTInstruction,
BGEInstruction, BNEInstruction, BLTInstruction)
from .floating_instructions import FloatingInstruction, FAddInstruction, FSubInstruction
from .integer_instructions import IntegerInstruction, AddInstruction, SubInstruction
from .label import Label
from .logic_instructions import LogicInstruction, AndlInstruction, OrlInstruction
from .mem_instructions import MemInstruction, LoadInstruction, StoreInstruction
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,492 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/mem_instructions.py | from abc import ABC, abstractmethod
from .instruction import Instruction
class MemInstruction(Instruction, ABC):
    """Common base for memory instructions: an offset register plus an
    immediate base address."""

    mnemonic = "MEM"

    def __init__(self, offset_reg, base):
        super().__init__()
        self.offset_reg = offset_reg
        self.base = base


class LoadInstruction(MemInstruction):
    """Load from memory into a destination register: LD dst, [offset+base]."""

    mnemonic = "LD"

    def __init__(self, dst_reg, offset_reg, base):
        super().__init__(offset_reg, base)
        self.dst_reg = dst_reg

    @property
    def operands_str(self):
        return f"{self.dst_reg}, [{self.offset_reg}+{self.base}]"


class StoreInstruction(MemInstruction):
    """Store a source register to memory: ST src, [offset+base]."""

    mnemonic = "ST"

    def __init__(self, src_reg, offset_reg, base):
        super().__init__(offset_reg, base)
        self.src_reg = src_reg

    @property
    def operands_str(self):
        return f"{self.src_reg}, [{self.offset_reg}+{self.base}]"
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,493 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/logic_instructions.py | from abc import ABC, abstractmethod
from .alu_instructions import AluInstruction
class LogicInstruction(AluInstruction, ABC):
    """Base class for three-operand logic instructions (dst <- result(op1, op2))."""

    def __init__(self, dst_reg, op1, op2):
        """Store the destination register and the two source operands."""
        super().__init__(dst_reg)
        self.OP1 = op1
        self.OP2 = op2

    @property
    def operands_str(self):
        """Assembly-style operand listing: ``dst, op1, op2``."""
        return f"{self.dst_reg}, {self.OP1}, {self.OP2}"

    @staticmethod
    @abstractmethod
    def result(op1, op2):
        """Compute the instruction's value from its two operands."""
        raise NotImplementedError()

    @property
    @abstractmethod
    def mnemonic(self):
        """Assembly mnemonic of the concrete instruction."""
        raise NotImplementedError()
class AndlInstruction(LogicInstruction):
    """ANDL dst, op1, op2 -- writes 1 when ``op1 & op2`` is non-zero, else 0."""
    # NOTE(review): truthiness is taken from the bitwise AND, so ANDL(1, 2)
    # yields 0 even though both operands are non-zero -- confirm this is the
    # intended "logical and" semantics.
    mnemonic = "ANDL"

    @staticmethod
    def result(op1, op2):
        """Return 1 when the bitwise AND of the operands is non-zero."""
        return 1 if op1 & op2 else 0
class OrlInstruction(LogicInstruction):
    """ORL dst, op1, op2 -- writes 1 when ``op1 | op2`` is non-zero, else 0."""
    mnemonic = "ORL"

    @staticmethod
    def result(op1, op2):
        """Return 1 when the bitwise OR of the operands is non-zero."""
        return 1 if op1 | op2 else 0
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,494 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/registerfile.py | from tomasulo_simulator import execution_trace as etrace
from tomasulo_simulator.log_utils import get_logger
from tomasulo_simulator.reservation_station import ReservationStation
class RegisterFile:
    """A CPU register file: N general purpose integer registers (R0-R(N-1)),
    M floating point registers (F0-F(M-1)) and the program counter (PC).

    Each register slot holds either an immediate int value or the
    ReservationStation that will eventually produce the register's value
    (Tomasulo-style register tagging).
    """

    def __init__(self, env, cpu, general_purpose_regs, floating_point_regs):
        self.cpu = cpu
        self.general_purpose_regs = general_purpose_regs
        self.floating_point_regs = floating_point_regs
        # Register name -> int value or producing ReservationStation.
        self.values = {}
        for reg_number in range(general_purpose_regs):
            self.values["R" + str(reg_number)] = 0
        for reg_number in range(floating_point_regs):
            self.values["F" + str(reg_number)] = 0
        self.values["PC"] = 0
        self.env = env
        self.id = "RF"
        self._log = get_logger(env, self.id)

    def associate_rs_with_reg(self, rs, reg_name):
        """Tag reg_name with the reservation station that will produce its
        value and start a process that writes the result back once ready."""
        self[reg_name] = rs
        self.env.process(self._wait_for_result_to_update_register(rs, reg_name))

    def read_register(self, reg_name):
        """Return a process that yields the register's value.

        An immediate int is returned after a 1-cycle timeout; a register
        tagged with a ReservationStation is resolved by snooping the CDB
        until that RS broadcasts its result.
        """
        src = self[reg_name]

        def _read_register():
            if isinstance(src, int):
                yield self.env.timeout(1)
                return src
            elif isinstance(src, ReservationStation):
                value = yield self.cpu.CDB.snoop(src)
                return value
        return self.env.process(_read_register())

    def read_register_with_raw_detection(self, reg_name, rs):
        """Like read_register, but records a RAWHazard on rs's instruction
        when the value has to be waited for on the CDB."""
        src = self[reg_name]
        if isinstance(src, int):
            def _read_register():
                yield self.env.timeout(1)
                return src
            return self.env.process(_read_register())
        elif isinstance(src, ReservationStation):
            # Remember when the hazard was first detected, for the trace.
            detected_at = self.env.now
            process = self.cpu.CDB.snoop(src)

            def _read_register():
                value = yield process
                rs.instruction.stats.hazards.append(etrace.RAWHazard(detected_at, self.env.now, reg_name, rs))
                return value
            return self.env.process(_read_register())

    def _wait_for_result_to_update_register(self, rs, reg_name):
        """Wait for rs to broadcast on the CDB, then write the value back to
        reg_name -- unless a newer RS got associated with it in the meantime."""
        instruction = rs.instruction
        value = yield self.cpu.CDB.snoop(rs)
        self._log("Got result of {} ({}) from CDB ({})", instruction, value, rs)
        if self[reg_name] is rs:
            self._log("Writing back the result to the RF")
            self[reg_name] = value
        else:
            self._log("Not writing back result (another RS is associated to the register)")

    def _is_valid_register(self, reg_name):
        """True if reg_name (already upper-cased) names an existing register."""
        return self._is_valid_integer_register(reg_name) \
            or self._is_valid_floating_point_register(reg_name) \
            or reg_name == "PC"

    def _is_valid_integer_register(self, reg_name):
        """True for "R<k>" with 0 <= k < general_purpose_regs."""
        if not reg_name.startswith("R"):
            return False
        suffix = reg_name[1:]
        # Non-numeric suffixes ("RX", "R1A", bare "R") are simply invalid
        # names; previously int() raised ValueError and leaked to callers
        # that expect KeyError for bad register names.
        if not suffix.isdecimal():
            return False
        return 0 <= int(suffix) < self.general_purpose_regs

    def _is_valid_floating_point_register(self, reg_name):
        """True for "F<k>" with 0 <= k < floating_point_regs."""
        if not reg_name.startswith("F"):
            return False
        suffix = reg_name[1:]
        if not suffix.isdecimal():
            return False
        return 0 <= int(suffix) < self.floating_point_regs

    def __getitem__(self, reg_name):
        """Return the register's content (value or tagging RS).

        Raises KeyError for non-string or invalid register names; names are
        case-insensitive.
        """
        if type(reg_name) is not str:
            raise KeyError("Register name must be a string")
        reg_name = reg_name.upper()
        if not self._is_valid_register(reg_name):
            raise KeyError("Register {} is not valid".format(reg_name))
        return self.values.get(reg_name)

    def __setitem__(self, reg_name, value):
        """Set a register's content.  R0 is read-only; invalid or non-string
        names raise KeyError."""
        if type(reg_name) is not str:
            raise KeyError("Register name must be a string")
        reg_name = reg_name.upper()
        if not self._is_valid_register(reg_name):
            raise KeyError("Register {} is not valid".format(reg_name))
        if reg_name == "R0":
            raise KeyError("Cannot write register R0")
        self.values[reg_name] = value

    def __repr__(self):
        return ", ".join(["{}: {}".format(name, val) for name, val in self.values.items()])
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,495 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/label.py | class Label:
    def __init__(self, id):
        """Create a label wrapping the given identifier.

        Note: the parameter shadows the builtin ``id``; kept as-is for
        interface compatibility.
        """
        self.id = id
def __repr__(self):
return "LAB {}".format(self.id)
    def __str__(self):
        """Return just the identifier, without the ``LAB`` prefix used by repr."""
        return str(self.id)
def __hash__(self):
return self.id.__hash__()
def __eq__(self, other):
if isinstance(other, Label):
return self.id == other.id
return other == self.id
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,496 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/reservation_station/reservation_station.py | from abc import ABC, abstractmethod
from collections import namedtuple
from tomasulo_simulator import execution_trace as etrace
from tomasulo_simulator.log_utils import get_logger
class ReservationStation(ABC):
    """Base class for Tomasulo reservation stations (RS).

    An RS buffers one issued instruction, waits for its dependencies
    (operands, a functional unit, ...), executes it, writes the result
    back, and finally returns itself and the functional unit to the CPU's
    free stores.  Subclasses implement operand decoding, the dependency
    wait, the execution step and the reset.
    """
    # Prefix for instance ids; concrete subclasses override it.
    kind = "RS"
    # Counter used to build unique ids.  NOTE(review): it is incremented via
    # self.__class__, so each concrete subclass effectively gets its own
    # counter after its first instantiation -- confirm this is intended.
    incremental_id = 1

    def __init__(self, env, cpu, fu_store, rs_store):
        # Unique id, e.g. "RS1": subclass kind + per-class counter.
        self.id = self.__class__.kind + str(self.__class__.incremental_id)
        self.__class__.incremental_id += 1
        self.env = env
        self.cpu = cpu
        self.instruction = None  # instruction currently issued to this RS
        self.FU = None  # functional unit acquired for execution
        self.result = None  # value to broadcast during writeback
        # Shared stores this RS takes FUs from / returns itself to.
        self.fu_store = fu_store
        self.rs_store = rs_store
        self._log = get_logger(env, self.id)

    def issue(self, instruction):
        """Issues an instruction to the reservation station,
        and returns a process which terminates when execution is complete"""
        self.instruction = instruction
        # Log instruction issue
        instruction.stats.issued = self.env.now
        instruction.stats.rs = self
        # Immediately decode the operands
        self._decode_operands()
        # Returns the generator; presumably the caller drives it as a
        # simulation process.
        return self._execution_process()

    @abstractmethod
    def _decode_operands(self):
        """This method must be implemented by every reservation station.
        It should immediately decode the operands of the instruction."""
        raise NotImplementedError()

    def _read_operand(self, operand):
        """Helper method which returns a process that will wait until the operand is ready and return it"""
        # If the operand is an immediate value then return it after a timeout to simulate decode times.
        if isinstance(operand, int):
            def process():
                # TODO: make immediate operand read _execution_latency configurable
                yield self.env.timeout(1)
                return operand
            return self.env.process(process())
        # Otherwise the operand must be a register name,
        # so ask the register file for the register value
        elif isinstance(operand, str):
            return self.cpu.reg_file.read_register_with_raw_detection(operand, self)
        else:
            raise Exception("Operand type incorrect")

    def _execution_process(self):
        """Full lifecycle of the issued instruction: wait for dependencies,
        execute, write back, record the trace, then free this RS and its FU."""
        yield self.env.process(self._wait_for_dependencies())
        self._log("All dependencies for {} are ready (using {})", self.instruction, self.FU)
        # Log start of execution
        self._log("Starting execution phase of {}", self.instruction)
        self.instruction.stats.start_execution = self.env.now
        self.instruction.stats.fu = self.FU
        yield self.env.process(self._execute())
        self._log("End execution of {}, starting writeback phase", self.instruction)
        yield self.env.process(self._writeback())
        self.instruction.stats.written_result = self.env.now
        # Append the instruction to the execution trace list
        self.cpu.executed_instructions.append(self.instruction)
        # Keep a reference: _reset() clears self.FU before we release it.
        fu = self.FU
        # Reset RS before releasing it
        self._reset()
        # return the FU and RS to the CPU
        yield self._return_to_cpu(fu)

    def _get_functional_unit(self):
        """Acquire a functional unit from the shared store, recording a
        structural hazard when none is immediately available."""
        # FIXME
        # this code sucks a bit, but it's necessary to detect
        # structural hazards immediately to print them in order.
        # Checking if fu_store.items is empty does not work, as
        # a FU may be be relased right after we yield so no conflict is happening
        # The idea is to check if the request is granted in zero time,
        # but any_of is not guaranteed to yield both the resource request and the timeout
        # at the same time (as in in a single call), even if they will trigger at the same
        # simulation time.
        # Possible definitive solutions:
        # 1) ignore the problem and not print conflicts in real time
        # 2) use a 0.1 clock-cycles timeout (so the resource request will be granted first if any is available)
        store_req_start = self.env.now
        req = self.fu_store.get()
        # Race the store request against a zero-length timeout: if the
        # timeout wins, no FU was free right now -> structural hazard.
        results = yield self.env.any_of([req, self.env.timeout(0)])
        if req in results:
            hazard = False
            obtained_fu = req.value
        else:
            self._log("Structural hazard: no FU available for {}", self.instruction)
            hazard = True
            obtained_fu = yield req
        if hazard:
            self._log("Structural hazard solved: obtained {} for {}", obtained_fu, self.instruction)
            self.instruction.stats.hazards.append(etrace.FUUnavailableHazard(store_req_start, self.env.now, obtained_fu))
        return obtained_fu

    @abstractmethod
    def _wait_for_dependencies(self):
        """This method must be implemented by all reservation stations.
        It should wait for all dependencies necessary before executing the instruction (operands, FU, and others)"""
        raise NotImplementedError()

    @abstractmethod
    def _execute(self):
        """This method must be implemented by all reservation stations.
        It should execute the instruction and save the result in self.result"""
        raise NotImplementedError()

    def _writeback(self):
        """Broadcast self.result on the CDB; when the instruction produced no
        value, just burn one cycle instead."""
        # write_result marks the start of writeback; written_result (set by
        # _execution_process) marks its completion.
        self.instruction.stats.write_result = self.env.now
        if self.result is not None:
            yield self.cpu.CDB.write_with_conflict_detection(self, self.result, self)
        else:
            yield self.env.timeout(1)

    def _execution_latency(self):
        """Latency for the current instruction: the per-instruction value from
        the CPU config wins, otherwise the per-RS-kind default.  Raises when
        neither is defined."""
        instruction_latency = self.instruction.latency(self.cpu.config)
        if instruction_latency is not None:
            return instruction_latency
        for c in self.__class__.__mro__:
            if c is ReservationStation:
                raise Exception("Latency not defined for instruction {}".format(self.instruction.mnemonic))
            # NOTE(review): 'c' is unused in the lookup below, so every MRO
            # iteration queries the same self.kind key -- presumably c.kind
            # was intended to walk parent kinds; confirm.
            latency = self.cpu.config[self.kind.lower() + "_execution_latency"]
            if latency is not None:
                return latency
        raise Exception("Latency not defined for instruction {}".format(self.instruction.mnemonic))

    def _return_to_cpu(self, fu):
        """Returns itself and the functional unit to the CPU after execution is complete"""
        return self.env.all_of([self.fu_store.put(fu), self.rs_store.put(self)])

    @abstractmethod
    def _reset(self):
        """Clear per-instruction state.  Abstract so subclasses must override;
        this base implementation provides the common cleanup (presumably
        extended via super()._reset() -- confirm in subclasses)."""
        self.instruction = None
        self.FU = None
        self.result = None

    def __str__(self):
        return self.id

    def __repr__(self):
        return self.__str__()
# Pair (reservation station, value) produced by register reads.
RegReadResult = namedtuple("RegReadResult", "rs result")
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,497 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/cpu_config.py | DEFAULT_GP_REGISTERS = 16
# Default CPU parameters; all of them can be overridden through
# CpuConfig(directives).

# Number of floating point registers (F0-F7).
DEFAULT_FP_REGISTERS = 8
# Common data bus width -- presumably results broadcast per cycle; confirm in cdb.py.
DEFAULT_CDB_WIDTH = 1

# Integer ALU: reservation stations, functional units, latency (cycles).
DEFAULT_ALU_RS = 1
DEFAULT_ALU_FU = 1
DEFAULT_ALU_LATENCY = 1

# Floating point ALU.
DEFAULT_FPALU_RS = 1
DEFAULT_FPALU_FU = 1
DEFAULT_FPALU_LATENCY = 3

# Memory unit.
DEFAULT_MEM_RS = 1
DEFAULT_MEM_FU = 1
DEFAULT_MEM_LATENCY = 4
DEFAULT_MEM_ACCESS_QUEUE_SIZE = 2
DEFAULT_MEM_SIZE = 0x50  # memory size -- unit (words vs bytes) not evident here; confirm in memory.py

# Cycles per instruction fetch.
DEFAULT_FETCH_LATENCY = 1
class CpuConfig:
    """Mutable CPU configuration.

    Every tunable starts from the DEFAULT_* module constants and can be
    overridden by passing a {directive_name: value} mapping.

    Unknown ``*_execution_latency`` lookups return None so callers can fall
    back to their own defaults; any other unknown attribute raises
    AttributeError (KeyError for ``config[...]`` access).
    """

    def __init__(self, directives=None):
        # TODO: delete *rs_execution_latency
        self.gp_registers = DEFAULT_GP_REGISTERS
        self.fp_registers = DEFAULT_FP_REGISTERS
        self.cdb_width = DEFAULT_CDB_WIDTH
        self.alu_rs = DEFAULT_ALU_RS
        self.alu_fu = DEFAULT_ALU_FU
        self.alurs_execution_latency = DEFAULT_ALU_LATENCY
        self.alufu_execution_latency = DEFAULT_ALU_LATENCY
        self.fpalu_rs = DEFAULT_FPALU_RS
        self.fpalu_fu = DEFAULT_FPALU_FU
        self.fpalurs_execution_latency = DEFAULT_FPALU_LATENCY
        self.fpalufu_execution_latency = DEFAULT_FPALU_LATENCY
        self.mem_rs = DEFAULT_MEM_RS
        self.mem_fu = DEFAULT_MEM_FU
        self.memrs_execution_latency = DEFAULT_MEM_LATENCY
        self.memfu_execution_latency = DEFAULT_MEM_LATENCY
        self.mem_access_queue_size = DEFAULT_MEM_ACCESS_QUEUE_SIZE
        self.mem_size = DEFAULT_MEM_SIZE
        self.fetch_latency = DEFAULT_FETCH_LATENCY
        # None default instead of a mutable {} default argument.
        self.apply_config(directives if directives is not None else {})

    def apply_config(self, directives):
        """Apply {name: value} overrides; unknown names raise Exception."""
        for k, v in directives.items():
            if k not in self.__dict__:
                raise Exception("Unknown directive: {}".format(k))
            self.__dict__[k] = v

    def __getattr__(self, item):
        # Only reached when normal attribute lookup fails.
        if item.endswith("_execution_latency"):
            # Missing latencies are a soft miss: callers fall back to defaults.
            return self.__dict__.get(item, None)
        # Raise AttributeError (not KeyError, as the old code effectively did)
        # so hasattr(), copy and pickle behave correctly.
        raise AttributeError(item)

    def __getitem__(self, item):
        """Dict-style access; unknown non-latency keys raise KeyError."""
        try:
            return getattr(self, item)
        except AttributeError:
            raise KeyError(item) from None
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,498 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/instruction/floating_instructions.py | from abc import ABC, abstractmethod
from .alu_instructions import AluInstruction
class FloatingInstruction(AluInstruction, ABC):
    """Base class for three-operand floating point instructions (dst <- result(op1, op2))."""

    def __init__(self, dst_reg, op1, op2):
        """Store the destination register and the two source operands."""
        super().__init__(dst_reg)
        self.OP1 = op1
        self.OP2 = op2

    @property
    def operands_str(self):
        """Assembly-style operand listing: ``dst, op1, op2``."""
        return f"{self.dst_reg}, {self.OP1}, {self.OP2}"

    @staticmethod
    @abstractmethod
    def result(op1, op2):
        """Compute the instruction's value from its two operands."""
        raise NotImplementedError()

    @property
    @abstractmethod
    def mnemonic(self):
        """Assembly mnemonic of the concrete instruction."""
        raise NotImplementedError()
class FAddInstruction(FloatingInstruction):
    """FADD dst, op1, op2 -- floating point addition."""
    mnemonic = "FADD"

    @staticmethod
    def result(op1, op2):
        """Sum of the two operands."""
        total = op1 + op2
        return total
class FSubInstruction(FloatingInstruction):
    """FSUB dst, op1, op2 -- floating point subtraction."""
    mnemonic = "FSUB"

    @staticmethod
    def result(op1, op2):
        """Difference op1 - op2."""
        difference = op1 - op2
        return difference
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,499 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/__init__.py | from .parser import Parser
from .assembler import assemble
from .cpu_config import CpuConfig
from .cpu import CPU
# Distribution name; mirrors the `name` field in setup.py.
name = "tomasulo_simulator"
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,500 | fcremo/tomasulo-simulator | refs/heads/master | /setup.py | import setuptools
# Packaging metadata for the tomasulo_simulator simulation library.
setuptools.setup(
    name="tomasulo_simulator",
    version="0.0.1",
    author="Filippo Cremonese",
    author_email="",
    description="Tomasulo Algorithm simulation library",
    long_description="",
    long_description_content_type="text/markdown",
    url="https://github.com/fcremo/tomasulo-simulator",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
    ],
    include_package_data=True,
    # Ship non-Python data files (the parser grammar) with the package.
    package_data={
        '': ["*.lark"]
    }
)
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,501 | fcremo/tomasulo-simulator | refs/heads/master | /tomasulo_simulator/memory.py | import simpy
from tomasulo_simulator import execution_trace as etrace
from tomasulo_simulator.cpu_config import CpuConfig
from tomasulo_simulator.instruction import StoreInstruction
from tomasulo_simulator.log_utils import get_logger
class Memory:
    """Simulated main memory that serializes accesses in program order.

    Reservation stations (RS) go through three stages:
      1. ``enqueue_memory_access`` — take a slot in the bounded access
         queue (a structural hazard is recorded if the queue is full);
      2. ``wait_for_queue_turn`` + ``address_resolution_complete`` — wait
         to reach the queue head, then move into the in-execution set;
      3. ``memory_access_complete`` — leave the in-execution set.
    The ``*_pop_event`` members are swapped for fresh events after every
    pop so that all current waiters are woken exactly once per pop.
    Methods containing ``yield`` are simpy processes/coroutines.
    """

    def __init__(self, env, config: CpuConfig, default_val=0):
        # Simulation environment driving all events below.
        self.env = env
        # Bounded FIFO of RSs waiting for a memory-access slot.
        self._access_queue = simpy.Store(env, config.mem_access_queue_size)
        self._access_queue_pop_event = simpy.Event(env)
        # RSs whose address is resolved and whose access is in flight.
        self._accesses_in_execution = simpy.FilterStore(env)
        self._accesses_in_execution_pop_event = simpy.Event(env)
        self.id = "MEM"
        # Backing storage: one cell per address, initialized to default_val.
        self._memory = [default_val] * config.mem_size
        self._log = get_logger(env, self.id)

    def enqueue_memory_access(self, rs, instruction):
        """Process: reserve an access-queue slot for *rs*, logging and
        recording a structural hazard on *instruction* when no slot is
        granted in zero simulation time."""
        # FIXME
        # this code sucks a bit, but it's necessary to detect
        # structural hazards immediately to print them in order.
        # Checking if fu_store.items is empty does not work, as
        # a FU may be be relased right after we yield so no conflict is happening
        # The idea is to check if the request is granted in zero time,
        # but any_of is not guaranteed to yield both the resource request and the timeout
        # at the same time (as in in a single call), even if they will trigger at the same
        # simulation time.
        # Possible definitive solutions:
        # 1) ignore the problem and not print conflicts in real time
        # 2) use a 0.1 clock-cycles timeout (so the resource request will be granted first if any is available)
        req_start = self.env.now
        req = self._access_queue.put(rs)
        # Race the put request against a zero-delay timeout to detect an
        # immediately-available slot (see FIXME above for the caveats).
        results = yield self.env.any_of([req, self.env.timeout(0)])
        if req in results:
            hazard = False
        else:
            self._log("Structural hazard: no slots in the memory access queue available for {}", instruction)
            hazard = True
        yield req
        if hazard:
            self._log("Structural hazard solved, found a slot in the mem access queue")
            instruction.stats.hazards.append(etrace.MemQueueSlotUnavailableHazard(req_start, self.env.now))

    def wait_for_queue_turn(self, rs):
        """Process: block until *rs* is at the head of the access queue."""
        current_rs = self._access_queue.items[0]
        while current_rs != rs:
            # Re-examine the head each time an element is popped.
            yield self._access_queue_pop_event
            current_rs = self._access_queue.items[0]

    def address_resolution_complete(self, rs):
        """Process: pop *rs* from the queue head (enforcing FIFO order)
        and move it into the in-execution set, then wake queue waiters."""
        current_rs = yield self._access_queue.get()
        if rs != current_rs:
            raise Exception("Wrong order!")
        # Install a fresh pop event before triggering the old one, so
        # re-waiting processes attach to the new event.
        ev, self._access_queue_pop_event = self._access_queue_pop_event, simpy.Event(self.env)
        yield self._accesses_in_execution.put(rs)
        ev.succeed()

    def _has_to_wait_for_stores(self, rs):
        """Return True if another in-flight STORE targets rs.address."""
        # FIXME: I suspect there's a potential deadlock if more than two dependent instructions get queued up
        # Should check only the addreses of reservation stations which had their instruction issued before ours
        for other_rs in self._accesses_in_execution.items:
            if (other_rs != rs and
                    isinstance(other_rs.instruction, StoreInstruction) and
                    other_rs.address == rs.address):
                return True
        return False

    def wait_for_other_stores(self, rs):
        """Process: block while another store to rs.address is in flight."""
        # FIXME: I suspect there's a potential deadlock if more than two dependent instructions get queued up
        # Should check only the addreses of reservation stations which had their instruction issued before ours
        while self._has_to_wait_for_stores(rs):
            yield self._accesses_in_execution_pop_event

    def _has_to_wait_for_other_accesses(self, rs):
        """Return True if any other in-flight access targets rs.address."""
        for other_rs in self._accesses_in_execution.items:
            if other_rs != rs and other_rs.address == rs.address:
                return True
        return False

    def wait_for_other_accesses(self, rs):
        """Process: block while any other access to rs.address is in flight."""
        while self._has_to_wait_for_other_accesses(rs):
            yield self._accesses_in_execution_pop_event

    def memory_access_complete(self, rs):
        """Process: remove *rs* from the in-execution set and wake waiters."""
        if rs not in self._accesses_in_execution.items:
            raise Exception("Something's wrong, please report this event as a bug")
        # Fresh event swapped in before triggering, as in the queue case.
        ev, self._accesses_in_execution_pop_event = self._accesses_in_execution_pop_event, simpy.Event(self.env)
        yield self._accesses_in_execution.get(lambda x: x == rs)
        ev.succeed()

    def __str__(self):
        return self.id

    def __repr__(self):
        return self.__str__()
| {"/tomasulo_simulator/execution_trace.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/instruction/__init__.py"], "/tomasulo_simulator/cpu.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cdb.py", "/tomasulo_simulator/functional_unit.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/memory.py", "/tomasulo_simulator/registerfile.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/__init__.py": ["/tomasulo_simulator/reservation_station/alu_reservation_station.py", "/tomasulo_simulator/reservation_station/mem_reservation_station.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/alu_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/bitwise_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/simulation.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/assembler.py": ["/tomasulo_simulator/__init__.py"], "/tomasulo_simulator/reservation_station/alu_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/instruction/control_flow_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/instruction/instruction.py": ["/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/execution_trace.py"], "/tomasulo_simulator/parser/parser.py": ["/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/logic_instructions.py"], "/tomasulo_simulator/reservation_station/mem_reservation_station.py": ["/tomasulo_simulator/instruction/__init__.py", 
"/tomasulo_simulator/reservation_station/reservation_station.py"], "/tomasulo_simulator/cdb.py": ["/tomasulo_simulator/execution_trace.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/__init__.py": ["/tomasulo_simulator/instruction/instruction.py", "/tomasulo_simulator/instruction/alu_instructions.py", "/tomasulo_simulator/instruction/bitwise_instructions.py", "/tomasulo_simulator/instruction/control_flow_instructions.py", "/tomasulo_simulator/instruction/floating_instructions.py", "/tomasulo_simulator/instruction/label.py", "/tomasulo_simulator/instruction/logic_instructions.py", "/tomasulo_simulator/instruction/mem_instructions.py"], "/tomasulo_simulator/instruction/mem_instructions.py": ["/tomasulo_simulator/instruction/instruction.py"], "/tomasulo_simulator/instruction/logic_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/registerfile.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py", "/tomasulo_simulator/reservation_station/__init__.py"], "/tomasulo_simulator/reservation_station/reservation_station.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/log_utils.py"], "/tomasulo_simulator/instruction/floating_instructions.py": ["/tomasulo_simulator/instruction/alu_instructions.py"], "/tomasulo_simulator/__init__.py": ["/tomasulo_simulator/assembler.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/cpu.py"], "/tomasulo_simulator/memory.py": ["/tomasulo_simulator/__init__.py", "/tomasulo_simulator/cpu_config.py", "/tomasulo_simulator/instruction/__init__.py", "/tomasulo_simulator/log_utils.py"]} |
43,519 | bhilosobher/interactWBL | refs/heads/master | /populate_students.py | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'devproj.settings')
# names package will help us generate random names to create student users; random for creating student ids
import django, names, random
django.setup()
from interactWBL.models import Student, User, Mentor
def populate():
    """Create 99 mock students with UoG-style ids/emails and random mentors.

    Assumes Mentor rows already exist (run the mentor population script
    first); `random.choice` raises IndexError if none do -- presumably
    intentional given populate_all.py's ordering, TODO confirm.
    """
    mentor_list = list(Mentor.objects.all())
    for _ in range(1, 100):
        # Build the pieces of a mock UoG student identity.
        student_number = 2000000 + random.randint(0, 999999)
        # Fixed: first names previously came from get_last_name() (copy-paste bug).
        f_name = names.get_first_name()
        l_name = names.get_last_name()
        initial = l_name[0].lower()
        student_year = random.randint(1, 5)
        student_id = str(student_number) + initial
        student_email = student_id + '@student.gla.ac.uk'
        print(student_email)
        # Create the auth user; set_password stores the hashed password.
        u = User.objects.get_or_create(username=student_id, password='securepassword', email=student_email,
                                       last_name=l_name, first_name=f_name)[0]
        u.set_password('securepassword')
        u.save()
        # Lastly create the Student profile with a randomly assigned mentor.
        s = Student.objects.get_or_create(user=u, year=student_year,
                                          mentor=random.choice(mentor_list))
# Entry point: only populate when run as a script, not on import.
if __name__=='__main__':
    print('starting student population script...')
populate() | {"/populate_students.py": ["/interactWBL/models.py"], "/interactWBL/views.py": ["/interactWBL/models.py", "/interactWBL/forms.py"], "/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py": ["/interactWBL/models.py"], "/populate_all.py": ["/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py", "/populate_mentors.py", "/populate_students.py", "/populate_with_competencies.py"], "/interactWBL/templatetags/interactwbl_template_tags.py": ["/interactWBL/models.py"], "/interactWBL/admin.py": ["/interactWBL/models.py"], "/populate_with_competencies.py": ["/interactWBL/models.py"], "/populate_mentors.py": ["/interactWBL/models.py"], "/interactWBL/forms.py": ["/interactWBL/models.py"], "/devproj/urls.py": ["/interactWBL/models.py"]} |
43,520 | bhilosobher/interactWBL | refs/heads/master | /interactWBL/apps.py | from django.apps import AppConfig
class InteractwblConfig(AppConfig):
name = 'interactWBL'
| {"/populate_students.py": ["/interactWBL/models.py"], "/interactWBL/views.py": ["/interactWBL/models.py", "/interactWBL/forms.py"], "/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py": ["/interactWBL/models.py"], "/populate_all.py": ["/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py", "/populate_mentors.py", "/populate_students.py", "/populate_with_competencies.py"], "/interactWBL/templatetags/interactwbl_template_tags.py": ["/interactWBL/models.py"], "/interactWBL/admin.py": ["/interactWBL/models.py"], "/populate_with_competencies.py": ["/interactWBL/models.py"], "/populate_mentors.py": ["/interactWBL/models.py"], "/interactWBL/forms.py": ["/interactWBL/models.py"], "/devproj/urls.py": ["/interactWBL/models.py"]} |
43,521 | bhilosobher/interactWBL | refs/heads/master | /interactWBL/views.py | from django.shortcuts import render, redirect
from interactWBL.models import Course, Academic, Competency, Enrolment, Student, Mentor, Company, CourseTarget, StudentLogins, MentorLogins, Reflection, Assignment, Submission
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from interactWBL.forms import CourseForm, StudentForm, MentorForm, SignUpForm, CompetencyForm,CourseTargetsForm,EnrollmentForm
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
# Custom registration view.
def signup(request):
    """Register a new account, log it in, and continue to profile setup.

    Already-authenticated users are sent straight to the dashboard.
    """
    if request.user.is_authenticated:
        # Nothing to register for a logged-in user.
        return redirect('interactWBL:dashboard')
    if request.method != 'POST':
        signup_form = SignUpForm()
    else:
        signup_form = SignUpForm(request.POST)
        if signup_form.is_valid():
            signup_form.save()
            new_user = authenticate(
                username=signup_form.cleaned_data.get('username'),
                password=signup_form.cleaned_data.get('password1'),
            )
            login(request, new_user)
            # Basic account created; now collect the role-specific profile.
            return redirect('interactWBL:register_profile')
    return render(request, 'registration/registration_form.html', {'form': signup_form})
def landing(request):
    """Render the landing page, containing the login and signup buttons."""
    return render(request, 'interactWBL/landing.html')
# Not a view: a helper that classifies the current user's profile type.
def determine_profile(request):
    """Return a tuple of booleans (is_teacher, is_student, is_mentor).

    Classification is by registration email domain (UoG conventions).
    Superusers and anonymous users match no profile type.  Fixed: the
    anonymous case previously fell off the end and returned None, which
    crashed callers that unpack the tuple (my_courses, competencies,
    show_course reach this without an authentication guard).
    """
    if not request.user.is_authenticated:
        return False, False, False
    if request.user.is_superuser:
        return False, False, False
    # Raises ValueError for malformed emails, same as the original split.
    _, domain = request.user.email.split("@")
    if domain in ('gla.ac.uk', 'research.gla.ac.uk'):
        return True, False, False
    if domain == 'student.gla.ac.uk':
        return False, True, False
    return False, False, True
def dashboard(request):
    """Render the dashboard with role-specific context.

    Academics additionally get per-course enrolment lists plus counts of
    submissions, reflections, and logins for their own pupils.
    """
    # Only logged-in users can access this.
    if not request.user.is_authenticated:
        return redirect('interactWBL:landing')
    is_teacher, is_student, is_mentor = determine_profile(request)
    context_dict = {
        'all_courses': Course.objects.order_by('name'),
        'is_academic': is_teacher,
        'is_student': is_student,
        'is_mentor': is_mentor,
    }
    active_academic_courses = None
    if is_teacher:
        # Courses this academic teaches.
        active_academic = Academic.objects.filter(user=request.user)
        active_academic_courses = Course.objects.filter(teacher=active_academic)
        context_dict['courses'] = active_academic_courses
    elif is_student:
        # Courses the student is enrolled in.
        active_student = Student.objects.filter(user=request.user)
        context_dict['courses'] = [e.course for e in
                                   Enrolment.objects.filter(student=active_student)]
    elif is_mentor:
        # Students assigned to this mentor.
        mentor = Mentor.objects.get(user=request.user)
        context_dict['students'] = list(Student.objects.filter(mentor=mentor))
    if active_academic_courses:
        # One enrolment list per course, in course order (list of lists,
        # the shape the template expects).
        active_academic_enrolments = [list(Enrolment.objects.filter(course=c))
                                      for c in active_academic_courses]
        context_dict['enrolments'] = active_academic_enrolments
        # De-duplicated set of the academic's pupils across all courses.
        students_set = {enrolment.student
                        for enrolment_list in active_academic_enrolments
                        for enrolment in enrolment_list}
        # Totals of submissions/reflections across the academic's courses;
        # .count() replaces len(list(...)) to avoid materializing rows.
        number_of_reflections = 0
        number_of_submissions = 0
        for course in active_academic_courses:
            number_of_reflections += Reflection.objects.filter(course=course).count()
            number_of_submissions += Submission.objects.filter(course=course).count()
        context_dict['reflections_number'] = number_of_reflections
        context_dict['submissions_number'] = number_of_submissions
        # Count login events belonging to this academic's pupils.  The loop
        # variable was renamed from `login`, which shadowed the imported
        # django.contrib.auth.login inside this function.
        number_of_logins = sum(1 for login_record in StudentLogins.objects.all()
                               if login_record.student in students_set)
        context_dict["pupils_logins"] = number_of_logins
    return render(request, 'interactWBL/dashboard.html', context=context_dict)
def my_courses(request):
    """List the courses the current user teaches (academic) or takes (student)."""
    is_academic, is_student, is_mentor = determine_profile(request)
    context_dict = {
        'is_academic': is_academic,
        'is_student': is_student,
        'is_mentor': is_mentor,
    }
    if is_academic:
        # Courses taught by this academic, for listing in the template.
        teacher = Academic.objects.filter(user=request.user)
        context_dict['courses'] = Course.objects.filter(teacher=teacher)
    elif is_student:
        # Courses the student is enrolled in.
        student = Student.objects.filter(user=request.user)
        context_dict['courses'] = [
            enrolment.course
            for enrolment in Enrolment.objects.filter(student=student)
        ]
    return render(request, 'interactWBL/my_courses.html', context_dict)
def show_course(request, course_name_slug):
    """Show one course: its students (sorted by last name), its target
    competencies, and role-specific sidebar data.

    An unknown slug renders the template with course/students set to None.
    """
    # Keep the try narrow: only this lookup should raise Course.DoesNotExist.
    # (Previously the whole view body sat inside the try, so an unrelated
    # DoesNotExist from a later query was silently treated as "no course".
    # Leftover debug prints were also removed.)
    try:
        course = Course.objects.get(slug=course_name_slug)
    except Course.DoesNotExist:
        return render(request, 'interactWBL/course.html',
                      {'course': None, 'students': None})
    enrolments = Enrolment.objects.filter(course=course)
    # Students enrolled in the course, listed alphabetically by last name.
    students = sorted((e.student for e in enrolments),
                      key=lambda s: s.user.last_name)
    is_teacher, is_student, is_mentor = determine_profile(request)
    context_dict = {
        # Target competencies declared for this course.
        'competencies': list(CourseTarget.objects.filter(course=course)),
        'is_academic': is_teacher,
        'is_student': is_student,
        'is_mentor': is_mentor,
        'course': course,
        'students': students,
    }
    if is_teacher:
        # Sidebar: the academic's own course list.
        active_academic = Academic.objects.get(user=request.user)
        context_dict['active_academic'] = active_academic
        context_dict['courses'] = Course.objects.filter(teacher=active_academic)
    elif is_student:
        # Sidebar: the student's enrolled courses.
        active_student = Student.objects.filter(user=request.user)
        context_dict['courses'] = [e.course for e in
                                   Enrolment.objects.filter(student=active_student)]
    return render(request, 'interactWBL/course.html', context_dict)
@login_required()
def add_competency(request):
    """Let an academic add a new Competency; others are rejected."""
    # Only academics may add competencies.
    if not determine_profile(request)[0]:
        return HttpResponse("You do not have access to this page")
    form = CompetencyForm()
    if request.method == 'POST':
        form = CompetencyForm(request.POST)
        if form.is_valid():
            form.save(commit=True)  # dropped the unused `competency = ...` binding
            # NOTE(review): rendering dashboard() directly leaves the browser
            # on the POST URL; a redirect would avoid re-submission on
            # refresh -- confirm before changing the behavior.
            return dashboard(request)
        print(form.errors)
    return render(request, 'interactWBL/add_competency.html', {'form': form})
@login_required
def add_target(request, course_name_slug):
    """Attach a target competency to the given course (academics only)."""
    course = Course.objects.get(slug=course_name_slug)
    if not determine_profile(request)[0]:
        # Students and mentors may not edit course targets.
        return HttpResponse("You do not have access to this page")
    if request.method == 'POST':
        target_form = CourseTargetsForm(request.POST)
        if target_form.is_valid():
            # Save deferred so the course foreign key can be set first.
            target = target_form.save(commit=False)
            target.course = course
            target.save()
            return show_course(request, course.slug)
        print(target_form.errors)
    else:
        target_form = CourseTargetsForm()
    return render(request, 'interactWBL/add_competency_targets_to_course.html',
                  {'course': course, 'form': target_form})
@login_required
def enroll(request, course_name_slug):
    """Enrol a student in the given course (academics only)."""
    course = Course.objects.get(slug=course_name_slug)
    is_academic = determine_profile(request)[0]
    if not is_academic:
        return HttpResponse("You do not have access to this page")
    form = EnrollmentForm()
    if request.method == 'POST':
        form = EnrollmentForm(request.POST)
        if form.is_valid():
            # Fill in the course foreign key before the deferred save.
            enrolment = form.save(commit=False)
            enrolment.course = course
            enrolment.save()
            return show_course(request, course.slug)
        else:
            # Fixed: was `form.erros`, which raised AttributeError whenever
            # an invalid enrolment form was submitted.
            print(form.errors)
    context_dict = {'course': course, 'form': form}
    return render(request, 'interactWBL/enroll.html', context_dict)
@login_required
def add_course(request):
    """Create a new course (academics and superusers only).

    On success the user is sent straight to the add-target form for the
    new course.
    """
    # Find the requesting user's Academic profile, if any.  Fixed: the
    # original left `active_academic` (and `courses`) unbound when the user
    # had no academic profile, raising UnboundLocalError/NameError for
    # superusers without one.
    active_academic = Academic.objects.filter(user=request.user).first()
    is_academic = active_academic is not None
    courses = None
    if active_academic:
        # Courses already taught, shown in the sidebar dropdown.
        courses = Course.objects.filter(teacher=active_academic)
    # Unless the user is an academic or admin, this form won't be rendered.
    if not (request.user.is_superuser or is_academic):
        return HttpResponse("You do not have permission to access this page.")
    form = CourseForm()
    if request.method == 'POST':
        form = CourseForm(request.POST)
        # If form data is valid, save the new course to the DB.
        if form.is_valid():
            course = form.save(commit=False)
            course.teacher = Academic.objects.get_or_create(user=request.user)[0]
            course.save()
            # Next step after creation: define the course's target competencies.
            return add_target(request, course.slug)
        print(form.errors)
    return render(request, 'interactWBL/add_course.html',
                  {'form': form, 'is_academic': is_academic, 'courses': courses})
def about(request):
    """Render the static about page."""
    return render(request, 'interactWBL/about.html')
def competencies(request):
    """List all competencies alphabetically, with role-specific sidebar data."""
    is_academic, is_student, is_mentor = determine_profile(request)
    context_dict = {
        'competencies': Competency.objects.order_by('name'),
        'is_academic': is_academic,
        'is_student': is_student,
        'is_mentor': is_mentor,
    }
    if is_academic:
        # Sidebar: courses taught by this academic.
        teacher = Academic.objects.filter(user=request.user)
        context_dict['courses'] = Course.objects.filter(teacher=teacher)
    elif is_student:
        # Sidebar: courses the student is enrolled in.
        student = Student.objects.filter(user=request.user)
        context_dict['courses'] = [
            enrolment.course
            for enrolment in Enrolment.objects.filter(student=student)
        ]
    elif is_mentor:
        # Sidebar: students assigned to this mentor.
        mentor = Mentor.objects.get(user=request.user)
        context_dict['students'] = list(Student.objects.filter(mentor=mentor))
    return render(request, 'interactWBL/competencies.html', context_dict)
# Create the role-specific profile (Academic/Student/Mentor) for a freshly
# registered user, inferred from the registration email domain.  This assumes
# students/academics register with their University of Glasgow accounts and
# only caters for UoG addresses (the method could be generalised to any UK
# university).
@login_required
def register_profile(request):
    """Create the Academic/Student/Mentor profile matching the user's email.

    Academics need no extra data, so their profile is created immediately
    and they are redirected to the dashboard; students and mentors are
    shown a form for the remaining fields.
    """
    _, domain = request.user.email.split("@")
    is_student = False
    if domain in ('gla.ac.uk', 'research.gla.ac.uk'):
        # Academic profiles carry no extra fields: create and move on.
        Academic(user=request.user).save()
        return redirect('interactWBL:dashboard')
    elif domain == 'student.gla.ac.uk':
        form = StudentForm()
        is_student = True
    else:
        # Fixed: was `form = MentorForm` (the class, not an instance), so
        # the template received an uninstantiated form on GET.
        form = MentorForm()
    if request.method == 'POST':
        form = StudentForm(request.POST) if is_student else MentorForm(request.POST)
        if form.is_valid():
            profile = form.save(commit=False)
            profile.user = request.user
            profile.save()
            return redirect('interactWBL:dashboard')
        # The mentor branch previously swallowed validation errors silently.
        print(form.errors)
    return render(request, 'interactWBL/profile_registration.html', {'form': form})
@login_required
def profile(request, username):
    """
    Display (and, for students/mentors, allow editing of) a user's profile.

    The profile type is inferred from the *viewing* user's email domain; the
    viewed user is looked up by ``username``. Academics get a read-only view
    plus the reflections of the viewed student in their courses.
    """
    try:
        user = User.objects.get(username=username)
    except User.DoesNotExist:
        # Consistency fix: every other redirect in this module uses the
        # namespaced URL name; the bare 'dashboard' would not reverse.
        # TODO: surface an error message to the user on this redirect.
        return redirect('interactWBL:dashboard')
    student = False
    email = request.user.email
    _, domain = email.split("@")
    userprofile = None
    form = None
    if domain == 'gla.ac.uk' or domain == 'research.gla.ac.uk':
        # Academics have no editable profile object of their own.
        pass
    elif domain == 'student.gla.ac.uk':
        userprofile = Student.objects.get_or_create(user=user)[0]
        form = StudentForm()
        student = True
    else:
        form = MentorForm()
        if (request.user.is_superuser):
            userprofile = None
        else:
            userprofile = Mentor.objects.get_or_create(user=user)[0]
    if request.method == 'POST':
        if form:
            if student:
                form = StudentForm(request.POST, instance=userprofile)
                if form.is_valid():
                    form.save(commit=True)
                    return redirect('interactWBL:dashboard')
                else:
                    print(form.errors)
            else:
                form = MentorForm(request.POST, instance=userprofile)
                if form.is_valid():
                    form.save(commit=True)
                    return redirect('interactWBL:dashboard')
                else:
                    print(form.errors)
    (is_teacher, is_student, is_mentor) = determine_profile(request)
    context_dict = {}
    context_dict['is_academic'] = is_teacher
    context_dict['is_student'] = is_student
    context_dict['is_mentor'] = is_mentor
    context_dict['userprofile'] = userprofile
    context_dict['selecteduser'] = user
    context_dict['form'] = form
    # if the user is an academic, find the courses they teach and pass them to the template to be displayed
    if is_teacher:
        active_academic = Academic.objects.filter(user=request.user)
        active_academic_courses = Course.objects.filter(teacher=active_academic)
        context_dict['courses'] = active_academic_courses
        # further querying: list the viewed student's reflections in this academic's courses
        try:
            reflections = []
            student_being_viewed = Student.objects.get(user=user)
            for course in active_academic_courses:
                reflections += list(Reflection.objects.filter(student=student_being_viewed).filter(course=course))
            context_dict['reflections'] = reflections
        except Student.DoesNotExist:
            # viewed user is not a student; no reflections to show
            pass
    # if the user is a student, find the courses they are enrolled in for the sidebar
    elif is_student:
        active_student = Student.objects.filter(user=request.user)
        enrolments = Enrolment.objects.filter(student=active_student)
        courses = []
        for e in enrolments:
            courses.append(e.course)
        context_dict['courses'] = courses
    return render(request, 'interactWBL/profile.html', context_dict)
def mentors(request):
    """
    List all mentors; for a student, also expose their own mentor so the
    template can highlight it.
    """
    context_dict = {}
    (is_teacher, is_student, is_mentor) = determine_profile(request)
    if is_student:
        student = Student.objects.get(user=request.user)
        # Bug fix: this key used to be discarded by a second
        # `context_dict = {}` re-initialisation further down; the dict is
        # now kept intact so 'student_mentor' actually reaches the template.
        context_dict['student_mentor'] = student.mentor
    context_dict['mentors'] = Mentor.objects.all()
    context_dict['is_academic'] = is_teacher
    context_dict['is_student'] = is_student
    context_dict['is_mentor'] = is_mentor
    # sidebar boilerplate: list the courses relevant to the active user
    # (should eventually be refactored into custom template tags)
    if is_teacher:
        active_academic = Academic.objects.filter(user=request.user)
        context_dict['courses'] = Course.objects.filter(teacher=active_academic)
    # if the user is a student, pass the courses they are enrolled in
    elif is_student:
        active_student = Student.objects.filter(user=request.user)
        enrolments = Enrolment.objects.filter(student=active_student)
        courses = []
        for e in enrolments:
            courses.append(e.course)
        context_dict['courses'] = courses
    return render(request, 'interactWBL/mentors.html', context_dict)
def remove_target(request, target_id):
    """
    Delete the given CourseTarget and re-render its course page.

    Fetches the target once (the original issued the same lookup twice),
    remembers its course, deletes the row, then delegates to show_course.
    """
    target = CourseTarget.objects.get(id=target_id)
    course = target.course
    target.delete()
    return show_course(request, course.slug)
def students(request):
    """
    Render the students overview: an academic sees their courses, a mentor
    sees the students they supervise.
    """
    context_dict = {}
    (is_teacher, is_student, is_mentor) = determine_profile(request)
    if is_teacher:
        active_academic = Academic.objects.get(user=request.user)
        # Fix: pass an iterable queryset rather than the bare related
        # manager (`course_set`), matching how the other views in this
        # module hand 'courses' to the templates.
        context_dict['courses'] = active_academic.course_set.all()
    # NOTE(review): a comment in the original promised a student branch
    # ("find out what courses they are taking") that was never written.
    elif is_mentor:
        mentor = Mentor.objects.get(user=request.user)
        students = list(Student.objects.filter(mentor=mentor))
        context_dict['students'] = students
    return render(request, 'interactWBL/students.html', context_dict)
43,522 | bhilosobher/interactWBL | refs/heads/master | /populate_academics_courses_assignments_submissions_reflections_persCompetencies.py | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'devproj.settings')
import django,names,random
django.setup()
from interactWBL.models import User, Academic, Course, Competency, CourseTarget, Student, Enrolment, Assignment, Submission, Reflection, PersonalCompetency, CompetencyEndorsement
def populate():
    """Seed academics, courses, course targets, enrolments, assignments,
    submissions, reflections and personal competencies.

    Must run after the competency, mentor and student population scripts:
    it reads the existing Competency and Student rows. Course years and
    competency picks are random, so repeated runs produce different data.
    """
    # have existing competencies ready in a list and calculate how many there are
    competencies_list = list(Competency.objects.all())
    comps_no = len(competencies_list)
    # academic first names
    academics_names = ['Mechelle', 'Evelia', 'Sylvie', 'Stephania', 'Octavio', 'Shona', 'Spencer', 'Natisha', 'Guadalupe',
                       'Lea', ]
    # course names for courses to be created
    course_names = ['Programming', 'Cyber Security Fundamentals', 'Systems and Networks', 'Enterprise Cyber Security',
                    'Database Theory and Application', 'Software Engineering', 'Software Project Managements',
                    'Internet Technology', 'Advanced Programming', 'Algorithms and Data Structures',]
    # collect existing students in a list and determine their number
    students_list = list(Student.objects.all())
    studs_no = len(students_list)
    print("there are " + str(studs_no) + " students")
    # create 10 users, then associate 10 academics with the 10 users & create 10 courses, 1 for taught by each academic
    i = 0
    for n in academics_names:
        # create a user with name from the list above
        u = User.objects.get_or_create(username=n.lower(), password='')[0]
        u.set_password('securepassword')  # hash the password properly (get_or_create stored it raw)
        u.save()
        # then associate an academic profile with that user
        a = Academic.objects.get_or_create(user=u)[0]
        u.first_name = academics_names[i]
        u.last_name = names.get_last_name()
        u.email = academics_names[i].lower() + '.'+ u.last_name.lower()+'@gla.ac.uk'
        u.save()
        # after the academic profile is fully set up, create a course taught by that academic
        c = Course.objects.get_or_create(teacher=a, year = random.randint(1,5), name=course_names[i],
            lecture_recordings="http://www.echo360.com",missed_lecture_procedure="The exact procedure will be announced later, but please make sure to message me if you have any pressing doubts at the moment.",
            ILOs="The course aims to provide students with mathematical/quantitative skills and knowledge that consititute the foundation for techniques and instruments in both microeconomic theory and intertemporal macroeconomics (such as multivariate calculus and integration, constrained optimisation, differential equations, dynamic programming methods, functional analysis), and to demonstrate various mathematical techniques are applied to economic problems.",
            moodle='https://moodle.org/',
            description="The aim of this course is to provide students with a comprehensive overview of web application development. It will provide students with the skills to design and develop distributed web applications in a disciplined manner, using a range of tools and technologies. It will also strengthen their understanding of the context and rationale of distributed systems.")[0]
        i += 1 # this is so we iterate through academic and course names at the same time
        """
        this next sections deals with randomly assigning competency objectives to the created courses
        We select a competency at random from the list and add about 8-9 targets to the most recent course
        """
        # 9 draws with replacement; duplicates collapse via get_or_create,
        # hence the "about 8-9" targets per course.
        for j in range (1,10):
            t = CourseTarget.objects.get_or_create(course=c,competency=competencies_list[random.randint(0,comps_no-1)])[0]
    """
    this next sections deals with enrolling students in courses (according to the year of the course)
    """
    course_list = list(Course.objects.all())
    for course in course_list:
        print(course)
        print("now enrolling:")
        # enrol every student whose year matches the course's year
        for s in students_list:
            if s.year == course.year:
                e = Enrolment.objects.get_or_create(course=course, student=s)[0]
                e.save()
                print(e)
            else:
                pass
    print("Generating assignments...")
    print("Generating submissions...")
    print("Generating reflections and personal competencies...")
    for course in course_list[0:6]:
        # for half of the courses determine the students enrolled
        enrollments = list(Enrolment.objects.filter(course= course))
        students_in_course =[]
        # for each course, create an assignment
        assignment = Assignment.objects.get_or_create(name=course.name + " project", course=course,
            description="Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",)[0]
        # add associate 4 competencies with this assignment
        for i in range(1, 5):
            random_competency = competencies_list[random.randint(0, comps_no - 1)]
            assignment.competencies.add(random_competency)
        # from the list of enrollments, obtain the list of all students enrolled in this course
        for e in enrollments:
            students_in_course.append(e.student)
        #for each student in the course, create a weekly reflection by that student
        for student in students_in_course:
            weekly_reflection = Reflection.objects.get_or_create(student=student, course=course,type='weekly',assignment=assignment,
                content="Diam sit amet nisl suscipit adipiscing bibendum est ultricies. In pellentesque massa placerat duis ultricies lacus. Cursus mattis molestie a iaculis at erat. Libero enim sed faucibus turpis in eu mi bibendum. Purus in massa tempor nec feugiat nisl pretium fusce id. Sit amet purus gravida quis. Nisl pretium fusce id velit ut tortor. Eu consequat ac felis donec et odio pellentesque. Diam sollicitudin tempor id eu nisl nunc mi ipsum. Faucibus interdum posuere lorem ipsum. Vitae nunc sed velit dignissim sodales ut. Aenean pharetra magna ac placerat vestibulum lectus. Dictum non consectetur a erat nam at lectus urna. Nunc sed velit dignissim sodales. Eget nunc lobortis mattis aliquam faucibus purus in massa tempor. Fames ac turpis egestas integer eget aliquet nibh. Sit amet massa vitae tortor. Potenti nullam ac tortor vitae. Odio aenean sed adipiscing diam. In cursus turpis massa tincidunt dui ut ornare.")[0]
            # print info to show to the user while they are waiting for pop script to complete:
            print("created assignment: "+assignment.__str__())
            # for each weekly reflection associated with that assignment, associate some competencies with them
            for i in range(1,4):
                random_competency = competencies_list[random.randint(0,comps_no-1)]
                weekly_reflection.competencies.add(random_competency)
            # also create a PersonalCompetency object to track the student's progress with the competencies targeted by the assignment
            # NOTE(review): only the *last* randomly drawn competency gets a
            # PersonalCompetency here — confirm this is intentional.
            personal_competency = PersonalCompetency.objects.get_or_create(student=student, progress='1', competency=random_competency)[0]
            personal_competency.assignments.add(assignment)
            # for each assignment, create a submission
            submission = Submission.objects.get_or_create(student=student, assignment=assignment, course=course)[0]
            submission.file.name='codesubmission.txt'
            submission.save()
            print(submission)
    print('Printing created academics' + '\n')
    for a in Academic.objects.all():
        print(a.__str__() + '\n')
    print('Printing created courses' + '\n')
    for c in Course.objects.all():
        print(c.__str__() + '\n')
# Script entry point: run the population routine when executed directly
# (requires DJANGO_SETTINGS_MODULE configured by the header above).
if __name__ == '__main__':
    print('Starting academic, course and course targets population script:' + '\n')
    populate()
| {"/populate_students.py": ["/interactWBL/models.py"], "/interactWBL/views.py": ["/interactWBL/models.py", "/interactWBL/forms.py"], "/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py": ["/interactWBL/models.py"], "/populate_all.py": ["/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py", "/populate_mentors.py", "/populate_students.py", "/populate_with_competencies.py"], "/interactWBL/templatetags/interactwbl_template_tags.py": ["/interactWBL/models.py"], "/interactWBL/admin.py": ["/interactWBL/models.py"], "/populate_with_competencies.py": ["/interactWBL/models.py"], "/populate_mentors.py": ["/interactWBL/models.py"], "/interactWBL/forms.py": ["/interactWBL/models.py"], "/devproj/urls.py": ["/interactWBL/models.py"]} |
43,523 | bhilosobher/interactWBL | refs/heads/master | /populate_all.py | import populate_academics_courses_assignments_submissions_reflections_persCompetencies,populate_mentors,populate_students,populate_with_competencies
# a script that collects together and runs all other population scripts
# the order in which these smaller scripts are run is important, because of Model dependencies
populate_with_competencies.populate() # add the default competencies
populate_mentors.populate() # create a bunch of companies and then some mentors working for each
populate_students.populate() # create students (and corresponding users); associate them with mentors
# next, create users; associate them with academics; associate academics with courses; create course targets; enroll
# students in courses, create an assignment for each course and associate it with each student taking it, further
# create a submission for that assignment, and reflections & personal competencies associated with the assignment
# a personal competency is an object that refers to one of the competencies defined in the DB and tracks a certain
# student's progress towards achieving that competency
populate_academics_courses_assignments_submissions_reflections_persCompetencies.populate()
| {"/populate_students.py": ["/interactWBL/models.py"], "/interactWBL/views.py": ["/interactWBL/models.py", "/interactWBL/forms.py"], "/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py": ["/interactWBL/models.py"], "/populate_all.py": ["/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py", "/populate_mentors.py", "/populate_students.py", "/populate_with_competencies.py"], "/interactWBL/templatetags/interactwbl_template_tags.py": ["/interactWBL/models.py"], "/interactWBL/admin.py": ["/interactWBL/models.py"], "/populate_with_competencies.py": ["/interactWBL/models.py"], "/populate_mentors.py": ["/interactWBL/models.py"], "/interactWBL/forms.py": ["/interactWBL/models.py"], "/devproj/urls.py": ["/interactWBL/models.py"]} |
43,524 | bhilosobher/interactWBL | refs/heads/master | /interactWBL/templatetags/interactwbl_template_tags.py | from django import template
from interactWBL.models import Course, Student, Academic, Enrolment, Mentor
register = template.Library()
# check what type of user is viewing the section generated by these custom template tags, then act accordingly...
@register.inclusion_tag('interactWBL/courses.html')
def get_courses(user):
    """Render the sidebar course list for whichever profile *user* has.

    Students see the courses they are enrolled in; academics the courses
    they teach; mentors the distinct set of courses any of their mentorees
    take. Users with no profile get an empty context.
    """
    def profile_or_false(model):
        # Resolve the user's profile of the given kind, or False if absent.
        try:
            return model.objects.get(user=user)
        except model.DoesNotExist:
            return False

    student = profile_or_false(Student)
    academic = profile_or_false(Academic)
    mentor = profile_or_false(Mentor)

    if student:
        # courses the student user is enrolled in
        return {'courses': [e.course for e in Enrolment.objects.filter(student=student)]}
    if academic:
        # courses the academic teaches
        return {'courses': Course.objects.filter(teacher=academic)}
    if mentor:
        # every enrolment of every student this mentor supervises...
        mentoree_enrolments = []
        for mentoree in Student.objects.filter(mentor=mentor):
            mentoree_enrolments.extend(Enrolment.objects.filter(student=mentoree))
        # ...deduplicated into the distinct set of courses they cover
        return {'courses': {e.course for e in mentoree_enrolments}}
    return {}
| {"/populate_students.py": ["/interactWBL/models.py"], "/interactWBL/views.py": ["/interactWBL/models.py", "/interactWBL/forms.py"], "/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py": ["/interactWBL/models.py"], "/populate_all.py": ["/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py", "/populate_mentors.py", "/populate_students.py", "/populate_with_competencies.py"], "/interactWBL/templatetags/interactwbl_template_tags.py": ["/interactWBL/models.py"], "/interactWBL/admin.py": ["/interactWBL/models.py"], "/populate_with_competencies.py": ["/interactWBL/models.py"], "/populate_mentors.py": ["/interactWBL/models.py"], "/interactWBL/forms.py": ["/interactWBL/models.py"], "/devproj/urls.py": ["/interactWBL/models.py"]} |
43,525 | bhilosobher/interactWBL | refs/heads/master | /interactWBL/admin.py | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
# Register your models here.
from .models import Student, Mentor, Academic, Course, Competency, Company, Enrolment, Assignment,\
PersonalCompetency, CompetencyEndorsement, CourseTarget, Reflection, Submission, Appraisal, StudentLogins,\
MentorLogins, ReflectionTopic
class CompetencyAdmin(admin.ModelAdmin):
    # Show name, description and type columns in the Competency changelist.
    list_display = ('name','description','type')
class MentorAdmin(admin.ModelAdmin):
    # Show the linked user and employing company in the Mentor changelist.
    list_display = ('user','company',)
class StudentAdmin(admin.ModelAdmin):
    # Show the linked user and assigned mentor in the Student changelist.
    list_display = ('user','mentor')
class CourseAdmin(admin.ModelAdmin):
    # Auto-fill the slug from the course name while typing in the admin form.
    prepopulated_fields = {'slug':('name',)}
# Expose every interactWBL model in the Django admin; models with a
# customised changelist use the *Admin classes defined above.
admin.site.register(ReflectionTopic)
admin.site.register(Student, StudentAdmin)
admin.site.register(Academic)
admin.site.register(Mentor, MentorAdmin)
admin.site.register(Course, CourseAdmin)
admin.site.register(Company)
admin.site.register(Enrolment)
admin.site.register(Assignment)
admin.site.register(PersonalCompetency)
admin.site.register(CourseTarget)
admin.site.register(CompetencyEndorsement)
admin.site.register(Reflection)
admin.site.register(Submission)
admin.site.register(Appraisal)
admin.site.register(Competency, CompetencyAdmin)
admin.site.register(StudentLogins)
admin.site.register(MentorLogins)
43,526 | bhilosobher/interactWBL | refs/heads/master | /populate_with_competencies.py | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'devproj.settings')
import django
django.setup()
from interactWBL.models import Competency
def populate():
    """Seed the Competency table with the default technical, social and
    personal competencies, then print everything that now exists.

    Idempotent: add_competency uses get_or_create, so re-running the
    script adds no duplicates.
    """
    # technical skills
    analytical_technical_capability = ('Project management', 'Requirements analysis', 'Software design', 'Programming',
                                       'Validation and verification tests', 'Configuration management', 'Quality',
                                       'Tests', 'Documentation', 'Maintenance',)
    use_of_technology = ("Evaluation and selection of tools to support problem areas",
                         "Adaptation and use of tools to support problem areas",)
    # social skills
    interpersonal = ('Communication', 'Adaptability', 'Aptitude to relate', 'Sociability',
                     'Interpersonal sensibility',)
    cooperation = ('Understanding of the dynamics of debates and the follow-up of an agenda', 'Desire to contribute',
                   'Leadership', 'Motivation', 'Decision making to allow different opinions',
                   'The skill of presenting ideas and listening to the ideas of others', 'Orientation to achievement',)
    conflict_handling = ('Effective handling of the emotions', 'Aptitude to listen to others',
                         'Resolution of conflicts',
                         'Negotiating skills', 'Judgement, common sense and realism', 'Empathy',)
    # personal skills
    development_in_job = ('Capability to learn alone', 'Capability to search information', 'Capability to take risks',
                          'Flexibility', 'Verbal reasoning', 'Stress resistance', 'Pro-activeness', 'Responsibility',)
    personal_development = ('Identify areas of personal opportunity', 'Define a project and establish a personal goal',
                            'Determine priorities and refine the goals',
                            'Identify and evaluate available and required resources',
                            'Balance necessary resources to satisfy multiple goals',
                            'Monitor the progress, to make adjustments during the project development',
                            'Learn from past actions to project future results', 'High self-esteem',
                            'Entrepreneurial skill',
                            'Commitment', 'Self-control', 'Optimism',)
    rights_limits = ('Ability to understand own interest and needs',
                     'Know the rules and written principles to identify limits',
                     'Ability to argue for own rights', 'Ability to suggest arrangements or alternative solutions',)
    # Each dict maps a (hashable) tuple of competency names to the shared
    # description stored for every name in that tuple.
    technicals = {analytical_technical_capability: 'Possess analytical and learning capability in this technical area',
                  use_of_technology: 'Masters the use of technology thus', }
    socials = {interpersonal: 'Ability in handling interpersonal relations',
               cooperation: 'Cooperates and works well in a team',
               conflict_handling: 'Has the ability to handle and solve conflicts by exhibiting this competency', }
    personals = {
        development_in_job: 'The student possesses the ability to adapt and excel in the work environment through this competency',
        personal_development: 'The students has this competency, which enables them to develop and grow',
        rights_limits: 'The student is aware of their rights and limits in any given context and adapts to these'}
    competencies = {'technical': technicals ,'social': socials, 'personal':personals}
    add_competency(competencies)
    for c in Competency.objects.all():
        print(c.__str__() + "\n")
def add_competency(competencies):
    """Persist every competency described by the nested mapping.

    *competencies* maps a type label ('technical'/'social'/'personal') to a
    dict whose keys are tuples of competency names and whose values are the
    description shared by every name in the tuple. Existing rows are reused
    rather than duplicated.
    """
    for type_label, groups in competencies.items():
        for name_group, shared_description in groups.items():
            for competency_name in name_group:
                Competency.objects.get_or_create(
                    name=competency_name,
                    description=shared_description,
                    type=type_label,
                )
# Script entry point: populate the competency table when run directly.
if __name__ == '__main__':
    print("Starting competencies population script...")
    populate()
43,527 | bhilosobher/interactWBL | refs/heads/master | /populate_mentors.py | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'devproj.settings')
import django, names
django.setup()
from interactWBL.models import Company, User, Mentor
def populate():
    """Seed 10 fictitious companies and 50 mentors (5 per company).

    Mentor i is assigned to company i % 10 by the nested while/for loop:
    each pass of the inner loop advances i once per company, so five outer
    passes produce exactly 50 mentors.
    """
    # a list with 10 fictitious company names
    companies=[]
    company_names = ['Morgan', 'JP', 'Macrosoft', 'Poodle', 'SAR', 'Samsong', 'Parclays', 'HRBC', 'BM', 'PristineMedia', ]
    for n in company_names:
        c = Company.objects.get_or_create(name=n)[0]
        companies.append(c)
    # a list of 50 names
    mentor_names = ['James', 'George', 'Dean', 'Arran', 'Claudius', 'John', 'Tom', 'Thomas', 'Charles', 'Paul',
                    'David', 'Dave', 'Chris', 'Derek', 'Sam', 'Jacob', 'Bilbo', 'Gabriel', 'Fred', 'Joe',
                    'Joseph', 'Euan', 'Martin', 'Declan', 'Arthur', 'Daisy', 'Margaret', 'Mary', 'Tina', 'Georgia',
                    'Rachel', 'Chloe', 'Stella', 'Jeanne', 'Jane', 'Immogen', 'Victoria', 'Adele', 'Martha', 'Melissa',
                    'Paula', 'Andrea', 'Ninon', 'Susan', 'Bertha', 'Margery', 'Holy', 'Ruby', 'Olivia', 'Julia', ]
    mentors = []
    i = 0
    while i < 50:
        for c in companies:
            u = User.objects.get_or_create(username=mentor_names[i].lower(), password='securepassword')[0]
            u.set_password('securepassword')  # hash the password properly
            u.save()
            # Bug fix: get_or_create returns an (object, created) tuple; the
            # original stored the whole tuple, unlike every sibling script.
            m = Mentor.objects.get_or_create(company=c, user=u)[0]
            u.first_name=mentor_names[i]
            u.last_name=names.get_last_name()
            # NOTE(review): the email local part keeps the capitalised first
            # name, whereas the academics script lowercases it — confirm
            # whether that inconsistency is intentional before changing data.
            u.email=mentor_names[i]+'.'+u.last_name.lower()+'@'+ c.name.lower()+'.com'
            u.save()
            print(u.email+'\n')
            i+=1
            mentors.append(m)
# Script entry point: create companies and mentors when run directly.
if __name__=='__main__':
    print("Starting mentors population script...")
    populate()
43,528 | bhilosobher/interactWBL | refs/heads/master | /interactWBL/forms.py | from django import forms
from interactWBL.models import Course, Competency, PersonalCompetency, Mentor, Student, Company, CourseTarget, Enrolment
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
# these are form models which handle creating and saving a model into the database (by users)
# SignUpForm taken from https://simpleisbetterthancomplex.com/tutorial/2017/02/18/how-to-create-user-sign-up-view.html
class SignUpForm(UserCreationForm):
    """Registration form: the stock username/password pair plus identity
    fields; the email later determines which profile type is created."""
    first_name = forms.CharField(max_length=30, required=True, help_text='')
    last_name = forms.CharField(max_length=30, required=True, help_text='')
    email = forms.EmailField(max_length=254, help_text='Required. Please provide your university or company email.')

    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email', 'password1', 'password2', )
class CompetencyForm(forms.ModelForm):
    """Form for defining a new Competency (name, free-text description, type)."""
    COMPETENCY_TYPE_CHOICES = (
        ('technical', 'Technical competency'),
        ('social', 'Social competency'),
        ('personal', 'Personal competency')
    )
    name = forms.CharField(max_length=128, help_text="Please enter the name for your new competency")
    # NOTE(review): this assigns a *widget*, not a form Field, so the
    # ModelForm metaclass ignores it and the model-derived field is used —
    # confirm whether a CharField(widget=Textarea) was intended.
    description = forms.Textarea()
    type = forms.ChoiceField( choices=COMPETENCY_TYPE_CHOICES, help_text="Please select the new competency's type")

    class Meta:
        help_texts = {'description': 'Please define and describe the competency',}
        model = Competency
        exclude = ()
class CourseTargetsForm(forms.ModelForm):
    """Attach a competency target to a course; the course itself is set by
    the view, hence the exclude."""
    competency = forms.ModelChoiceField(queryset=Competency.objects.all(),
                                        help_text="Please select a competency for the course to target")

    class Meta:
        model = CourseTarget
        exclude = ('course',)
class EnrollmentForm(forms.ModelForm):
    """Enrol a chosen student in a course; the course is set by the view."""
    student = forms.ModelChoiceField(queryset=Student.objects.all(), help_text="Please select a student to enroll in course")

    class Meta:
        model = Enrolment
        exclude = ('course',)
class StudentForm(forms.ModelForm):
    """Student profile form; the owning user is attached by the view."""
    YEAR_CHOICES = (
        ('1', 'first year'),
        ('2', 'second year'),
        ('3', 'third year'),
        ('4', 'fourth year'),
        ('5', 'postgraduate')
    )
    year = forms.ChoiceField(choices=YEAR_CHOICES, initial=1, help_text="Please enter your current year group")
    # select the mentor if known in advance (optional at registration time)
    mentor = forms.ModelChoiceField(queryset=Mentor.objects.all(), required=False,
                                    help_text="Please select your mentor, if known in advance ")

    class Meta:
        model = Student
        exclude = ('user',)
class MentorForm(forms.ModelForm):
    """Mentor profile form: only the employing company is user-editable;
    user and competency_in_focus are managed elsewhere."""
    company = forms.ModelChoiceField(queryset=Company.objects.all())

    class Meta:
        model = Mentor
        exclude = ('user','competency_in_focus')
class CourseForm(forms.ModelForm):
    """Create/edit form for a Course; the teacher is attached by the view
    and the slug is filled in programmatically (hidden field)."""
    YEAR_CHOICES = (
        ('1', 'first year'),
        ('2', 'second year'),
        ('3', 'third year'),
        ('4', 'fourth year'),
        ('5', 'postgraduate')
    )
    name = forms.CharField(max_length=129, help_text="Please enter the course's name")
    year = forms.ChoiceField(choices=YEAR_CHOICES,initial=1,help_text="Please enter the year group for the course")
    moodle = forms.URLField(max_length=200,
                            help_text="Please enter link to course page in existing learning environment")
    # NOTE(review): these assign widgets, not Fields, so the ModelForm
    # metaclass ignores them and the model-derived fields are used.
    description = forms.Textarea()
    ILOs = forms.Textarea()
    missed_lecture_procedure = forms.Textarea()
    lecture_recordings = forms.URLField(required=False, max_length=300, help_text="Please enter the link to the video "
                                                                                 "recordings resource")
    slug = forms.CharField(widget=forms.HiddenInput(), required=False)

    def clean(self):
        """Normalise the moodle URL: prepend a scheme when omitted."""
        cleaned_data = self.cleaned_data
        moodle = cleaned_data.get('moodle')
        if moodle and not (moodle.startswith('https://') or moodle.startswith('http://')):
            # Bug fix: the original prepended the bare string 'https' (no
            # '://'), turning e.g. 'moodle.org' into 'httpsmoodle.org'.
            moodle = 'https://' + moodle
            cleaned_data['moodle'] = moodle
        return cleaned_data

    class Meta:
        help_texts = {'description': 'Please enter course description',
                      'missed_lecture_procedure': 'Please specify the missed lectures procedure',
                      'ILOs': 'Please list the intended learning outcomes of the course',}
        model = Course
        exclude = ('teacher',)
43,529 | bhilosobher/interactWBL | refs/heads/master | /devproj/urls.py | """devproj URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.conf.urls import include
from django.http import HttpResponseRedirect
from django.contrib.auth import login
from interactWBL import views
from interactWBL.models import Student, Mentor, StudentLogins, MentorLogins
from django.conf import settings
from django.conf.urls.static import static
from registration.backends.simple.views import RegistrationView
from django.core.urlresolvers import reverse
from django.views.generic import RedirectView
from django.contrib.auth.views import LoginView
""" custom view class that was used during development and is now abandoned here
class MyRegistrationView(RegistrationView):
def get_success_url(self, user):
return reverse('interactWBL:register_profile')
def registration_allowed(self):
if self.request.user.is_authenticated:
return False
else:
return True
"""
class MyLoginView(LoginView):
    """Login view that audits successful logins.

    After authenticating, it records one row in StudentLogins or MentorLogins
    depending on which profile the user has, then redirects to the success URL.
    """
    def form_valid(self, form):
        # Authenticate the user first, then try to log the event for each
        # profile type; a missing profile is expected and simply skipped.
        login(self.request, form.get_user())
        if self.request.user.is_authenticated:
            user = self.request.user
            try:
                logged_student = Student.objects.get(user=user)
                StudentLogins.objects.create(student=logged_student)
                print("One student login logged!")
            except Student.DoesNotExist:
                print("logged in user is not a student")
                pass
            try:
                logged_mentor = Mentor.objects.get(user=user)
                MentorLogins.objects.create(mentor=logged_mentor)
                print("One mentor login logged!")
            except Mentor.DoesNotExist:
                print("logged in user is not a mentor!")
                pass
        return HttpResponseRedirect(self.get_success_url())

    # Already-authenticated visitors are redirected instead of shown the form.
    redirect_authenticated_user = True
# Project-level URL routing: landing page, the interactWBL app, admin,
# registration/login, and media file serving in development.
urlpatterns = [
    url(r'^$', views.landing, name='landing'),
    url(r'^interactWBL/', include('interactWBL.urls')),  # mapping the WBL app urls to the project urls w/ 'include'function
    url(r'^admin/', admin.site.urls),
    url(r'^accounts/register/$', views.signup, name='registration_register'),
    # url(r'^accounts/register/$', MyRegistrationView.as_view(), name='registration_register'),
    url(r'^accounts/login/$', MyLoginView.as_view(), name='auth_login'),
    url(r'^accounts/', include('registration.backends.simple.urls')),
    # favicon to be displayed in browser
    url(r'^favicon\.ico$', RedirectView.as_view(url='/static/images/favicon.ico')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| {"/populate_students.py": ["/interactWBL/models.py"], "/interactWBL/views.py": ["/interactWBL/models.py", "/interactWBL/forms.py"], "/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py": ["/interactWBL/models.py"], "/populate_all.py": ["/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py", "/populate_mentors.py", "/populate_students.py", "/populate_with_competencies.py"], "/interactWBL/templatetags/interactwbl_template_tags.py": ["/interactWBL/models.py"], "/interactWBL/admin.py": ["/interactWBL/models.py"], "/populate_with_competencies.py": ["/interactWBL/models.py"], "/populate_mentors.py": ["/interactWBL/models.py"], "/interactWBL/forms.py": ["/interactWBL/models.py"], "/devproj/urls.py": ["/interactWBL/models.py"]} |
43,530 | bhilosobher/interactWBL | refs/heads/master | /interactWBL/urls.py | from django.conf.urls import url
from interactWBL import views
app_name = 'interactWBL'
# this is the app-specific urls file, which defines how each address is to be mapped to a view function
# App-level URL routing for interactWBL; slugs identify courses, usernames
# identify profiles.
urlpatterns = [
    url(r'^$', views.landing, name='landing'),
    url(r'^dashboard/$', views.dashboard, name='dashboard'),
    url(r'^about/$', views.about, name='about'),
    # NOTE(review): 'dasadd_competency' looks like a typo for 'add_competency'
    # in the path segment -- confirm against templates that reverse this name.
    url(r'^competencies/dasadd_competency/$', views.add_competency, name='add_competency'),
    url(r'^add_course/$', views.add_course, name='add_course'),
    url(r'^course/(?P<course_name_slug>[\w\-]+)/$', views.show_course, name='show_course'),
    url(r'^course/$', views.my_courses, name='my_courses'),
    url(r'^dashboard/competencies/$', views.competencies, name='competencies'),
    url(r'^register_profile/$', views.register_profile, name='register_profile'),
    url(r'^profile/(?P<username>[\w\-]+)/$', views.profile, name='profile'),
    url(r'^dashboard/mentors/$', views.mentors, name='mentors'),
    url(r'^remove_target/(?P<target_id>[\w]+)/$', views.remove_target, name='remove_target'),
    url(r'^course/(?P<course_name_slug>[\w\-]+)/add_target_to_course/$', views.add_target, name='add_competency_targets_to_course'),
    url(r'^course/(?P<course_name_slug>[\w\-]+)/enroll/$', views.enroll, name='enroll'),
]
| {"/populate_students.py": ["/interactWBL/models.py"], "/interactWBL/views.py": ["/interactWBL/models.py", "/interactWBL/forms.py"], "/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py": ["/interactWBL/models.py"], "/populate_all.py": ["/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py", "/populate_mentors.py", "/populate_students.py", "/populate_with_competencies.py"], "/interactWBL/templatetags/interactwbl_template_tags.py": ["/interactWBL/models.py"], "/interactWBL/admin.py": ["/interactWBL/models.py"], "/populate_with_competencies.py": ["/interactWBL/models.py"], "/populate_mentors.py": ["/interactWBL/models.py"], "/interactWBL/forms.py": ["/interactWBL/models.py"], "/devproj/urls.py": ["/interactWBL/models.py"]} |
43,531 | bhilosobher/interactWBL | refs/heads/master | /interactWBL/models.py | from django.contrib.auth.models import User
from django.db import models
from django.template.defaultfilters import slugify
from datetime import timedelta, timezone
class Competency(models.Model):
    """A named competency (technical, social or personal) students can develop."""
    COMPETENCY_TYPE_CHOICES = (
        ('technical', 'Technical competency'),
        ('social', 'Social competency'),
        ('personal', 'Personal competency')
    )
    name = models.CharField(max_length=128, unique=True)
    description = models.TextField()
    type = models.CharField(choices=COMPETENCY_TYPE_CHOICES, default='technical', max_length=16)

    class Meta:
        verbose_name_plural = "competencies"

    def __str__(self):
        return self.name
class Company(models.Model):
    """An employing company that mentors belong to."""
    name = models.CharField(max_length=128)

    class Meta:
        verbose_name_plural = "companies"

    def __str__(self):
        return self.name
class Mentor(models.Model):
    """Workplace mentor profile, one-to-one with a Django User."""
    user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True)
    company = models.ForeignKey(Company)

    def __str__(self):
        return self.user.username
class Student(models.Model):
    """Student profile, one-to-one with a Django User.

    The mentor link survives mentor deletion (SET_NULL) so student records
    are never cascaded away with a mentor.
    """
    YEAR_CHOICES = (
        ('1', 'first year'),
        ('2', 'second year'),
        ('3', 'third year'),
        ('4', 'fourth year'),
        ('5', 'postgraduate')
    )
    user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True)
    mentor = models.ForeignKey(Mentor, on_delete=models.SET_NULL, null=True)
    year = models.CharField(default=1, choices=YEAR_CHOICES, max_length=8)
    competencies_in_focus = models.ManyToManyField(Competency, blank=True)

    def __str__(self):
        return self.user.username
class Academic(models.Model):
    """Academic (teacher) profile, one-to-one with a Django User."""
    user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True)

    def __str__(self):
        return self.user.username
class StudentLogins (models.Model):
    """Audit record: one row per student login, timestamped automatically."""
    student = models.ForeignKey(Student)
    date = models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name_plural = "Student logins"

    def __str__(self):
        return str(self.student) + ':' + str(self.date)
class MentorLogins(models.Model):
    """Audit record: one row per mentor login, timestamped automatically."""
    mentor = models.ForeignKey(Mentor)
    date = models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name_plural = "Mentor logins"

    def __str__(self):
        return str(self.mentor) + ':' + str(self.date)
class Course (models.Model):
    """A taught course owned by an Academic, identified in URLs by its slug.

    The slug is derived from the name on first save; duplicate names get a
    numeric suffix so each course keeps a distinct slug.
    """
    YEAR_CHOICES = (
        ('0', 'mixed year'),
        ('1', 'first year'),
        ('2', 'second year'),
        ('3', 'third year'),
        ('4', 'fourth year'),
        ('5', 'postgraduate')
    )
    name = models.CharField(max_length=128)
    teacher = models.ForeignKey(Academic, blank=True, on_delete=models.SET_NULL, null=True)
    description = models.TextField(blank=True)
    year = models.CharField(blank=True, choices=YEAR_CHOICES, null=True, max_length=8)
    moodle = models.URLField(blank=True, null=True, max_length=200)
    ILOs = models.TextField(blank=True, null=True)
    missed_lecture_procedure = models.TextField(blank=True, null=True)
    lecture_recordings = models.URLField(blank=True, null=True, max_length=300)
    slug = models.SlugField()

    def save(self, *args, **kwargs):
        # BUG FIX: the slug used to be recomputed on *every* save, and the
        # duplicate scan counted this course itself once it was in the
        # database, so re-saving appended a spurious suffix
        # ('maths' -> 'maths1'). Generate the slug only once instead.
        if not self.slug:
            duplicates = 0
            for c in Course.objects.all():
                if c.name == self.name:
                    duplicates += 1
            if duplicates == 0:
                self.slug = slugify(self.name)
            else:
                # e.g. second 'Maths' course becomes 'maths1'
                self.slug = slugify(self.name) + str(duplicates)
        super(Course, self).save(*args, **kwargs)

    def __str__(self):
        return self.name
class Enrolment(models.Model):
    """Join table linking a Student to a Course they are enrolled in."""
    course = models.ForeignKey(Course, on_delete=models.CASCADE)
    student = models.ForeignKey(Student, on_delete=models.CASCADE)

    def __str__(self):
        return self.student.user.username + ' in ' + self.course.name
# assigment.competency.add(c)
class Assignment(models.Model):
    """A piece of coursework, optionally tied to a course, mentor or student."""
    name = models.CharField(max_length=128)
    course = models.ForeignKey(Course, on_delete=models.CASCADE, blank=True, null=True)
    description = models.TextField()
    # NOTE(review): auto_now_add pins the deadline to creation time, which is
    # unusual for a deadline field -- confirm intended.
    deadline = models.DateField(auto_now_add=True)
    mentor = models.ForeignKey(Mentor, blank=True, null=True)
    # the assignment model refers to the student field because it was envisaged that the students might be given
    # individual assignments by their mentors, through the system (as an extra feature)
    student = models.ForeignKey(Student, blank=True, null=True)
    competencies = models.ManyToManyField(Competency)

    def __str__(self):
        return self.name + " assignment"
class ReflectionTopic(models.Model):
    """A topic students reflect on; may be raised by any party or tied to a course."""
    topic = models.TextField()
    student = models.ForeignKey(Student, null=True, blank=True)
    course = models.ForeignKey(Course, null=True, blank=True)
    academic = models.ForeignKey(Academic, null=True, blank=True)
    mentor = models.ForeignKey(Mentor, null=True, blank=True)

    def __str__(self):
        return "reflection on " + self.topic
class Reflection(models.Model):
    """A student's written reflection (quick or weekly), optionally tied to a
    course, assignment and set of competencies."""
    REFLECTION_TYPE_CHOICES = (
        ('quick', 'Quick Reflection'),
        ('weekly', 'Weekly Reflection'),
    )
    topic = models.ForeignKey(ReflectionTopic, null=True, blank=True)
    student = models.ForeignKey(Student, on_delete=models.CASCADE)
    course = models.ForeignKey(Course, on_delete=models.CASCADE, blank=True, null=True)
    type = models.CharField(choices=REFLECTION_TYPE_CHOICES, max_length=32)
    date = models.DateField(auto_now_add=True)
    content = models.TextField()
    competencies = models.ManyToManyField(Competency)
    assignment = models.ForeignKey(Assignment, blank=True, null=True)

    def __str__(self):
        # NOTE(review): course is nullable, so this raises AttributeError when
        # course is None -- confirm reflections always get a course in practice.
        return "reflection on the " + self.course.name + " course"
class CourseTarget(models.Model):
    """A competency that a course aims to develop (course <-> competency link)."""
    course = models.ForeignKey(Course, on_delete=models.CASCADE)
    competency = models.ForeignKey(Competency, on_delete=models.CASCADE)

    class Meta:
        verbose_name_plural = 'Course targets'

    def __str__(self):
        return "[" + self.course.name + "] " + self.competency.name
class Submission(models.Model):
    """A student's submission for an assignment, with feedback and grade."""
    assignment = models.ForeignKey(Assignment, on_delete=models.CASCADE)
    student = models.ForeignKey(Student, on_delete=models.CASCADE)
    feedback = models.TextField(max_length=1000, blank=True)
    grade = models.CharField(max_length=16, default="not graded")
    file = models.FileField(upload_to='coursework_submissions', null=True, blank=True)
    date = models.DateField(auto_now_add=True)
    grader = models.ForeignKey(Academic, on_delete=models.CASCADE, blank=True, null=True)
    course = models.ForeignKey(Course, null=True, blank=True)

    def __str__(self):
        # NOTE(review): course is nullable, so this raises when course is None.
        return 'submission for ' + self.course.name + " by " + self.student.user.username
class PersonalCompetency(models.Model):
    """A student's personal progress level for one competency, with the
    assignments that evidence it."""
    student = models.ForeignKey(Student)
    COMPETENCY_PROGRESS_CHOICES = (
        ('0', 'none'),
        ('1', 'basic'),
        ('2', 'good'),
        ('3', 'excellent'),
    )
    competency = models.ForeignKey(Competency)
    assignments = models.ManyToManyField(Assignment, blank=True)
    progress = models.CharField(choices=COMPETENCY_PROGRESS_CHOICES, max_length=16, default='0')

    class Meta:
        verbose_name_plural = "personal competencies"

    def __str__(self):
        return self.student.user.username + " " + self.competency.name + " competency"
class CompetencyEndorsement(models.Model):
    """A mentor's or academic's endorsement of a student's competency level."""
    COMPETENCY_PROGRESS_CHOICES = (
        ('0', 'none'),
        ('1', 'basic'),
        ('2', 'good'),
        ('3', 'excellent'),
    )
    mentor = models.ForeignKey(Mentor, null=True, blank=True)
    academic = models.ForeignKey(Academic, null=True, blank=True)
    competency = models.ForeignKey(PersonalCompetency)
    date = models.DateField(auto_now_add=True)
    progress = (models.CharField(choices=COMPETENCY_PROGRESS_CHOICES, max_length=16))

    class Meta:
        verbose_name_plural = "competency endorsements"

    def __str__(self):
        # competency is a PersonalCompetency; its .competency is the Competency.
        return self.competency.competency.name + " endorsement"
class Appraisal(models.Model):
    """A mentor's written appraisal of a student."""
    author = models.ForeignKey(Mentor)
    subject = models.ForeignKey(Student)
    content = models.TextField()
    date = models.DateField(auto_now_add=True)

    def __str__(self):
        return self.subject.user.username + "'s appraisal"
| {"/populate_students.py": ["/interactWBL/models.py"], "/interactWBL/views.py": ["/interactWBL/models.py", "/interactWBL/forms.py"], "/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py": ["/interactWBL/models.py"], "/populate_all.py": ["/populate_academics_courses_assignments_submissions_reflections_persCompetencies.py", "/populate_mentors.py", "/populate_students.py", "/populate_with_competencies.py"], "/interactWBL/templatetags/interactwbl_template_tags.py": ["/interactWBL/models.py"], "/interactWBL/admin.py": ["/interactWBL/models.py"], "/populate_with_competencies.py": ["/interactWBL/models.py"], "/populate_mentors.py": ["/interactWBL/models.py"], "/interactWBL/forms.py": ["/interactWBL/models.py"], "/devproj/urls.py": ["/interactWBL/models.py"]} |
43,552 | Rrm129/BlackJackSimulation | refs/heads/master | /datastructures.py | #Tarek Sayed
#Copy right Notice:
#Copyright ©2020. Tarek Sayed . All Rights Reserved.
# Permission required from author to use, copy, modify, and distribute this software and its documentation.
#Students are allowed to use this for their own personal review but may not copy, modify, or distribute this software.
import random
class Node:
    """Singly-linked-list node carrying an arbitrary payload in `data`.

    Equality compares payloads; a non-Node operand is compared against the
    payload directly, so `Node(3) == 3` is True.
    """
    def __init__(self, val):
        self.data = val
        self.next = None  # the pointer initially points to nothing

    def __str__(self):
        # BUG FIX: previously returned self.data itself, which raised
        # TypeError for any non-string payload (e.g. int data).
        return str(self.data)

    def __eq__(self, other):
        if isinstance(other, Node):
            return self.data == other.data
        # Non-Node operand: compare against the payload directly (same result
        # as the old wrap-in-a-temporary-Node dance, without the allocation).
        return self.data == other

    def __ne__(self, other):
        if isinstance(other, Node):
            return self.data != other.data
        return self.data != other
class Stack:
    """Linked-list stack of Nodes with a type-homogeneity check on push.

    Keeps an explicit node count (nNodes) in sync with the chain headed by
    stackTop. Several methods (isEqual, deleteInnerNode) temporarily tear the
    stack apart and rebuild it, so statement order here is load-bearing.
    """
    def __init__(self, sname=None, otherStack=None):
        self.stackTop = None
        self.nNodes = 0
        self.stackName = sname
        if (otherStack != None):  # Copy Constructor
            self.stackTop = None
            self.copyStack(otherStack)

    def initializeStack(self):
        """Empty the stack, releasing every node."""
        self.nNodes = 0
        while (self.stackTop != None):
            temp = self.stackTop
            self.stackTop = self.stackTop.next
            del temp

    def isEmptyStack(self):
        return self.stackTop == None

    def isEmpty(self):
        return self.stackTop == None

    def isFullStack(self):
        # A linked stack never fills up.
        return False;

    def pushNode(self, newNode):
        """Push an existing Node; rejected (with a printed message) when its
        payload type differs from the current top's payload type."""
        if (self.size() > 0 and type(newNode.data) != type(self.stackTop.data)):
            print("Invalid data Type. This stack is a stack for the " + str(type(self.stackTop.data)) + " data type.")
            return
        newNode.next = self.stackTop
        self.stackTop = newNode
        self.nNodes += 1

    def push(self, newItem):
        """Wrap newItem in a Node and push it, with the same type check."""
        if (self.size() > 0 and type(newItem) != type(self.stackTop.data)):
            print("Invalid data Type. This stack is a stack for the " + str(type(self.stackTop.data)) + " data type.")
            return
        newNode = Node(newItem)
        newNode.next = self.stackTop
        self.stackTop = newNode
        self.nNodes += 1

    def top(self):
        """Return the top payload, or None when empty."""
        if (self.stackTop != None):
            return self.stackTop.data
        else:
            return None

    def topNode(self):
        """Return the top Node itself, or None when empty."""
        if (self.stackTop != None):
            return self.stackTop
        else:
            return None

    def pop(self):
        """Remove the top node; prints a message instead of raising when empty."""
        if (self.stackTop != None):
            temp = self.stackTop
            self.stackTop = self.stackTop.next
            del temp
            self.nNodes -= 1
        else:
            print("Cannot remove from an empty stack.")

    def size(self):
        return self.nNodes

    def __str__(self):
        retstr = "" + "top | "
        s = self.stackTop
        while (s != None):
            retstr += str(s.data) + " "
            s = s.next
        retstr += "| bottom"
        return retstr

    # make a copy of otherStack to this stack.*/
    def copyStack(self, otherStack):
        """Deep-copy otherStack's chain into this stack (same order)."""
        if (self.stackTop != None):  # if stack is nonempty, make it empty
            self.initializeStack()
        if (otherStack.stackTop == None):
            self.stackTop = None
        else:
            current = otherStack.stackTop
            # self.stackTop = new Node<Type>; #create the node
            self.stackTop = Node(current.data)
            self.stackTop.next = None  # set the next field of the node to NULL
            last = self.stackTop
            current = current.next
            # copy the remaining stack
            while (current != None):
                newNode = Node(current.data)
                newNode.next = None
                last.next = newNode
                last = newNode
                current = current.next
                self.nNodes += 1
            self.nNodes += 1  # account for the first node created above

    def isEqual(self, other):
        """Element-wise comparison of two stacks.

        Destructively drains both stacks into lists, rebuilds them, compares
        top-to-bottom, then rebuilds them again before returning.
        NOTE(review): the final rebuild uses push(Node(...)), which wraps each
        saved Node in *another* Node (unlike the pushNode rebuild earlier) --
        confirm that double wrapping is intended.
        """
        if (not isinstance(other, Stack)):
            return False
        if (not self.isEmpty() and not other.isEmpty() and type(self.stackTop) != type(other.stackTop)):
            return False;
        if (self.isEmpty() and other.isEmpty()):
            return True;
        if (self.size() != other.size()):
            return False;
        lstA = []
        lstB = []
        result = False
        thisSize = self.size()
        otherSize = other.size()
        # Drain both stacks into lists (top first).
        for i in range(0, thisSize):
            lstA.append(self.top())
            self.pop();
        for i in range(0, otherSize):
            lstB.append(other.top())
            other.pop();
        self.stackTop = None
        self.nNodes = 0
        # Rebuild both stacks in their original order.
        for j in range(thisSize - 1, -1, -1):
            self.pushNode(Node(lstA[j]))
        for j in range(otherSize - 1, -1, -1):
            other.pushNode(Node(lstB[j]))
        i = 0
        # Compare element by element, popping matched pairs.
        while (not self.isEmpty() and not (other.isEmpty())):
            if (self.top() == other.top()):
                t = self.top()
                self.pop()
                o = other.top()
                other.pop()
                if (self.isEmpty() and other.isEmpty()):
                    result = True
                    break
                i += 1
            else:
                result = False
                break
        # Drain whatever the comparison left behind, then rebuild once more.
        while (not other.isEmpty()):
            o = other.top()
            other.pop()
        self.stackTop = None
        self.nNodes = 0
        for j in range(thisSize - 1, -1, -1):
            self.push(Node(lstA[j]))
        for j in range(otherSize - 1, -1, -1):
            other.push(Node(lstB[j]))
        return result

    def reverse(self):
        """Reverse the stack in place by flipping the next pointers."""
        current = prev = self.stackTop
        current = current.next
        prev.next = None
        while (current != None):
            succ = current.next
            current.next = prev
            prev = current
            current = succ
        self.stackTop = prev

    def print(self):
        """Print the stack contents from top to bottom."""
        s = self.stackTop;
        print("top ", end="")
        while (s != None):
            print(str(s.data) + " ", end="")
            s = s.next;
        print(" bottom")
        print()

    def deleteInnerNode(self, loc):
        """Delete and return the payload at 0-based depth `loc` from the top.

        Rebuilds the stack through a scratch stack, preserving the order of
        the remaining elements.
        """
        s = Stack()
        assert self.stackTop != None, "Stack is Empty"
        current = self.stackTop
        i = 0
        size = self.size()
        assert loc < size and not (
            self.isEmpty()), "Stack is Empty or location of deleted node is outside of stack size"
        # Walk down to the node being removed so its payload can be returned.
        while (i < loc and current != None):
            # print("i="+str(i))
            current = current.next
            i += 1
        n = current
        i = 0
        size = self.size()
        # Pop everything, keeping all but position `loc` on the scratch stack.
        while (not self.isEmpty() and i < size):
            if (i != loc):
                s.push(self.stackTop.data)
                self.pop()
            else:
                self.pop()
            i += 1
        s.reverse()  # scratch stack is in reverse order; restore it
        self.stackTop = s.stackTop
        self.nNodes = s.size()
        return n.data

    def getInnerNode(self, loc):
        """Return (without removing) the payload at 0-based depth `loc`."""
        s = Stack()
        assert self.stackTop != None, "Stack is Empty"
        current = self.stackTop
        i = 0
        size = self.size()
        assert loc < size, "requested location outside of stack size"
        while (i < loc and current != None):
            current = current.next
            i += 1
        return current.data

    def __eq__(self, other):
        return self.isEqual(other)

    def __ne__(self, other):
        return (not self.isEqual(other))
class SLinkedList:
    """Singly linked list of Nodes with head/tail pointers and a node count.

    insert() keeps the list sorted ascending by payload; deleteItem and
    selectionSort manipulate node links/payloads directly, so statement order
    matters throughout.
    """
    def __init__(self):
        self.head = None
        self.tail = None
        self.count = 0

    def addAtHead(self, newNode):
        """Prepend an existing Node."""
        self.count += 1
        if (self.head == None):  # empty
            self.head = newNode
            self.tail = newNode
        else:
            newNode.next = self.head
            self.head = newNode

    def addAtTail(self, newNode):
        """Append an existing Node."""
        self.count += 1
        if (self.head == None):  # empty
            self.head = newNode
            self.tail = newNode
        else:
            self.tail.next = newNode
            self.tail = newNode

    def insert(self, d):
        """Insert Node `d` keeping payloads in ascending order."""
        if (self.head == None):
            self.addAtHead(d)
        elif (d.data <= self.head.data):
            self.addAtHead(d)
        elif (d.data >= self.tail.data):
            self.addAtTail(d)
        else:
            # Splice into the middle: walk until t1 is the first node whose
            # payload is >= d's, keeping t2 one step behind.
            n = d
            self.count += 1
            t1 = self.head
            t2 = None
            while (d.data > t1.data):
                t2 = t1
                t1 = t1.next
            n.next = t1
            t2.next = n

    def __str__(self):
        current = self.head
        lstr = ""
        while (current != None):
            lstr = lstr + str(current.data).rjust(4) + " --> "
            current = current.next
        lstr = lstr + " None"
        return lstr

    def isEmpty(self):
        return self.head == None

    def contains(self, searchItem):
        """Linear search by payload equality."""
        current = self.head
        while (current != None):
            if (current.data == searchItem):
                return True;
            current = current.next
        return False

    def deleteItem(self, deleteItem):
        """Delete the first node whose payload equals deleteItem.

        Prints a message (instead of raising) for an empty list or a missing
        item; keeps head, tail and count consistent.
        """
        current = Node(None)  # current is a pointer to traverse the list
        trailCurrent = Node(None)  # pointerjust before current bool found
        if (self.head == None):  # Case 1; the list is empty.
            print("Cannot delete from an empty list.")
        else:
            if (self.head.data == deleteItem):  # Case2
                current = self.head
                self.head = self.head.next
                self.count -= 1
                if (self.head == None):  # the list has only one node
                    self.tail = None
                del current
            else:  # search the list for the node with the given info
                found = False
                trailCurrent = self.head  # set trailCurrent to point to the first node
                current = self.head.next  # set current to point to the second node
                while (current != None and not found):
                    if (current.data != deleteItem):
                        trailCurrent = current
                        current = current.next
                    else:
                        found = True
                if (found):  # Case 3; if found, delete the node
                    trailCurrent.next = current.next;
                    self.count -= 1
                    if (self.tail == current):  # node to be deleted was the last node
                        self.tail = trailCurrent;  # update the value of tail
                    del current  # delete the node from the list
                else:
                    print("The item to be deleted is not in the list.")

    def swap(self, p1, p2):
        """Exchange the payloads of two nodes (links untouched)."""
        temp = Node(None)
        temp.data = p1.data;
        p1.data = p2.data;
        p2.data = temp.data;

    def selectionSort(self):
        """In-place selection sort by payload, swapping payloads not nodes."""
        start = Node(None)
        current = Node(None)
        start = self.head;
        if (self.isEmpty()):
            return
        while (start.next != None):
            min = start
            current = start.next
            while (current != None):
                # Find minimum element in the list
                if (min.data > current.data):
                    min = current
                current = current.next
            # print(current.data)
            # swap minimum element with start location
            self.swap(start, min)
            start = start.next

    def __contains__(self, value):
        # Same behaviour as contains(); enables `value in linked_list`.
        current = self.head
        while (current != None):
            if (current.data == value):
                return True
            current = current.next
        return False
class Deck(Stack):
    """A Stack of Cards with blackjack helpers: printing, scoring, moving
    cards between decks, and clearing."""
    def __init__(self, sname=None, otherStack=None):
        # BUG FIX: previously forwarded hard-coded None for both arguments
        # (super().__init__(sname=None, otherStack=None)), silently discarding
        # the caller's name and disabling the copy-constructor path.
        super(Deck, self).__init__(sname=sname, otherStack=otherStack)

    def printDeck(self):
        """Print every card from top to bottom on one line."""
        s = self.stackTop;
        while (s != None):
            print(str(s.data) + " ", end="")
            s = s.next;
        print()

    def printPlayerDeck(self):
        """Print the player's cards followed by their point total."""
        print('Player cards: ', end="")
        s = self.stackTop;
        while (s != None):
            print(str(s.data) + " ", end="")
            s = s.next;
        print('\nPoints: ', self.total())

    def printDealerDeck(self):
        """Print the dealer's cards, their point total, and a separator."""
        print('Dealer Cards: ', end="")
        s = self.stackTop;
        while (s != None):
            print(str(s.data) + " ", end="")
            s = s.next;
        print('\nPoints: ', self.total())
        print('---------------------------')

    # Calculate Total Points
    def total(self):
        """Sum the numberval of every card in the deck."""
        s = self.stackTop;
        totValue = 0
        for i in range(self.size()):
            totValue = totValue + s.data.numberval
            s = s.next
            # (a redundant `if s == None: continue` guard was removed; the
            # loop already runs exactly size() times)
        return totValue

    def popToDeck(self, deck2):
        """Move this deck's top card onto deck2 (no-op when empty)."""
        if self.size() != 0:
            deck2.push(self.top())
            self.pop()

    def clearStack(self):
        """Remove every card from this deck."""
        for i in range(self.size()):
            self.pop()
class Cards(object):
    """One playing card, e.g. 'H5' or 'D10': suit letter(s) then rank.

    `value` is the rank string; `numberval` is its blackjack point value
    (face cards 10, ace always 11).
    """

    # Blackjack point value for every recognised rank.
    _RANK_POINTS = {
        'K': 10, 'Q': 10, 'J': 10, 'A': 11, '10': 10,
        '9': 9, '8': 8, '7': 7, '6': 6, '5': 5,
        '4': 4, '3': 3, '2': 2,
    }

    def __init__(self, card):
        self.card = card
        # The rank is the last one or two characters of the card code.
        if len(card) == 2:
            self.value = card[-1]
        if len(card) == 3:
            self.value = card[-2:]
        # Table lookup replaces the long if-chain; unrecognised ranks leave
        # numberval unset, exactly as before.
        if self.value in self._RANK_POINTS:
            self.numberval = self._RANK_POINTS[self.value]

    def __str__(self):
        return self.card
class Game(object):
    """One blackjack session: plays hands from mainDeck into the player's and
    dealer's decks, tracks wins, and prints a score summary.

    The "player" is an automaton: it always draws below 12, and between 12
    and 17 draws on a coin flip (random.randint).
    """
    def __init__(self, playerDeck, dealerDeck, mainDeck):
        self.playerwins = 0
        self.dealerwins = 0
        self.playerDeck = playerDeck
        self.dealerDeck = dealerDeck
        self.mainDeck = mainDeck

    def playerwon(self):
        # ANSI bold escape codes around the message.
        print('\033[1m' + 'Player Won!!\n' + '\033[0m')
        self.playerwins += 1

    def dealerwon(self):
        print('\033[1m' + 'Dealer Won!!\n' + '\033[0m')
        self.dealerwins += 1

    def clearPlayerAndDealerDeck(self):
        self.dealerDeck.clearStack()
        self.playerDeck.clearStack()

    def start(self):
        """Play one hand: deal cards until the player stands/busts and the
        dealer reaches 17+ or busts."""
        while self.playerDeck.total() < 18 or self.dealerDeck.total() < 17:
            # random choice from player to draw card
            if 12 <= self.playerDeck.total() < 18:
                choice = random.randint(0, 1)
                if choice == 0 and self.dealerDeck.total() > 16:
                    break
                elif choice == 1:
                    self.mainDeck.popToDeck(self.playerDeck)
            # draw card to player deck and print deck
            if self.playerDeck.total() < 12:
                self.mainDeck.popToDeck(self.playerDeck)
            self.playerDeck.printPlayerDeck()
            # draw card to dealer deck
            if self.dealerDeck.total() < 17:
                self.mainDeck.popToDeck(self.dealerDeck)
            # break if went over 21
            if self.playerDeck.total() > 21:
                self.dealerDeck.printDealerDeck()
                break
            self.dealerDeck.printDealerDeck()
            if self.dealerDeck.total() > 21:
                break

    def results(self):
        # Decide winner and clear decks
        if self.playerDeck.total() > 21:
            self.dealerwon()
            self.clearPlayerAndDealerDeck()
            return
        if self.dealerDeck.total() > 21 or self.playerDeck.total() > self.dealerDeck.total():
            self.playerwon()
            self.clearPlayerAndDealerDeck()
            return
        else:
            # Dealer wins ties and higher totals.
            self.dealerwon()
            self.clearPlayerAndDealerDeck()

    def showScore(self):
        """Print win percentages and counts for both sides."""
        PlayerPercentage = self.playerwins / (self.playerwins + self.dealerwins)
        DealerPercetage = self.dealerwins / (self.playerwins + self.dealerwins)
        print('_______________SCORE______________')
        print('Player: ', '{:.0%}'.format(PlayerPercentage), 'of games with ', self.playerwins, 'Wins')
        print('Dealer: ', '{:.0%}'.format(DealerPercetage), 'of games with ', self.dealerwins, 'Wins')
| {"/main.py": ["/datastructures.py"]} |
43,553 | Rrm129/BlackJackSimulation | refs/heads/master | /main.py | from datastructures import *
import random
def readDeck(deck, filename):
    """Push one Cards object per line of *filename* onto *deck*.

    Each line is stripped of trailing whitespace before being wrapped in a
    Cards instance.
    """
    # BUG FIX: the file handle was previously opened and never closed; the
    # context manager guarantees it is released even on error.
    with open(filename, 'r') as car:
        for i in car:
            card = i.rstrip()
            deck.push(Cards(card))
def shuffle(deck):
    # Riffle-style shuffle: split *deck* into two halves (deck1, deck2), then
    # interleave the halves back onto *deck* one card from each at a time.
    deck1 = Deck()
    deck2 = Deck()
    decksize = deck.size()
    # Move the top half onto deck1.
    for i in range(decksize):
        if deck.size() == decksize/2:
            break
        deck1.push(deck.top())
        deck.pop()
    # Move the remaining half onto deck2.
    for i in range(decksize):
        if deck.isEmpty():
            break
        deck2.push(deck.top())
        deck.pop()
    # Interleave the two halves back together.
    # NOTE(review): the size check compares against decksize/2 (a float under
    # Python 3) and the rebuild stops when deck2 empties, so an ODD-sized deck
    # would strand cards in deck1 -- confirm decks are always even-sized
    # (here they are: multiples of 52 built in main()).
    for i in range(deck1.size() + deck2.size()):
        if deck2.isEmpty():
            break
        deck.push(deck1.top())
        deck1.pop()
        deck.push(deck2.top())
        deck2.pop()
def FisherYates(deck):
    # Fisher-Yates-style shuffle: repeatedly pick a uniformly random card from
    # the remaining deck, move it onto a scratch deck, and finally copy the
    # scratch deck back. Unlike the riffle shuffle() above, every permutation
    # is reachable, which is why it is preferred for card games.
    deck2 = Deck()
    n = deck.size()
    for i in range(deck.size()-1, 0, -1):
        loc = random.randint(0, n-1)
        deck2.push(deck.getInnerNode(loc))
        deck.deleteInnerNode(loc)
        n -= 1
        # When one card remains, move it directly and stop.
        if n == 1:
            deck2.push(deck.top())
            deck.pop()
            break
    deck.copyStack(deck2)
def main():
    # Build a shoe of three 52-card decks from Cards.txt, shuffle it, then
    # play hands until fewer than six cards remain, printing a running score.
    file = "Cards.txt"
    playerDeck = Deck()
    dealerDeck = Deck()
    mainDeck = Deck()
    # Three decks used
    readDeck(mainDeck, file)
    readDeck(mainDeck, file)
    readDeck(mainDeck, file)
    g = Game(playerDeck, dealerDeck, mainDeck)
    FisherYates(g.mainDeck)
    n = 1
    while g.mainDeck.size() > 5:
        print('Game #', n)
        g.start()
        g.results()
        n += 1
    g.showScore()


if __name__ == '__main__':
    main()
| {"/main.py": ["/datastructures.py"]} |
43,556 | david128kim/region_based_project | refs/heads/master | /scrapbooking_source.py | import os
#import commands
import subprocess
import string
from app_r1 import execution_path_r1
from app_r2 import execution_path_r2
# Post-process the per-region KLEE execution paths produced by app_r1/app_r2:
#  1. extract global declaration lines from the user's C program,
#  2. glue each region path into a standalone compilable C file and lower it
#     to LLVM IR with clang,
#  3. emit a symbolic "whole program" for KLEE,
#  4. splice every (region1 path, region2 path) pair into one combined IR file.
source_line, source_r1, source_r2, ir_r1, ir_r2, ir_line, = [], [], [], [], [], []
counter_r1, entry_r1, return_r1, counter_r2, entry_r2, return_r2, region_combination, brackets = 0, 0, 0, 0, 0, 0, 0, 0
program_name = input("Please key in your program name: \n")
shared_data = input("Please key in your shared data name: \n")
file = open(program_name)
whole = open('whole_program.c', 'w')
# Pass 1: collect top-level declaration lines -- lines at brace depth <= 1
# containing no parentheses/braces, no 'return' and no '+'.
for line in file:
    if "{" in line:
        brackets += 1
    elif "}" in line:
        brackets -= 1
    elif "(" not in line and "{" not in line and "}" not in line:
        # break
        if brackets <= 1 and "return" not in line and "+" not in line:
            source_line.append(line)
            print(line)
        else:
            continue
file.close()
# Pass 2: wrap every region-1 execution path in main() and compile it to IR,
# then file the sources and IR away into exe_source/ and exe_IR/.
for i in range(1, execution_path_r1+1):
    source_r1 = []
    region1 = open('exe_r1_path'+str(i)+'.c', 'r')
    for line in region1:
        source_r1.append(line)
    region1.close()
    sequential = open('exe_r1_path'+str(i)+'_ok.c', 'a')
    for k in range(0, len(source_line)):
        sequential.write(source_line[k])
    sequential.write("int main() {")
    for k in range(0, len(source_r1)):
        sequential.write(source_r1[k])
    sequential.write("return 0; }")
    sequential.close()
    os.system('clang -Os -S -emit-llvm exe_r1_path'+str(i)+'_ok.c -o exe_r1_path'+str(i)+'.ll')
    os.system('mv exe_r1_path'+str(i)+'_ok.c exe_source/')
    os.system('mv exe_r1_path'+str(i)+'.ll exe_IR/')
    os.system('rm exe_r1_path'+str(i)+'.c')
# Pass 3: identical treatment for every region-2 execution path.
for i in range(1, execution_path_r2+1):
    source_r2 = []
    region2 = open('exe_r2_path'+str(i)+'.c', 'r')
    for line in region2:
        source_r2.append(line)
    region2.close()
    sequential = open('exe_r2_path'+str(i)+'_ok.c', 'a')
    for k in range(0, len(source_line)):
        sequential.write(source_line[k])
    sequential.write("int main() {")
    for k in range(0, len(source_r2)):
        sequential.write(source_r2[k])
    sequential.write("return 0; }")
    sequential.close()
    os.system('clang -Os -S -emit-llvm exe_r2_path'+str(i)+'_ok.c -o exe_r2_path'+str(i)+'.ll')
    os.system('mv exe_r2_path'+str(i)+'_ok.c exe_source/')
    os.system('mv exe_r2_path'+str(i)+'.ll exe_IR/')
    os.system('rm exe_r2_path'+str(i)+'.c')
# Pass 4: emit a whole program whose shared variable is made symbolic so KLEE
# can explore it.
# NOTE(review): source_r1/source_r2 still hold only the LAST path read by the
# loops above -- confirm that is the intended whole-program body.
whole.write('#include "../klee_src/include/klee/klee.h"\n')
for k in range(0, len(source_line)):
    whole.write(source_line[k])
whole.write('int main(int argc, char **argv) {\n')
whole.write('klee_make_symbolic(&'+shared_data+', sizeof('+shared_data+'), "'+shared_data+'");\n')
for k in range(0, len(source_r1)):
    whole.write(source_r1[k])
for k in range(0, len(source_r2)):
    whole.write(source_r2[k])
whole.write('return '+shared_data+'; }\n')
whole.close()
os.system('clang -Os -S -emit-llvm whole_program.c -o whole_program.ll')
# Pass 5: for every (region-1 path, region-2 path) pair, copy the IR lines
# between each path's 'define' and 'ret' markers into one combined file under
# labelled region1:/region2: sections.
for i in range(1, execution_path_r1+1):
    ir_r1 = []
    counter_r1 = 0
    region1 = open('exe_IR/exe_r1_path'+str(i)+'.ll', 'r')
    for line in region1:
        counter_r1 += 1
        ir_r1.append(line)
        if "define" in line:
            entry_r1 = counter_r1
        elif "ret" in line:
            return_r1 = counter_r1
    region1.close()
    for j in range(1, execution_path_r2+1):
        ir_r2 = []
        counter_r2 = 0
        region2 = open('exe_IR/exe_r2_path'+str(j)+'.ll', 'r')
        for line in region2:
            counter_r2 += 1
            ir_r2.append(line)
            if "define" in line:
                entry_r2 = counter_r2
            elif "ret" in line:
                return_r2 = counter_r2
        region2.close()
        region_combination += 1
        sequential = open('concurrent_program'+str(region_combination)+'.ll', 'a')
        #for k in range(0, len(ir_line)):
        #sequential.write(ir_line[k])
        sequential.write("region1: \n")
        for k in range(entry_r1, return_r1-1):
            sequential.write(ir_r1[k])
        sequential.write("region2: \n")
        for k in range(entry_r2, return_r2-1):
            sequential.write(ir_r2[k])
        sequential.close()
        os.system('mv concurrent_program'+str(region_combination)+'.ll exe_concurrent/')
43,557 | david128kim/region_based_project | refs/heads/master | /src_interleaving.py | import os
import subprocess
import itertools
import scrapbooking_klee
import time
######## global initialization section
# Scratch state shared by the analysis / enumeration / verification phases
# below; the t_* counters accumulate per-phase wall-clock time.
Region_Text, multilist, permutation, recording, Region_Index, states, dr_num = [], [], [], [], [], [], ""
count, p_count, p_flag, count_src, insert_num, Region_length, t_analyze, t_enumerate, t_verify = 0, 0, 1, 0, 0, 0, 0, 0, 0
######## testing info.
#print ("I_num: ", scrapbooking_klee.I_num)
#print ("p_num: ", scrapbooking_klee.p_num)
#print ("Valid-INPUT: ", scrapbooking_klee.ValidInputs)
#print ("INPUT-index: ", scrapbooking_klee.Inputs_Index)
########
t_start = time.time()
#for i in range(1, int(scrapbooking_klee.num_region)+1):
# NOTE(review): indentation in the archived copy of this file was mangled;
# the nesting below is a careful reconstruction -- verify against upstream.
#
# Outer loop: concurrent region pair (currently pinned to pair 1).
# Inner loop: one pass per KLEE-derived valid input value.
for i in range(1, 2):
    if int(scrapbooking_klee.I_num) < i:
        break
    for j in range(1, int(scrapbooking_klee.p_num)+1):
    #for j in range(1, 2):
        t_analyzing = time.time()
        testcase = open('testcase.c','w')
        ######## only for test _1_2
        file = open('exe_concurrent/concurrent_'+str(i)+'_'+str(j)+'.c', 'r')
        #file = open('exe_concurrent/concurrent_1_3.c', 'r')
        # ---- analysis phase: split the source into atomic segments ----
        # A "tie" marker is inserted after every line that must remain an
        # interleaving boundary (sync primitives / shared-data access).
        # NOTE(review): count_src and insert_num are module globals that are
        # never reset between j iterations -- confirm intentional.
        for line in file:
            Region_Text.append(line)
            count_src += 1
            if "mutex_lock" in line or "mutex_unlock" in line or "signal" in line or "wait" in line or "End" in line:
                Region_Text.insert(count_src + insert_num, "tie")
                insert_num += 1
            elif "mutex_lock" not in scrapbooking_klee.temp_ins or "mutex_unlock" not in scrapbooking_klee.temp_ins or "signal" not in scrapbooking_klee.temp_ins or "wait" not in scrapbooking_klee.temp_ins or scrapbooking_klee.shared_data in line:
                Region_Text.insert(count_src + insert_num, "tie")
                insert_num += 1
        print ("tie:", Region_Text)
        # drop a trailing marker, then split the joined text on "tie"
        if "tie" in Region_Text:
            if "tie" in Region_Text[len(Region_Text)-1]:
                del Region_Text[len(Region_Text)-1]
        #print ("tie: ", Region_Text)
        Region_Text = " ".join(Region_Text)
        Region_Text = Region_Text.split('tie')
        if Region_Text[len(Region_Text)-1] == "":
            Region_Text.pop()
        print ("untie: ", Region_Text)
        ######## update new lengh of each sublist(region) ########
        # Region_Index[r] = segment count of region r+1, recovered from the
        # /*R1..R4*/ line tags embedded by the path extractor.
        for k in range(0, len(Region_Text)):
            if "R1" in Region_Text[k]:
                if len(Region_Index) == 0:
                    Region_Index.insert(0, k+1)
                else:
                    del Region_Index[0]
                    Region_Index.insert(0, k+1)
            elif "R2" in Region_Text[k]:
                if len(Region_Index) == 1:
                    Region_length = k
                    Region_Index.insert(1, k+1-Region_length)
                else:
                    del Region_Index[1]
                    Region_Index.insert(1, k+1-Region_length)
            elif "R3" in Region_Text[k]:
                if len(Region_Index) == 2:
                    Region_length = k
                    Region_Index.insert(2, k+1-Region_length)
                else:
                    del Region_Index[2]
                    Region_Index.insert(2, k+1-Region_length)
            elif "R4" in Region_Text[k]:
                if len(Region_Index) == 3:
                    Region_length = k
                    Region_Index.insert(3, k+1-Region_length)
                else:
                    del Region_Index[3]
                    Region_Index.insert(3, k+1-Region_length)
            #print (Region_Text[k])
        #print ("Region_Index: ", Region_Index)
        ######## divide by index pointer ########
        # multilist: one sublist of atomic segments per region
        for length in Region_Index:
            multilist.append([Region_Text[k+count] for k in range(length)])
            count += length
        print ("Atomic: ", multilist)
        #count = 0
        ### ready to enumerate ###
        t_StoE = time.time()
        t_analyze += t_StoE-t_analyzing
        # ---- enumeration phase: every order-preserving interleaving ----
        new_order, old_order = (), ()
        inter_permutation = [l for l, group in enumerate(multilist) for j in range(len(group))]
        for new_order in itertools.permutations(inter_permutation):
            if new_order <= old_order:
                continue  # skip duplicate multiset permutations
            old_order = new_order
            iters = [iter(group) for group in multilist]
            for l in new_order:
                test = next(iters[l])
                permutation.append(test)
                testcase.write(test)
        file.close()
        testcase.close()
        t_EofE = time.time()
        t_enumerate += t_EofE-t_StoE
        ### end of enumeration ###
        # ---- verification phase: materialize, deadlock-filter, compile
        # and execute each interleaving; classify the observed state ----
        while (p_count <= len(Region_Text)):
            if not permutation:
                #print ("Permutation is empty. ")
                break
            os.system('cp sample.c interleave-'+str(p_flag)+'.c')
            generating = open('interleave-'+str(p_flag)+'.c', 'a')
            generating.write(scrapbooking_klee.shared_data+'= '+scrapbooking_klee.ValidInputs[j-1]+'; \n')
            '''
            ########## Insert Valid Inputs here ##########
            print ("p_num: ", scrapbooking_klee.p_num)
            if scrapbooking_klee.p_num == 1:
                continue
            else:
                generating.write(scrapbooking_klee.shared_data+'= '+scrapbooking_klee.ValidInputs[j-1]+'; \n')
            ##########
            '''
            # pop one complete interleaving (len(Region_Text) segments)
            for m in range(0, len(Region_Text)):
                recording.append(permutation.pop())
                p_count += 1
            #if p_count == len(Region_Text):
            p_count = 0
            for n in range(len(Region_Text)-1, -1, -1):
                generating.write(recording[n])
            generating.write('printf("%d", '+scrapbooking_klee.shared_data+'); \n')
            generating.write("return 0; \n}")
            generating.close()
            #time.sleep(0.1)
            DL_filter = subprocess.getoutput('python DL_filter.py')
            #time.sleep(1)
            '''
            print ("DL_filter: ", DL_filter)
            print ("\n")
            '''
            if "deadlock" in DL_filter and "inexistent" not in DL_filter:
                print ("now detect potential deadlock......")
                print ("===================================================================")
                print ("********Examining interleave", p_flag)
                print ("    ", DL_filter)
                print ("===================================================================\n")
                os.system('cp interleave-'+str(p_flag)+'.c exe_concurrent/interleaving-'+str(p_flag)+'_'+str(j)+'.c')
                #break
            #'''
            elif "inexistent" in DL_filter and "deadlock" not in DL_filter:
                print ("now exclude bug-unrelated interleaving......")
                print ("===================================================================")
                print ("********Examining interleave", p_flag)
                print ("skip the ", DL_filter)
                print ("===================================================================\n")
            #'''
            else:
                print ("now detect potential data race......")
                os.system('gcc -w interleave-'+str(p_flag)+'.c -o interleave'+str(p_flag)+' -lpthread')
                exe_result = subprocess.getoutput('./interleave'+str(p_flag))
                if (exe_result not in states):
                    if states:
                        # output differs from every state seen so far -> bug
                        print ("===================================================================")
                        print ("********Examining interleave", p_flag)
                        print (" We find a bug! Error symbolic state: ", exe_result)
                        print ("===================================================================\n")
                    else:
                        if "dump" in exe_result or "fault" in exe_result:
                            print ("===================================================================")
                            print ("********Examining interleave", p_flag)
                            print (" It is the crash state: ", exe_result)
                            print ("===================================================================\n")
                            #break
                        else:
                            print ("===================================================================")
                            print ("********Examining interleave", p_flag)
                            print (" It is the first new symbolic state: ", exe_result)
                            print ("===================================================================\n")
                    states.append(exe_result)
                    dr_num += str(p_flag)
                    dr_num += ", "
                    os.system('cp interleave-'+str(p_flag)+'.c exe_concurrent/interleaving-'+str(p_flag)+'_'+str(j)+'.c')
                else:
                    states.append(exe_result)
                    print ("=========================================")
                    print ("********Examining interleave", p_flag)
                    print ("The latest symbolic state: ", exe_result)
                    print ("=========================================\n")
            p_flag += 1
            recording = []
        # per-input reset before the next valid input value
        Region_Text = []
        Region_Index = []
        multilist = []
        states = []
        #dr_num = ""
        p_flag = 1
        count = 0
        ### end of verification
        t_EofV = time.time()
        t_verify += t_EofV-t_EofE
#os.system('rm interleave*')
# ---- final report -------------------------------------------------------
# When more distinct symbolic states were recorded than valid input values,
# list the suspicious interleavings (dr_num); then print the per-phase
# timing breakdown accumulated above.
t_end = time.time()
if len(states) > int(scrapbooking_klee.p_num):
    print ("\n===============================================")
    print ("Please check suspicious "+dr_num+" interleaving")
    print ("===============================================\n")
print ("==============================")
print ("    Total        :", t_end-t_start)
print ("------------------")
print ("    Analysis     :", t_analyze)      # fixed label typo ("Analsis")
print ("------------------")
print ("    Enumeration  :", t_enumerate)
print ("------------------")
print ("    Verification :", t_verify)       # fixed label typo ("Verifation")
print ("==============================")
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,558 | david128kim/region_based_project | refs/heads/master | /ins_interleaving.py | import os
import subprocess
import string
import itertools
import time
import scrapbooking_klee
# Scratch state for the two-thread IR interleaving exploration below.
interleaving, a, b, temp, old_order = [], [], [], [], ()
combination = {}
counter_t1, counter_t2, t1_insert_number, t2_insert_number, end_tie = 0, 0, 0, 0, 0
# progress-message templates (only strpermutation is actually printed)
strthread1 = "SVA from thread 1: "
strthread2 = "SVA from thread 2: "
strmerge = "merge two thread function to main: "
strpermutation = "exhaustive representation of interleaving combination ... "
strwrite = "generate feasible testcase: "
#test region
constraints = [[1, 2], [3, 4], [5, 6]]
recording, filetest, a_tie, b_tie = [] ,[], [], []
counter, path_amount, file_length, temp_exe_result, counter_DL, HasOrder = 0, 0, 0, 0, 0, 0
lock_name, lock_1st = "", ""
# Detect whether the regions acquire the SAME first lock: HasOrder == 1
# means a consistent lock order, so lock..unlock spans can stay atomic in
# the extraction passes below.  lock_1st remembers the first lock operand
# seen; each region contributes only its first mutex_lock line.
for i in range(0, int(scrapbooking_klee.num_region)):
    file = open('exe_IR/exe_r'+str(i+1)+'.ll')
    for line in file:
        lock_split = line.split()
        if "mutex_lock" in line and (lock_1st == ""):
            #print ("length: ", len(lock_split)-2)
            #print (lock_split[len(lock_split)-2])
            lock_1st = lock_split[len(lock_split)-2]
            break
        elif "mutex_lock" in line and (lock_1st != ""):
            if lock_1st == lock_split[len(lock_split)-2]:
                HasOrder = 1
            else:
                HasOrder = 0
            break
    file.close()
print ("Order?: ", HasOrder)
# Split the merged concurrent IR into per-thread instruction lists (a/b),
# mark atomicity boundaries with "tie" markers, then write every
# order-preserving interleaving of the resulting segments to testcase.ll.
# NOTE(review): indentation in the archived copy was mangled; nesting below
# is reconstructed -- verify against upstream.
for k in range(1, 2):
    #testcase = open('testcase'+str(k)+'.ll','w')
    testcase = open('testcase.ll','w')
    file = open('exe_concurrent/concurrent_program.ll')
    # ---- extract thread 1 (lines up to the "region2" label) ----
    #def extract_t1():
    counter_t1, t1_insert_number, a_tie, a = 0, 0, [], []
    insert_temp = 0
    for line in file:
        if "region1" not in line and "region2" not in line and "printf" not in line and "llvm.lifetime" not in line: #unnecessary prune
            a.append(line)
            counter_t1 += 1
            temp_split = line.split()
            #if ("mutex" in line) or ("signal" in line) or ("wait" in line):
            #if (("load" in line or "store" in line) and scrapbooking_klee.shared_data in line):
            #if (("load" in line or "store" in line) and scrapbooking_klee.shared_data in line) or ("mutex" in line) or ("signal" in line):
            #if ("mutex_unlock" in line) or ("signal" in line) or ("wait" in line):
            # a "tie" marker ends an atomic segment; with a consistent lock
            # order (HasOrder == 1) the lock..unlock span stays atomic
            if HasOrder == 1:
                if "mutex_lock" in line:
                    if lock_name == "":
                        #lock_name = temp_split[len(lock_split)-2]
                        lock_name = temp_split[len(temp_split)-3]
                        #print (temp_split[len(temp_split)-3])
                        '''
                        if counter_t1+t1_insert_number-1 == 0:
                            continue
                        else:
                            a.insert(counter_t1+t1_insert_number-1, "tie")
                            insert_temp += 1
                            t1_insert_number += 1
                            end_tie = counter_t1+t1_insert_number
                        '''
                    else:
                        continue
                elif "mutex_unlock" in line or ("signal" in line) or ("wait" in line):
                    #print (temp_split[len(temp_split)-3])
                    #if "mutex_unlock" in line and temp_split[len(lock_split)-2] == lock_name:
                    if "mutex_unlock" in line and temp_split[len(temp_split)-3] == lock_name:
                        lock_name = ""
                        a.insert(counter_t1+t1_insert_number, "tie")
                        insert_temp += 1
                        t1_insert_number += 1
                        end_tie = counter_t1+t1_insert_number
                    #elif "mutex_unlock" in line and temp_split[len(lock_split)-2] != lock_name:
                    elif "mutex_unlock" in line and temp_split[len(temp_split)-3] != lock_name:
                        continue
                    else:
                        a.insert(counter_t1+t1_insert_number, "tie")
                        insert_temp += 1
                        t1_insert_number += 1
                        end_tie = counter_t1+t1_insert_number
            else:
                if ("mutex_lock" in line) or ("mutex_unlock" in line) or ("signal" in line) or ("wait" in line):
                    #if "load" in line or "store" in line:
                    #if "store" in line:
                    a.insert(counter_t1+t1_insert_number, "tie")
                    insert_temp += 1
                    t1_insert_number += 1
                    end_tie = counter_t1+t1_insert_number
            '''
            a.insert(counter_t1+t1_insert_number, "tie")
            insert_temp += 1
            t1_insert_number += 1
            end_tie = counter_t1+t1_insert_number
            '''
        elif "region2" in line:
            break
    # drop the trailing marker, then split into atomic segments
    del a[end_tie-1]
    t1_insert_number -= 1
    print ("a: ", a)
    a_tie = " ".join(a)
    a_tie = a_tie.split('tie')
    if a_tie[len(a_tie)-1] == "":
        a_tie.pop()
    print ("a_tie: ", a_tie)
    print ("a_tie_length: ", len(a_tie))
    # ---- extract thread 2 (remainder of the file) ----
    #def extract_t2():
    counter_t2, t2_insert_number, b_tie, b = 0, 0, [], []
    insert_temp = 0
    for line in file:
        if "printf" not in line and "llvm.lifetime" not in line: #unnecessary prune
            b.append(line)
            counter_t2 += 1
            temp_split = line.split()
            #if ("mutex" in line) or ("signal" in line) or ("wait" in line):
            #if (("load" in line or "store" in line) and scrapbooking_klee.shared_data in line):
            #if (("load" in line or "store" in line) and scrapbooking_klee.shared_data in line) or ("mutex" in line) or ("signal" in line):
            #if ("mutex_unlock" in line) or ("signal" in line) or ("wait" in line):
            if HasOrder == 1:
                if "mutex_lock" in line:
                    if lock_name == "":
                        lock_name = temp_split[len(temp_split)-3]
                        #print (temp_split[len(temp_split)-3])
                        '''
                        if counter_t2+t2_insert_number-1 == 0:
                            continue
                        else:
                            b.insert(counter_t2+t2_insert_number-1, "tie")
                            insert_temp += 1
                            t2_insert_number += 1
                            end_tie = counter_t2+t2_insert_number
                        '''
                    else:
                        continue
                elif "mutex_unlock" in line or ("signal" in line) or ("wait" in line):
                    #print (temp_split[len(temp_split)-3])
                    if "mutex_unlock" in line and temp_split[len(temp_split)-3] == lock_name:
                        lock_name = ""
                        b.insert(counter_t2+t2_insert_number, "tie")
                        insert_temp += 1
                        t2_insert_number += 1
                        end_tie = counter_t2+t2_insert_number
                    elif "mutex_unlock" in line and temp_split[len(temp_split)-3] != lock_name:
                        continue
                    else:
                        b.insert(counter_t2+t2_insert_number, "tie")
                        insert_temp += 1
                        t2_insert_number += 1
                        end_tie = counter_t2+t2_insert_number
            else:
                if ("mutex_lock" in line) or ("mutex_unlock" in line) or ("signal" in line) or ("wait" in line):
                    #if "load" in line or "store" in line:
                    #if "store" in line:
                    b.insert(counter_t2+t2_insert_number, "tie")
                    insert_temp += 1
                    t2_insert_number += 1
                    end_tie = counter_t2+t2_insert_number
            '''
            b.insert(counter_t2+t2_insert_number, "tie")
            insert_temp += 1
            t2_insert_number += 1
            end_tie = counter_t2+t2_insert_number
            '''
    del b[end_tie-1]
    t2_insert_number -= 1
    #b.insert(len(b), "tie")
    print ("b: ", b)
    b_tie = " ".join(b)
    b_tie = b_tie.split('tie')
    if b_tie[len(b_tie)-1] == "":
        b_tie.pop()
    print ("b_tie: ", b_tie)
    print ("b_tie length: ", len(b_tie))
    print ("##################################################################")
    #for k in range(1, region_combination+1):
    #testcase = open('testcase'+str(k)+'.ll','w')
    #file = open('exe_concurrent/concurrent_program'+str(k)+'.ll')
    #extract_t1()
    #extract_t2()
    # divide to two sublist. depends on list length
    if(len(a_tie) >= len(b_tie)):
        combination = a_tie + b_tie
        interleaving = [combination[i:i+len(a_tie)] for i in range(0, len(combination), len(a_tie))]
        print ("interleaving: ", interleaving)
    else:
        combination = b_tie + a_tie
        interleaving = [combination[i:i+len(b_tie)] for i in range(0, len(combination), len(b_tie))]
        print ("interleaving: ", interleaving)
    # list permutation
    print (strpermutation)
    new_order, old_order = (), ()
    inter_permutation = [i for i, group in enumerate(interleaving) for j in range(len(group))]
    for new_order in itertools.permutations(inter_permutation):
        if new_order <= old_order:
            continue  # skip duplicate multiset permutations
        old_order = new_order
        iters = [iter(group) for group in interleaving]
        for i in new_order:
            test = next(iters[i])
            #print (test)
            testcase.write(test)
    file.close()
    testcase.close()
    print ("##################################################################")
# ---- verify each enumerated interleaving of testcase.ll ----
# Each interleaving occupies (len(a)+len(b) - marker counts) lines; pop one
# interleaving at a time into answer.ll, deadlock-filter it, compile it
# natively and execute under a timeout; a change in the observed output
# between paths signals a bug.
#file = open('testcase'+str(k)+'.ll')
file = open('testcase.ll')
flag = 1
for line in file:
    temp.append(line)
#print temp
#temp.pop()
print ("temp_length: ", len(temp))
file_length = len(temp)
file.close()
print ("a_l, b_l: ", len(a), len(b))
print ("t1_i, t2_i: ", t1_insert_number, t2_insert_number)
path_amount = file_length/(len(a)+len(b)-t1_insert_number-t2_insert_number)
print ("total path amount: ", file_length/(len(a)+len(b)-t1_insert_number-t2_insert_number))
####################################################################################################
#for i in range(1, len(scrapbooking_klee.ValidInputs)+1):
for i in range(1, 2):
    #while(counter < file_length):
    while(counter <= (len(a)+len(b)-t1_insert_number-t2_insert_number)):
        if not temp:
            print ("temp is empty. ")
            break
        generating = open('answer.ll', 'w')
        # pop one interleaving's worth of lines (in reverse) ...
        for i in range(0,len(a)+len(b)-t1_insert_number-t2_insert_number):
            #if not temp:
                #print ("temp is empty. ")
                #break
            #else:
            recording.append(temp.pop())
            counter += 1
        if counter == (len(a)+len(b)-t1_insert_number-t2_insert_number):
            counter = 0
        # ... and write them back out in original order
        for i in range(len(a)+len(b)-t1_insert_number-t2_insert_number-1, -1, -1):
            #if "store" in recording[i] and scrapbooking_klee.shared_data not in recording[i]:
                #continue
            #else:
            generating.write(recording[i])
        #generating.write(' %4 = sext i32 %3 to i64 \n %5 = inttoptr i64 %4 to i8* \n')
        #generating.write(' %6 = tail call i32 (i8*, ...)* @printf(i8* %5) #2 \n')
        #generating.write(' %7 = load i32* @Global, align 4, !tbaa !1 \n')
        #generating.write(' %4 = tail call i32 (i8*, ...)* @printf(i8* getelementptr inbounds ([3 x i8]* @.str, i64 0, i64 0), i32 %3) #2 \n %5 = load i32* @Global, align 4, !tbaa !1 \n')
        generating.close()
        # patch raw IR into a loadable module, then deadlock-filter it
        os.system('python scrapbooking_IR.py')
        temp_filter = subprocess.getoutput('python filter.py')
        print ("filter: ", temp_filter)
        if "deadlock" in temp_filter and "inexistent" not in temp_filter:
            counter_DL += 1
            print ("error "+str(flag)+" : "+str(temp_filter)+" accumulated error: "+str(counter_DL))
            #continue
        #elif "inexistent" in temp_filter:
            #continue
        else:
            temp_llc = subprocess.getoutput('llc -O3 -march=x86-64 answer_ok.ll -o answer_ok.s')
            if "error" in temp_llc:
                print ("error at number of file, and its cause: ", flag, temp_llc)
                break
            os.system('llc -O3 -march=x86-64 answer_ok.ll -o answer_ok.s')
            os.system('gcc -o answer_ok answer_ok.s -lpthread')
            #os.system('timeout 3 ./answer_ok')
            exe_result = subprocess.getoutput('timeout 0.1 ./answer_ok \n')
            os.system('cp answer.ll answer_'+str(flag)+'.ll')
            os.system('cp answer_ok.ll answer_ok_'+str(flag)+'.ll')
            #exe_result = subprocess.getoutput('./answer_ok \n')
            #temp_result = exe_result
            if (flag > 1) and (temp_result != exe_result):
                print ("now: ", exe_result)
                print ("We find a bug!")
                break
            else:
                temp_result = exe_result
                print ("last: ", temp_result)
        #os.system('rm -r klee-last && rm -r klee-out-* && rm answer*')
        flag += 1
        recording = []
        file_length -= (len(a)+len(b)-t1_insert_number-t2_insert_number)
        print ("verifying path",flag-1)
#os.system('rm testcase.ll')
print ("deadlock(DL) number: ", counter_DL)
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,559 | david128kim/region_based_project | refs/heads/master | /app_r1.py | import os
import subprocess
import string
from tree import Tree
# Tree traversal mode constants and the working state for extracting the
# execution paths of region 1 from region_text/counter.txt into r1_path.c.
(_ROOT, _DEPTH, _BREADTH) = range(3)
tree = Tree()
file = open("region_text/counter.txt")
region_path = open("r1_path.c", "w")
treeID, height, brackets_match, branch_boundrary, counter_if, execution_path_r1, counter_bp, fork = 0, 0, 0, 0, 0, 0, 0, 0
info, node_height, branch_layer, branch_point, branch_leaf, breakpoint, dfs, dfs_temp, cond_num, if_layer, branch_type = [], [], [], [], [], [], [], [], [], [], []
start, end, counter_r2, r2_flag, main_flag, temp_node_length, temp_dfs, temp_pop = 1, 0, 0, 0, 0, 0, 0, 0
constrain, constrain_state, path_cond_state, path_cond, cond_text, p_num, path, cond_list, cond_dfs, cond_final_list = "", [], [], [], [], [], [], [], [], []
temp_layer, temp_branch, temp_brackets, branch_end, info_length, loop_flag, gap, temp_i = 0, 0, 0, 0, 0, 0, 0, 0
loop_body, loop_start, loop_end, loop_brackets, temp_info, temp_body, branch_start, branch_finish = [], [], [], [], [], [], [], []
fork_cond, fork_state = [], []
def extract_branch(a, b):
    """Strip the ``if (...) {`` syntax from branch line *b*.

    Removes the ``if`` keyword, the first ``(``/``)`` pair, and every
    ``{``, leaving just the condition text.  The first parameter is kept
    only for backward compatibility with existing call sites: the original
    implementation immediately overwrote it, so it is ignored here.

    Fixes in this revision: the unreachable ``return b`` that followed
    ``return a`` is removed, and the clobbered parameter is no longer
    reused as a local.
    """
    cleaned = b.replace("if", "")
    cleaned = cleaned.replace("(", "", 1)   # drop only the outermost parens
    cleaned = cleaned.replace(")", "", 1)
    cleaned = cleaned.replace("{", "")
    return cleaned
############## break at region2: ##############
# Collect region-1 lines into info; stop as soon as the "region2" marker
# appears, remembering its line number in r2_flag.
for line in file:
    counter_r2 += 1
    if "region2" in line:
        r2_flag = counter_r2
        break
    info.append(line)
############# loop heuristic reduction AND printing instead pthread_cond_wait ##############
# Pass 1: rewrite each "while" head as an "if" and record every loop-body
# span (loop_start/loop_end) via brace matching.
for i in range(1, len(info)):
    if "{" in info[i]:
        brackets_match += 1
        if "while" in info[i]:
            constrain = info[i].replace("while", "if")
            del info[i]
            info.insert(i, constrain)
            loop_start.append(i)
            loop_brackets.append(brackets_match)
    elif "}" in info[i]:
        brackets_match -= 1
        if (loop_brackets != []) and (brackets_match == loop_brackets[len(loop_brackets)-1] - 1):
            loop_end.append(i)
            loop_brackets.pop()
    '''
    elif "pthread_cond_wait" in info[i]:
        constrain = info[i].replace('pthread_cond_wait(&', 'printf ("wait ')
        constrain = constrain.replace(');' , '");')
        del info[i]
        info.insert(i, constrain)
    '''
#print ("original info: ", info)
############# eliminate while statement and implement loop heuristic reduction
# Pass 2: duplicate each recorded loop body once in place (bounded loop
# "unrolling"), skipping loops whose body starts with a pthread_cond_*
# call.  NOTE(review): loop_end is reversed to pair heads with ends --
# presumably correct for the nesting patterns seen in practice; confirm
# for deeply nested loops.
loop_end.reverse()
info_length = len(info)
for i in range(len(loop_start)-1, -1, -1):
    for j in range(loop_start[i] + 1, loop_end[i] + gap):
        #if "while" not in info[j]:
        temp_body.append(info[j])
    #for j in range(loop_start[i], loop_end[i] + 1 + gap):
        #del info[loop_start[i]]
    #print ("temp_body: ", temp_body)
    #print (i)
    #if i == 0:
        #loop_body.extend(temp_body)
        #print ("loop_body: ", loop_body)
    #temp_body = temp_body * 2
    temp_i = loop_start[i] + 1
    #print (temp_body)
    if "pthread_cond" in temp_body[0]:
        #print ("+++++++")
        break
    else:
        for j in range(0, len(temp_body)):
            info.insert(temp_i, temp_body[j])
            temp_i += 1
    gap = len(info) - info_length
    temp_body = []
#print ("fixing info: ", info)
tree.add_node(info[treeID])
############# construct tree and path condition ##########
# Walk the region's lines, tagging each with its tree id (/*R1 line:N*/)
# and attaching it to the execution tree: branch heads hang off the branch
# point recorded for their nesting depth (branch_point / brackets_match);
# straight-line code hangs off the previous node.
for i in range(1, len(info)):
    #info[i] = info[i] + "/*R1 line:" + str(treeID) + "*/"
    info[i] = "/*R1 line:" + str(treeID) + "*/" + info[i]
    #print (info[i])
    if (("if" in info[i]) and ("else" not in info[i])):
        brackets_match += 1
        cond_num.append(brackets_match)
        branch_layer.append(brackets_match)
        branch_point.append(treeID)
        if cond_num[len(cond_num)-1] == cond_num[len(cond_num)-2]: ################ same depth #################################
            temp = branch_point[brackets_match-1]
            tree.add_node(info[i], info[temp])
            p_num.append(temp)
            cond_list.append(temp)
            cond_final_list.append(temp)
            #print ("info, i, temp: ", info[i], i, temp)
            if branch_start == []:
                branch_start.append(temp + 1)
            else:
                branch_start.append(i)
        else: ################ else/while/if/elif then "if" appear ################
            tree.add_node(info[i], info[treeID])
            p_num.append(treeID)
            cond_list.append(treeID)
            cond_final_list.append(treeID)
            #print ("info, i, treeID: ", info[i], i, treeID)
        # (commented-out legacy code that collected the textual branch
        # condition into constrain_state/path_cond_state/cond_text lives
        # here in the repository history)
    elif ("else if" in info[i]):
        brackets_match += 1
        cond_num.append(brackets_match)
        branch_layer.append(brackets_match)
        if (branch_layer[len(branch_layer)-1] < branch_layer[len(branch_layer)-2]):
            branch_point.pop()
        temp = branch_point[brackets_match-1]
        tree.add_node(info[i], info[temp])
        #print ("info, i, temp: ", info[i], i, temp)
        p_num.append(temp)
        cond_list.append(temp)
        cond_final_list.append(temp)
        # (commented-out legacy condition collection)
    elif (("else" in info[i]) and ("if" not in info[i])):
        brackets_match += 1
        cond_num.append(brackets_match)
        branch_layer.append(brackets_match)
        if (branch_layer[len(branch_layer)-1] < branch_layer[len(branch_layer)-2]):
            branch_point.pop()
        temp = branch_point[brackets_match-1]
        tree.add_node(info[i], info[temp])
        #print ("info, i, temp: ", info[i], i, temp)
        p_num.append(temp)
        cond_list.append(temp)
        cond_final_list.append(temp)
        # (commented-out legacy condition collection, including a disabled
        # "while" handling branch -- see repository history)
    else:
        if "end of branch" in info[i]:
            branch_boundrary = i
            break
        else:
            tree.add_node(info[i], info[treeID])
            p_num.append(treeID)
            constrain_state.append("null")
            #print ("info, i, treeID: ", info[i], i, treeID)
    if ("}" in info[i]):
        ######## layer problem #########
        brackets_match -= 1
        if brackets_match == 0:
            branch_leaf.append(i)
    #info[i] = info[i] + " //R1 line:" + str(treeID)
    treeID += 1
#print (info[0])
#print (info[1])
#for i in range(1, len(info)):
# if "if" in info[i] and "else" not in info[i]:
# branch_type.append("if")
# elif "else" in info[i] and "if" in info[i]:
# branch_type.append("else if")
# elif "else" in info[i] and "if" not in info[i]:
# branch_type.append("else")
# elif "while" in info[i]:
# branch_type.append("while")
################# merge constrain we go through at each execution path
#temp = len(cond_list)
#fork_cond = path_cond_state
#print ("path_cond state: ", path_cond_state)
#print ("branch_layer: ", branch_layer)
#print ("cond_final_list: ", cond_final_list)
#for i in range(0, temp-1):
# cond_dfs.append(cond_text[i])
# if (cond_final_list[i+1] <= cond_final_list[i]) and "else" in branch_type[i+1]:
# constrain = "1"
# for j in range(0, len(cond_dfs)):
# constrain = constrain + " && " + path_cond_state[j]
# path_cond.append(constrain)
# temp_state_pop = 0
# temp_cond_dfs_length = len(cond_dfs)
# for k in range(len(cond_dfs)-1, -1, -1):
# if cond_list[temp_cond_dfs_length] <= cond_list[k]:
# cond_dfs.pop()
# temp_state_pop += 1
# temp_del = k
# else:
# break
# for l in range(0, temp_state_pop):
# del cond_list[temp_del]
# del path_cond_state[temp_del]
#constrain = "1"
#for i in range(0, len(path_cond_state)):
# constrain = constrain + " && " + path_cond_state[i]
#path_cond.append(constrain)
#print ("path_cond state: ", path_cond_state)
############### if there is any statement behind branch statement region ###############
# Attach trailing straight-line statements (those after the branch region)
# as a chain under the first recorded leaf of the tree.
if branch_boundrary > 0:
    for i in range(0, 1):
        counter = 0
        for j in range(branch_boundrary, len(info)):
            temp = branch_leaf[i]
            branch_leaf.append(j)
            if counter == 0:
                tree.add_node(info[j], info[temp])
            else:
                tree.add_node(info[j], info[j-1])
            counter += 1
tree.display(info[0])
#20180621 change procedure: loop heuristic then build tree
#print("***** DEPTH-FIRST ITERATION *****"), '\n'
# Depth-first dump of the tree (minus region markers) into r1_path.c.
for node in tree.traverse(info[0]): # calculate path amount
    if "region" not in node:
        dfs.append(node)
for i in range(0, len(dfs)):
    region_path.write(dfs[i])
region_path.close()
"""
############ insert 99 instead none meanningful "}"
for i in range(0, len(p_num)-1):
if (p_num[i+1] < p_num[i]) and ("{" not in dfs[i+1]):
p_num.insert(i+1, 99)
#print ("parents: \n", p_num)
############ partition each execution path (existing in the tree except forking ones)
for i in range(0, len(p_num)-1):
path.append(dfs[i])
#if ("}" in dfs[i]) and ("}" not in dfs[i+1]) and (("else" in dfs[i+1]) or ("else if" in dfs[i+1]) or ("while" in dfs[i+1])):
if ("}" in dfs[i]) and ("}" not in dfs[i+1]) and ("{" in dfs[i+1]):
partition = open("partition.c", "w")
execution_path_r1 += 1
#if loop_brackets != 0
for i in range(0, len(path)):
#if ("if" not in dfs[i]) and ("elif" not in dfs[i]) and ("else" not in dfs[i]) and ("}" not in dfs[i]):
partition.write(path[i])
partition.close()
# print ("path: ", path)
os.system("mv partition.c exe_r1_path"+str(execution_path_r1)+".c")
temp_pop = 0
temp_path = len(path)
#print ("p_num: ", p_num[temp_path])
for j in range(len(path)-1, -1, -1):
if (p_num[temp_path] <= p_num[j]):
path.pop()
temp_pop += 1
temp_del = j
#print ("temp_del: ", temp_del)
else:
break
for k in range(0, temp_pop):
del p_num[temp_del]
#print ("after del p_num: ", p_num)
#print ("after pop: ", path)
########### partition the last path ############
partition = open("partition.c", "w")
execution_path_r1 += 1
for i in range(0, len(path)):
partition.write(path[i])
#partition.write("}\n")
#print ("the last: ", path)
partition.close()
os.system("mv partition.c exe_r1_path"+str(execution_path_r1)+".c")
#print ("branch_type: ", branch_type)
########### forking each false condition without other choose ("elif", "else"): for "if" only
for i in range(1, len(info)):
if (("if" in info[i] ) and "else" not in info[i]) or "while" in info[i]:
temp_layer += 1
# if "while" in info[i]:
# temp_branch = -1
# else:
for j in range(0, len(branch_layer)):
if (branch_layer[j] == temp_layer) and ("else" in branch_type[j]) and ("if" not in branch_type[j]):
temp_branch += 1
if temp_branch >= 1:
fork_state.append("null")
continue
else:
fork_state.append("fork")
temp_brackets = 0
#temp_brackets = temp_layer - 1
# print ("if start location: ", i)
for k in range(i, len(info)):
if "{" in info[k]:
temp_brackets += 1
elif "}" in info[k]:
temp_brackets -= 1
#if k == len(info)-2:
#temp_brackets -= 1
if (temp_brackets == 0):
#if k < len(info)-2 and "else if" not in info[k+1]:
if k != len(info) -1 and "else if" not in info[k+1]:
branch_end = k
break
#elif k == len(info)-2 and "}" in info[k+1]:
elif k == len(info)-1:
branch_end = len(info)
break
else:
continue
# print ("branch_end: ", branch_end)
# branch_start.append(i)
if i in branch_start:
branch_finish.append(branch_end + 1)
partition = open("partition.c", "w")
execution_path_r1 += 1
temp_brackets = 0
for l in range(1, i):
if "{" in info[l]:
temp_brackets += 1
elif "}" in info[l]:
temp_brackets -= 1
partition.write(info[l])
# print ("upper part: ",info[l])
for l in range(branch_end+1, len(info)):
if "{" in info[l]:
temp_brackets += 1
elif "}" in info[l]:
temp_brackets -= 1
if temp_brackets >= 0:
partition.write(info[l])
# print ("down part: ", info[l])
if temp_brackets > 0:
for m in range(0, temp_brackets):
partition.write("}\n")
partition.close()
os.system("mv partition.c exe_r1_path"+str(execution_path_r1)+".c")
#print ("fork_state: ", fork_state)
#print ("fork_cond: ", fork_cond)
#for i in range(0, len(fork_state)):
# #constrain = "1"
# if "fork" in fork_state[i]:
# constrain = " !(" + fork_cond[i] + ") "
# for j in range(0, i):
# constrain = constrain + "&&" + fork_cond[j]
# for j in range(i+1, len(fork_state)):
# constrain = constrain + "&&" + fork_cond[j]
# path_cond.append(constrain)
print ("path_cond: ", path_cond)
info_length = len(info)
if len(branch_finish) > 1:
partition = open("partition.c", "w")
execution_path_r1 += 1
for i in range(0, len(branch_start)):
gap = info_length - len(info)
for j in range(0, branch_finish[i] - branch_start[i]):
del info[branch_start[i] - gap]
for i in range(1, len(info)):
partition.write(info[i])
# print ("All False Path: ", info[i])
partition.close()
os.system("mv partition.c exe_r1_path"+str(execution_path_r1)+".c")
"""
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,560 | david128kim/region_based_project | refs/heads/master | /region2.py | import os
#import commands
import subprocess
import string
#from app_r1 import temp_name
#reload(app_r1)
#print temp_name
#print app_r1.program_name
from tree import Tree
from app_r1 import execution_path_r1
(_ROOT, _DEPTH, _BREADTH) = range(3)
tree = Tree()
#tree.add_node("Harry") # root node
#tree.add_node("Jane", "Harry")
#tree.add_node("Bill", "Harry")
#tree.add_node("David", "Jane")
#tree.add_node("Joe", "Jane")
#tree.display("Harry")
#print("***** DEPTH-FIRST ITERATION *****")
#for node in tree.traverse("Harry"):
#print(node)
#print("***** BREADTH-FIRST ITERATION *****")
#for node in tree.traverse("Harry", mode=_BREADTH):
#print(node)
#file = open("select_region1.c")
file = open("region_text/datarace.txt")
treeID = 0
info = []
info_bottom = []
height = 0
brackets_match_record = []
brackets_match = 0
branch_point = []
branch_boundrary = 0
branch_leaf = []
counter_if = 0
execution_path_r2 = 0
breakpoint = []
counter_bp = 0
dfs = []
start, end, counter_r2, r2_flag, main_flag= 1, 0, 0, 0, 0
for line in file:
    counter_r2 += 1
    if "region2" in line:
        r2_flag = counter_r2
    info.append(line)
main_flag = len(info)
### set "region2: " into info_bottom ######
# Keep only the tail of the file starting at the (last) line that
# mentions "region2".  The original popped main_flag-r2_flag+1 lines
# into info_bottom in reverse and then rebuilt them in order, which is
# exactly the slice info[r2_flag-1:].
# NOTE(review): if no line contains "region2", r2_flag stays 0; the
# original then crashed with an IndexError on an empty pop(), whereas
# the slice silently keeps only the last line -- confirm that input
# files always contain a "region2" marker.
info = info[r2_flag - 1:]
#print "main_flag: ", main_flag
#print "len(info): ", len(info)
tree.add_node(info[treeID])
#print "len(info): ", len(info)
#print "aloha: ", info[1]
#print r2_flag
#print main_flag
#for i in range(1, len(info)):
for i in range(1, len(info)):
if (("if" in info[i]) and ("else" not in info[i])):
#height += 1
counter_if += 1
brackets_match += 1
brackets_match_record.append(brackets_match)
branch_point.append(treeID)
if ("else" not in info[i-1]):
temp = branch_point[brackets_match-1]
tree.add_node(info[i], info[temp])
else:
tree.add_node(info[i], info[treeID])
#tree.add_node(info[i], info[branch_point])
#print "if(i, treeID, bp):", i, treeID, branch_point
#print "if_height: ", brackets_match
elif (("else" in info[i]) and ("if" in info[i])):
brackets_match += 1
brackets_match_record.append(brackets_match)
if (brackets_match_record[len(brackets_match_record)-1] < brackets_match_record[len(brackets_match_record)-2]):
branch_point.pop()
#if height > 1:
#else:
#branch_point.append(treeID)
temp = branch_point[brackets_match-1]
#tree.add_node(info[i], info[branch_point])
tree.add_node(info[i], info[temp])
#print "elif(i, treeID, bp):", i, treeID, branch_point[brackets_match-1]
#print "elif_height: ", brackets_match
elif (("else" in info[i]) and ("if" not in info[i])):
brackets_match += 1
brackets_match_record.append(brackets_match)
if (brackets_match_record[len(brackets_match_record)-1] < brackets_match_record[len(brackets_match_record)-2]):
branch_point.pop()
temp = branch_point[brackets_match-1]
#tree.add_node(info[i], info[branch_point])
tree.add_node(info[i], info[temp])
#print "else(i, treeID, bp):", i, treeID, branch_point[brackets_match-1]
#print "else_height: ", brackets_match
else:
if "end of branch" in info[i]:
branch_boundrary = i
#print i
break
#print i
#tree.add_node(info[i], info[treeID])
else:
tree.add_node(info[i], info[treeID])
if("}" in info[i]):
######## layer problem #########
brackets_match -= 1
if brackets_match == 0:
#print i
branch_leaf.append(i)
#if ((counter_if - brackets_match) == 1):
#print i
#branch_leaf.append(i)
#elif
#tree.add_node(info[i], info[branch_point+1])
#print "normal(i, treeID, bp):", i, treeID, branch_point
#print "height: ", height
treeID += 1
#print treeID
#print "len(branch_leaf): ", len(branch_leaf)
#print "branch_leaf[0]: ", branch_leaf[0]
#print branch_leaf[1]
#print branch_leaf[2]
#print "branch_boundrary: ", branch_boundrary
#print "info[branch_boundrary]: ", info[branch_boundrary]
#counter = 0
#for i in range(branch_boundrary, len(info)):
#for j in range(0, len(branch_leaf)):
#temp = branch_leaf[j]
#tree.add_node(info[i], branch_leaf[j])
#tree.add_node(info[i], info[temp])
#branch_leaf.insert(j, i)
#branch_leaf.remove(branch_leaf[j+1])
#print "!"
#for i in range(0, len(branch_leaf)-1):
if branch_boundrary > 0:
for i in range(0, 1):
counter = 0
#print "i: ", i
for j in range(branch_boundrary, len(info)):
temp = branch_leaf[i]
if counter == 0:
tree.add_node(info[j], info[temp])
else:
tree.add_node(info[j], info[j-1])
counter += 1
#print "counter: ", counter
#print "j: ", j
#print "###############################################"
#tree.add_node("/* #############breakline############### */", info[len(info)-1])
tree.display(info[0])
#print("***** DEPTH-FIRST ITERATION *****"), '\n'
for node in tree.traverse(info[0]):
dfs.append(node)
#if "#######breakline#######" in node:
#execution_path += 1
#print node
if node == info[len(info)-1]:
partition = open("partition.c", "w")
execution_path_r2 += 1
for i in range(1, len(dfs)):
if ("if" not in dfs[i]) and ("elif" not in dfs[i]) and ("else" not in dfs[i]) and ("}" not in dfs[i]):
partition.write(dfs[i])
partition.close()
os.system("mv partition.c exe_r2_path"+str(execution_path_r2)+".c")
#os.system('clang -Os -S -emit-llvm exe_r2_path'+str(execution_path_r2)+'.c -o exe_r2_path'+str(execution_path_r2)+'.ll')
for i in range(len(dfs)-1, -1, -1):
if ("if" not in dfs[i]) and ("elif" not in dfs[i]) and ("else" not in dfs[i]):
dfs.pop()
#print "dfs: ", dfs
#print "execution_path: ", execution_path
#dfs.pop()
#print "dfs.pop(): ", dfs
#counter_bp = len(dfs)-1
#for i in range(len(dfs)-1, -1, -1):
#counter_bp += 1
#if "#######breakline#######" in dfs[i]:
#breakpoint.append(counter_bp)
#counter_bp -= 1
#print breakpoint
#partition = open("partition.c", "w")
#while(execution_path > 0):
#partition = open("partition.c", "w")
#execution_path -= 1
#end = breakpoint.pop()-1
#for i in range(start, end):
#partition.write(dfs[i])
#partition.close()
#os.system("mv partition.c exe_r2_path"+str(execution_path)+".c")
#start = end+1
#os.system("python app_r2.py")
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,561 | david128kim/region_based_project | refs/heads/master | /test-room/dynamic-path.py | import os
import subprocess

# Symbolically execute region1.c with KLEE, harvest the concrete inputs
# it generates, and emit one source file per explored path
# (path_r1_<i>.c) containing only the statements on that path.

ValidInputs, kquery, ins, exe_path = [], [], [], []
k_point, appendable = 0, 1

# Compile to LLVM bitcode and run KLEE with depth-first search so the
# emitted .path files line up with the test-case numbering.
os.system('clang -Os -S -emit-llvm region1.c -o region1.ll')
os.system('llvm-as region1.ll -o region1.bc')
os.system('klee -search=dfs -write-kqueries -write-paths region1.bc')

# klee-last/ holds bookkeeping files plus a .ktest/.path pair per test;
# the test-case count formula is kept from the original, but computed
# with integer division instead of producing a float.
num = subprocess.getoutput('find klee-last/ -type f |wc -l')
end = (int(num) - 7 + 2) // 2
for i in range(1, end):
    # BUG FIX: KLEE zero-pads test names to six digits (test000001,
    # ..., test000010, ...).  The original 'test00000' + str(i)
    # produced wrong file names for i >= 10; format the index instead.
    temp = subprocess.getoutput('ktest-tool --write-ints klee-last/test%06d.ktest' % i)
    tmp = temp.split()
    if "found" not in tmp[-1]:
        ValidInputs.append(tmp[-1])
print ("valid inputs: ", ValidInputs)

# Cache the region source once; it is re-scanned for every path below.
with open('region1.c') as f:
    ins = f.readlines()

# NOTE(review): the unquoted glob '*.path' is expanded by the shell in
# the current directory if any .path file exists there -- consider
# quoting it ('-name "*.path"').
p_num = subprocess.getoutput('find klee-last/ -name *.path -type f |wc -l')
print (p_num)
for i in range(1, int(p_num)+1):
    # Same zero-padding fix for the branch-decision (.path) files.
    with open('klee-last/test%06d.path' % i) as f:
        kquery = f.readlines()
    # Walk the source: every "if" on the symbolic variable consumes one
    # branch decision from the .path file ('0' = branch not taken) and
    # toggles whether following plain statements belong to this path;
    # a closing brace re-enables appending.
    for j in range(0, len(ins)):
        if "num" in ins[j] and "if" in ins[j]:
            if "0" in kquery[k_point]:
                appendable = 0
            k_point += 1
        elif "}" in ins[j]:
            appendable = 1
        if appendable == 1 and "}" not in ins[j] and "if" not in ins[j]:
            exe_path.append(ins[j])
    # Emit this path's statements as path_r1_<i>.c.
    with open("path_r1.c", "w") as path:
        path.writelines(exe_path)
    os.system('mv path_r1.c path_r1_'+str(i)+'.c')
    k_point = 0
    kquery = []
    exe_path = []
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,562 | david128kim/region_based_project | refs/heads/master | /region1.py | import os
#import commands
import subprocess
import string
from tree import Tree
from tree import depth_list
(_ROOT, _DEPTH, _BREADTH) = range(3)
tree = Tree()
file = open("region_text/datarace.txt")
treeID = 0
info = []
height = 0
node_height = []
brackets_match_record = []
brackets_match = 0
branch_point = []
branch_point_record = []
branch_boundrary = 0
branch_leaf = []
counter_if = 0
execution_path_r1 = 0
breakpoint = []
counter_bp = 0
dfs = []
dfs_temp = []
start, end, counter_r2, r2_flag, main_flag, temp_node_length= 1, 0, 0, 0, 0, 0
for line in file:
counter_r2 += 1
if "region2" in line:
r2_flag = counter_r2
break
info.append(line)
tree.add_node(info[treeID])
for i in range(1, len(info)):
if (("if" in info[i]) and ("else" not in info[i])):
height += 1
counter_if += 1
brackets_match += 1
brackets_match_record.append(brackets_match)
branch_point.append(treeID)
branch_point_record.append(treeID)
node_height.append(height)
if ("else" not in info[i-1]):
temp = branch_point[brackets_match-1]
tree.add_node(info[i], info[temp])
else:
tree.add_node(info[i], info[treeID])
elif ("else if" in info[i]):
brackets_match += 1
brackets_match_record.append(brackets_match)
if (brackets_match_record[len(brackets_match_record)-1] < brackets_match_record[len(brackets_match_record)-2]):
branch_point.pop()
height = brackets_match
temp = branch_point[brackets_match-1]
tree.add_node(info[i], info[temp])
node_height.append(height)
temp_node_length = len(node_height)
elif (("else" in info[i]) and ("if" not in info[i])):
brackets_match += 1
brackets_match_record.append(brackets_match)
if (brackets_match_record[len(brackets_match_record)-1] < brackets_match_record[len(brackets_match_record)-2]):
branch_point.pop()
height = brackets_match
#print height
#print "branch_point(else): ", branch_point[1]
temp = branch_point[brackets_match-1]
#temp = branch_point[len(branch_point)-1]
#print "branch_point(else): ", temp
tree.add_node(info[i], info[temp])
#print "else = i, temp: ", (i, temp)
node_height.append(height)
temp_node_length = len(node_height)
#print "length of node_height: ", temp_node_length
#if (node_height[temp_node_length-1] < node_height[temp_node_length-2]):
#branch_leaf.append(i-1)
else:
if "end of branch" in info[i]:
branch_boundrary = i
#print "branch_boundrary: ", branch_boundrary
break
else:
height += 1
#print height
tree.add_node(info[i], info[treeID])
#print "others = i, treeID: ", (i, treeID)
node_height.append(height)
if("}" in info[i]):
######## layer problem #########
brackets_match -= 1
if brackets_match == 0:
branch_leaf.append(i)
#print "b-m: ", brackets_match
#print "info[i]: ", info[i]
treeID += 1
if branch_boundrary > 0:
for i in range(0, 1):
counter = 0
for j in range(branch_boundrary, len(info)):
temp = branch_leaf[i]
branch_leaf.append(j)
if counter == 0:
tree.add_node(info[j], info[temp])
#print "j, temp: ", (j, temp)
else:
tree.add_node(info[j], info[j-1])
#print "j, j-1: ", (j, j-1)
counter += 1
tree.display(info[0])
#print("***** DEPTH-FIRST ITERATION *****"), '\n'
for node in tree.traverse(info[0]): # calculate path amount
dfs.append(node)
if node == info[len(info)-1]:
partition = open("partition.c", "w")
execution_path_r1 += 1
for i in range(1, len(dfs)):
if ("if" not in dfs[i]) and ("elif" not in dfs[i]) and ("else" not in dfs[i]) and ("}" not in dfs[i]):
partition.write(dfs[i])
partition.close()
os.system("mv partition.c exe_r1_path"+str(execution_path_r1)+".c")
#os.system('clang -Os -S -emit-llvm exe_r1_path'+str(execution_path_r1)+'.c -o exe_r1_path'+str(execution_path_r1)+'.ll')
for i in range(len(dfs)-1, -1, -1):
if ("if" not in dfs[i]) and ("elif" not in dfs[i]) and ("else" not in dfs[i]):
dfs.pop()
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,563 | david128kim/region_based_project | refs/heads/master | /clean.py | import os
# Delete every artifact left behind by a previous analysis run:
# generated answers, per-region execution paths, interleavings, KLEE
# outputs, and the files produced under the exe_* and program
# directories.  Each command string is executed verbatim, exactly as
# the original back-to-back os.system() calls did.
for _cleanup_cmd in (
    'rm answer*',
    'rm exe_r*',
    'rm r1_path*',
    'rm r2_path*',
    'rm interleave*',
    'rm region1_klee*',
    'rm region2_klee*',
    'cd exe_IR && rm r*',
    'cd exe_concurrent && rm c* && rm i*',
    'cd exe_source && rm exe_r* && rm region*',
    'rm whole*',
    'rm -r klee-out-*',
    'cd program && rm path* && rm e*',
    'rm kleer*',
):
    os.system(_cleanup_cmd)
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,564 | david128kim/region_based_project | refs/heads/master | /DL_filter.py | import os
import subprocess
#from src_interleaving import p_flag
#import src_interleaving
ins, sig_wait, lock_usage, length = [], [], [], []
cir_wait, check_cond_to_end = 0, 0
condition, region_num = "", ""
interleaving = subprocess.getoutput('find -name "interleave-*" -type f |wc -l')
#print ("interleaving: ", interleaving)
#file = open('interleave-1.c', 'r')
# Read the newest interleave-<N>.c once; len(length) is later compared
# against the number of lines consumed so far to recognise the file's
# last line.  A with-block guarantees the handle is closed even if the
# read fails (the original left it open on error).
with open('interleave-'+str(interleaving)+'.c', 'r') as f:
    length.extend(f.readlines())
#file = open('interleave-1.c', 'r')
file = open('interleave-'+str(interleaving)+'.c', 'r')
for line in file:
ins.append(line)
temp_split = line.split()
#print ("lock_usage: ", lock_usage)
#print ("sig_wait: ", sig_wait)
if "cond_wait" in line: ######### pthread_mutex_unlock(&m); // cond_wait &full R2
if temp_split[0].lstrip('pthread_mutex_unlock(').rstrip(');') in lock_usage:
lock_usage.remove(temp_split[0].lstrip('pthread_mutex_unlock(').rstrip(');'))
'''
else:
print ("4. error lock releasing")
'''
sig_wait.append(temp_split[3])
region_num = temp_split[len(temp_split)-1]
'''
if condition == "":
condition = temp_split[len(temp_split)-1]
else:
print ("1. wait: inexistent enumeration")
break
'''
elif "pthread_cond_signal" in line: ######### %11 = tail call i32 @pthread_cond_signal(%union.pthread_cond_t* nonnull @full) #3 ;R?
if temp_split[0].lstrip('pthread_cond_signal(').rstrip(');') in sig_wait:
sig_wait.remove(temp_split[0].lstrip('pthread_cond_signal(').rstrip(');'))
#region_num = ""
#else:
#if region_num != "":
#print ("1. wait: inexistent enumeration")
#break
#region_num = ""
#condition = ""
#check_cond_to_end = 0
#print (temp_split[len(temp_split)-1].lstrip('//'))
'''
if region_num in temp_split[len(temp_split)-1] and sig_wait:
print ("1. wait: inexistent enumeration")
#### only for avoiding the last branch: if sig_wait: => not good ####
#sig_wait = []
####
break
else:
region_num = ""
'''
'''
if condition != "":
if condition not in line:
continue
else:
check_cond_to_end = 1
if len(length) == len(ins):
check_cond_to_end = 0
'''
elif "mutex_lock" in line: ######## type 2: %9 = call i32 @pthread_mutex_lock(%union.pthread_mutex_t* %8) #5
if temp_split[0].lstrip('pthread_mutex_lock(').rstrip(');') in lock_usage:
if len(lock_usage) >= 2:
print ("deadlock: circular waitting same lock ", temp_split[0].lstrip('pthread_mutex_lock(&').rstrip(');'))
break
else:
print ("2. lock: inexistent enumeration")
break
else:
lock_usage.append(temp_split[0].lstrip('pthread_mutex_lock(').rstrip(');'))
elif "mutex_unlock" in line:
if temp_split[0].lstrip('pthread_mutex_unlock(').rstrip(');') in lock_usage:
lock_usage.remove(temp_split[0].lstrip('pthread_mutex_unlock(').rstrip(');'))
'''
if condition != "":
if condition not in line:
continue
else:
check_cond_to_end = 1
if len(length) == len(ins):
check_cond_to_end = 0
'''
else:
'''
if condition != "":
if condition not in line:
continue
else: #### too early to check condition
#print ("3. normal op.: inexistent enumeration")
#break
check_cond_to_end = 1
if len(length) == len(ins):
check_cond_to_end = 0
'''
continue
#print (line)
#print ("SIG: ", sig_wait)
#print ("region_num: ", region_num)
file.close()
#if ";R2" in ins[0]:
#print ("different thread order")
#else:
'''
if check_cond_to_end == 1:
print ("3. normal op.: inexistent enumeration")
else:
'''
if sig_wait:
print ("deadlock: condition variable order violation. ")
'''
if len(lock_usage) > 0:
print ("error: incomplete lock usage. ")
'''
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,565 | david128kim/region_based_project | refs/heads/master | /filter.py | import os
import subprocess
ins, sig_wait, lock_usage, length = [], [], [], []
cir_wait, check_cond_to_end = 0, 0
condition = ""
# Read answer.ll once up front; len(length) is later compared with the
# running line count to recognise the file's last line.  A with-block
# guarantees the handle is closed even if the read fails (the original
# left it open on error).
with open('answer.ll', 'r') as f:
    length.extend(f.readlines())
file = open('answer.ll', 'r')
for line in file:
ins.append(line)
temp_split = line.split()
if "pthread_cond_wait" in line: ######### %6 = tail call i32 @pthread_cond_wait(%union.pthread_cond_t* nonnull @empty, %union.pthread_mutex_t* nonnull @m) #3 ;R?
sig_wait.append(temp_split[7].lstrip('@').rstrip(','))
#if temp_split[len(temp_split)-3].lstrip('@').rstrip(')') in lock_usage:
#lock_usage.remove(temp_split[len(temp_split)-3].lstrip('@').rstrip(')'))
if condition == "":
condition = temp_split[len(temp_split)-1]
else:
print ("1. wait: inexistent enumeration")
break
elif "pthread_cond_signal" in line: ######### %11 = tail call i32 @pthread_cond_signal(%union.pthread_cond_t* nonnull @full) #3 ;R?
if temp_split[7].lstrip('@').rstrip(')') in sig_wait:
sig_wait.remove(temp_split[7].lstrip('@').rstrip(')'))
condition = ""
check_cond_to_end = 0
if condition != "":
if condition not in line:
continue
else:
check_cond_to_end = 1
if len(length) == len(ins):
check_cond_to_end = 0
elif "mutex_lock" in line: ######## type 2: %9 = call i32 @pthread_mutex_lock(%union.pthread_mutex_t* %8) #5
if temp_split[len(temp_split)-3].lstrip('@').rstrip(')') in lock_usage:
if len(lock_usage) >= 2:
print ("deadlock: circular waitting same lock. ")
break
else:
print ("2. lock: inexistent enumeration")
break
else:
lock_usage.append(temp_split[len(temp_split)-3].lstrip('@').rstrip(')'))
#print ("lock usage: ", lock_usage)
elif "mutex_unlock" in line:
if temp_split[len(temp_split)-3].lstrip('@').rstrip(')') in lock_usage:
lock_usage.remove(temp_split[len(temp_split)-3].lstrip('@').rstrip(')'))
if condition != "":
if condition not in line:
continue
else:
check_cond_to_end = 1
if len(length) == len(ins):
check_cond_to_end = 0
else:
if condition != "":
if condition not in line:
continue
else: #### too early to check condition
#print ("3. normal op.: inexistent enumeration")
#break
check_cond_to_end = 1
if len(length) == len(ins):
check_cond_to_end = 0
continue
#print (line)
#print (sig_wait)
file.close()
#if ";R2" in ins[0]:
#print ("different thread order")
#else:
if check_cond_to_end == 1:
print ("3. normal op.: inexistent enumeration")
else:
if (len(sig_wait) > 0):
print ("deadlock: condition variable order violation. ")
'''
if len(lock_usage) > 0:
print ("error: incomplete lock usage. ")
'''
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,566 | david128kim/region_based_project | refs/heads/master | /scrapbooking_IR.py | import os
import subprocess
import string
#import scrapbooking_klee
#print (scrapbooking_klee.ValidInputs)
def replaceIR(instruction_list, new_list):
    """Overwrite the last entry of *instruction_list* with *new_list*.

    The list is mutated in place and nothing is returned.  Raises
    IndexError when the list is empty, exactly as the original
    pop()/append() pair did.

    Bug fix: the original ended with ``new_list = ""``, which only
    rebinds the local parameter name and is invisible to the caller;
    that dead statement is dropped.
    """
    # A single in-place assignment replaces the pop()+append() pair.
    instruction_list[-1] = new_list
def splitIR(original_line, temp_line, line_split):
    """Print and return the whitespace-split tokens of *original_line*.

    Bug fix: the original assigned its results to the parameters
    *temp_line* and *line_split*; rebinding a parameter is invisible to
    the caller, so the computed token list was silently lost and the
    function returned None.  The two extra parameters are retained for
    interface compatibility, but their incoming values are ignored.
    """
    temp_line = original_line
    print ("temp_r: ", temp_line)
    line_split = temp_line.split()
    print ("split: ", line_split)
    # Return the tokens so callers can actually use the split result.
    return line_split
############### initialization ##############################
# Load the KLEE test case and the answer path once and remember how
# many lines each file has; the later scrapbooking passes rely on
# these counts.  with-blocks guarantee the handles are closed even on
# a failed read (the original left them open on error).
with open('testcase.ll') as f:
    temp_testcase = f.readlines()
testcase_length = len(temp_testcase)
with open('answer.ll') as f:
    temp_path1 = f.readlines()
path_length = len(temp_path1)
############### start point of scapbooking ##################
file = open('program/exe_path1_2.ll')
scrap = open('answer_o.ll','w')
scrapping = []
start_counter = 0
for line in file:
if "define" in line:
scrapping.append(line)
break
else:
scrapping.append(line)
if "common local_unnamed_addr" in line or "common global" in line:
start_counter += 1
for i in range(0, len(scrapping)):
scrap.write(scrapping[i])
#scrap.write("entry: \n")
file.close()
scrap.close()
################# 1st time replacing part #####################
file = open('answer.ll')
booking = open('answer_o.ll','a')
counter_ins, counter_load, before_1stBB, counter_temp, counter_call, label_point, phi_point, counter_store, opening_load, counter_re = 0, 1, 0, -1, 0, 0, 0, 0, 0, -1
load_number, operation_name, assert_answer, temp = "", "", "", ""
instruction, label, re_instruction, cmp_point, br_label, br_value = [], [], [], [], [], []
#counter_ins += start_counter
counter_ins += 2
for line in file:
if "load" in line:
counter_ins += 1
#if opening_load == 0:
#opening_load = counter_ins
instruction.append(line)
temp = line
temp_1 = temp.split()
load_number = str(temp_1[0])
temp = temp.replace(str(load_number), "%"+str(counter_ins))
"""
instruction.pop()
instruction.append(temp)
temp = ""
"""
replaceIR(instruction, temp)
#before_1stBB = 1 ##### special case to increase program counter
elif ("store" in line):
#if before_1stBB == 0:
#counter_ins += 1
#else:
counter_store += 1
instruction.append(line)
temp = line
temp_assert_answer = temp.split()
assert_answer = str(temp_assert_answer[2])
if "%" not in assert_answer:
i = 3
while ("%" not in assert_answer) and (i < len(temp_assert_answer)-1) :
assert_answer = str(temp_assert_answer[i])
if "%" in assert_answer:
temp = temp.replace(str(assert_answer), str(operation_name)+',')
break
i += 1
else:
temp = temp.replace(str(assert_answer), str(operation_name)+',')
'''
instruction.pop()
instruction.append(temp)
'''
replaceIR(instruction, temp)
elif ("call" in line) and ("pthread_cond" not in line):
counter_ins += 1
instruction.append(line)
temp = line
temp_1 = temp.split()
load_number = str(temp_1[0])
temp = temp.replace(str(load_number), "%"+str(counter_ins))
replaceIR(instruction, temp)
#before_1stBB = 1 ############## special reason about local variable initialization ##############
elif ("call" in line) and ("pthread_cond" in line):
continue
else:
instruction.append(line)
if "=" not in line and "%" not in line:
continue
elif "label" in line:
if ";" not in line: ############## br instruction ###############
counter_ins += 1
temp = line
temp_split = line.split()
#print (temp_split[1])
if temp_split[1] == 'label':
temp = temp.replace(str(temp_split[2]), '%'+str(counter_ins))
replaceIR(instruction, temp)
else:
temp = temp.replace(str(temp_split[2]), '%'+str(counter_ins-1)+',')
replaceIR(instruction, temp)
else:
temp = line
temp_split = line.split()
temp = temp.replace(str(temp_split[1]), '<label>:'+str(counter_ins)+':')
replaceIR(instruction, temp)
label.append(counter_ins)
before_1stBB = 1
continue
elif "phi" in line: ################ phi instruction (ex:%8 = phi i32 [ %7, %5 ], [ 0, %2 ]): format of [ %7, %5 ] => [ BB last line -1, label ] #################
counter_ins += 1
temp = line
temp_split = line.split()
load_number = str(temp_split[0])
temp = temp.replace(str(load_number), "%"+str(counter_ins), 1)
replaceIR(instruction, temp)
else:
counter_ins += 1
temp = line
if "cmp" in line:
cmp_point.append(counter_ins)
temp_1 = temp.split()
load_number = str(temp_1[0])
temp = temp.replace(str(load_number), "%"+str(counter_ins))
load_number = str(temp_1[1])
if "%" not in load_number:
i = 1
while (("%" not in load_number) and ("mutex" not in load_number) and i < len(temp_1)):
load_number = str(temp_1[i])
if "%" in load_number and "mutex" not in load_number:
#print "load_number", load_number
if "sext" in line or "inttoptr" in line:
temp = temp.replace(str(load_number), "%"+str(counter_ins-1))
break
else:
temp = temp.replace(str(load_number), "%"+str(counter_ins-1)+",")
break
i += 1
else:
temp = temp.replace(str(load_number), "%"+str(counter_ins-1)+",")
temp_1 = temp.split()
operation_name = str(temp_1[0])
replaceIR(instruction, temp)
file.close()
for i in range(0, len(instruction)):
booking.write(instruction[i])
booking.close()
############ 2nd time replacing part ####################
label_point = 0
file = open('answer_o.ll','r')
scrapbooking = open('answer_ok.ll','w')
for line in file:
counter_re += 1
temp = line
temp_split = temp.split()
if "br" in line and temp_split[1] != 'label': ############# br i1 %14, label %15, label %18 #############
temp = temp.replace('label '+str(temp_split[6])+'', 'label %'+str(label[label_point+1]))
temp = temp.replace('label '+str(temp_split[4]), 'label %'+str(label[label_point])+',')
#print (temp)
label_point += 2
re_instruction.append(temp)
elif "phi" in line: ################ phi instruction (ex:%8 = phi i32 [ %7, %5 ], [ 0, %2 ]): format of [ %7, %5 ] => [ BB last line -1, label ] #################
for i in range(counter_re-1, 0, -1):
if 'label %'+str(int(temp_split[0].strip('%'))-1) in str(re_instruction[i]):
#print ("ins[i]: ", re_instruction[i])
for j in range(i-1, 0, -1):
if "cmp" not in re_instruction[j] and "=" in re_instruction[j]:
#print ("ins[j]: ", re_instruction[j])
temp_value = re_instruction[j].split()
br_value.append(temp_value[0])
break
for j in range(i-1, 0, -1):
if "; <label>" in re_instruction[j]:
temp_label = re_instruction[j].split()
br_label.append(temp_label[1].lstrip('; <label>:').rstrip(':'))
break
#print ("brv: ", br_value)
#print ("brl: ", br_label)
if len(br_label) < len(br_value):
br_label.append(str(opening_load-1))
temp = temp.replace(temp_split[5], str(br_value[0])+',')
temp = temp.replace(temp_split[6]+' ]', "%"+str(br_label[0])+' ]')
'''
if str(temp_split[5]) == str(temp_split[6])+',':
temp = temp.replace(temp_split[5], "%"+str(label[label_point]-3)+',')
temp = temp.replace(temp_split[6]+' ]', "%"+str(label[label_point-2])+' ]')
'''
temp = temp.replace('[ '+temp_split[9], "[ "+str(br_value[1])+',')
temp = temp.replace(temp_split[10]+' ]', "%"+str(br_label[1])+' ]')
for i in range(0, len(br_value)):
br_value.pop()
br_label.pop()
re_instruction.append(temp)
else:
re_instruction.append(line)
file.close()
for i in range(0, len(re_instruction)):
scrapbooking.write(re_instruction[i])
scrapbooking.close()
########### ending part of scapbooking ##################
file = open('program/exe_path1_2.ll')
scrapbooking = open('answer_ok.ll','a')
counter, temp_cut, cut = 0, 0, 0
ending, temp_2 = [], []
for line in file:
temp_2.append(line)
if ("ret" in line):
temp = line
temp_1 = temp.split()
temp_ret = str(temp_1[2])
temp = temp.replace(str(temp_ret), "%"+str(counter_ins))
temp_2.pop()
temp_2.append(temp)
break
file.close()
file = open('program/exe_path1_2.ll')
for line in file:
counter += 1
ending.append(line)
cut = len(temp_2) #before 20171121
#print ("\nlength: ", counter)
'''
if ("cmp" in line) and ("br" not in line): # not good
temp_cut = counter
temp = line
temp_1 = temp.split()
assert_source = str(temp_1[5])
temp = temp.replace(str(assert_source), str(assert_answer))
#print temp_cut
ending.append(temp)
else:
ending.append(line)
if (temp_cut < cut) and (temp_cut != 0):
cut = temp_cut
scrapbooking.write(temp)
'''
#print ("cut: ", ending[cut])
for i in range(cut-1, counter): # include cmp ( before conv )
scrapbooking.write(ending[i])
file.close()
scrapbooking.close()
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,567 | david128kim/region_based_project | refs/heads/master | /scrapbooking_klee.py | import os
import subprocess
import string
#import app_r1
#import app_r2
temp_ins, loop_body, ValidInputs, Inputs_Index, Region_Index, source_line, source_r, ir_line, local_var, program, source_path, ins, kquery, exe_path, exe_r1_path, exe_r2_path, exe_r3_path, exe_r4_path = [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []
region_combination, counter_r1, entry_r1, return_r1, counter_r2, entry_r2, return_r2, num_ins, region_flag, entry_region, return_region, counter_region, k_point, appendable, brackets, is_loop, cond_wait = 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
#program_name = input("Please key in your program name: \n")
shared_data = input("Please key in your shared data name: \n")
num_region = input("How many regions do you circle: \n")
#file = open(program_name)
#klee = open('klee_program.c', 'w')
#whole = open('whole_program.c', 'w')
#################################### delete point ####################################
'''
for line in file:
if "(" in line:
break
source_line.append(line)
for line in file:
#if "(" not in line and "{" not in line and "}" not in line and "+" not in line and "-" not in line and "*" not in line and "/" not in line and "return" not in line and shared_data not in line:
if "(" not in line and "{" not in line and "}" not in line and "+" not in line and "-" not in line and "=" not in line and "/" not in line and "return" not in line:
local_var.append(line)
file.close()
for i in range(1, int(num_region)+1):
file = open("r"+str(i)+"_path.c")
region = open('region'+str(i)+'.c', 'w')
for k in range(0, len(source_line)):
region.write(source_line[k])
region.write('int main(int argc, char **argv) {\n')
for j in range(0, len(local_var)):
region.write(local_var[j])
for line in file:
region.write(line)
region.write('return 0;\n}')
region.close()
os.system('mv region'+str(i)+'.c exe_source/')
dy_path_r1 = open("Itrigger_1.c", "w")
dy_path_r2 = open("Itrigger_2.c", "w")
for j in range(0, len(source_line)):
dy_path_r1.write(source_line[j])
dy_path_r2.write(source_line[j])
dy_path_r1.write('int main(int argc, char **argv) {\n')
dy_path_r2.write('int main(int argc, char **argv) {\n')
for k in range(0, len(local_var)):
dy_path_r1.write(local_var[k])
dy_path_r2.write(local_var[k])
dy_path_r1.write('klee_make_symbolic(&'+shared_data+', sizeof('+shared_data+'), "'+shared_data+'");\n')
dy_path_r2.write('klee_make_symbolic(&'+shared_data+', sizeof('+shared_data+'), "'+shared_data+'");\n')
#os.system('cp Itrigger_1.c Itrigger_2.c')
for i in range(0, int(num_region)): ##### two region: need to be dynamic (ex: find r?_path.c number) #####
file = open("r"+str(i+1)+"_path.c")
dy_path_r1 = open("Itrigger_1.c", "a")
for line in file:
source_path.append(line)
for l in range(0, len(source_path)):
if "pthread_cond_wait" in source_path[l]:
source_path[l] = source_path[l].replace('pthread_cond_wait(&', 'printf ("')
source_path[l] = source_path[l].replace(');' , 'wait");')
dy_path_r1.write(source_path[l])
source_path = []
for i in range(int(num_region), 0, -1):
file = open("r"+str(i)+"_path.c")
dy_path_r2 = open("Itrigger_2.c", "a")
for line in file:
source_path.append(line)
for l in range(0, len(source_path)):
if "pthread_cond_wait" in source_path[l]:
source_path[l] = source_path[l].replace('pthread_cond_wait(&', 'printf ("')
source_path[l] = source_path[l].replace(');' , 'wait");')
dy_path_r2.write(source_path[l])
source_path = []
dy_path_r1.write('return 0; }\n')
dy_path_r1.close()
dy_path_r2.write('return 0; }\n')
dy_path_r2.close()
'''
################################### handle exceptional condition (e.g. while-loop, wait(), loop simplification) ######################################
# Rewrite the latest Itrigger_<N>.c into kleer.c for KLEE: while-loops are
# modelled as single-pass "if" guards and pthread_cond_wait calls are
# rewritten so symbolic execution can explore the region without blocking.
# NOTE(review): indentation reconstructed from statement order; verify
# against the original file.
I_num = subprocess.getoutput('find -name Itrigger_* -type f |wc -l')
file = open('Itrigger_'+str(I_num)+'.c')
klee = open('kleer.c', 'w')
for line in file:
    if "{" in line and "while" in line:
        # entering a while-loop: track nesting and turn it into an "if"
        brackets += 1
        is_loop = 1
        line = line.replace("while", "if")
        line = line.replace("//", "//entering while ")
        loop_body.append(line)
        temp_ins.append(line)
    elif "}" in line and is_loop == 1:
        brackets -= 1
        if brackets == 0:
            # closing brace of the outermost tracked loop
            is_loop = 0
            line = line.replace("//", "//breaking while ")
            loop_body.append(line)
            temp_ins.append(line)
            if cond_wait != 1:
                # duplicate the recorded loop body once (unroll twice)
                temp_ins.extend(loop_body)
                print ("loop_body: ", loop_body)
            else:
                continue
            loop_body = []
            cond_wait = 0
    elif brackets != 0 and is_loop != 0:
        # statement inside a tracked loop
        # NOTE(review): cond_wait is set for every in-loop line, not only
        # for pthread_cond_wait lines -- confirm this is intended, since it
        # suppresses the loop-body duplication above.
        cond_wait = 1
        if "pthread_cond_wait" in line:
            '''
            line = line.replace('pthread_cond_wait(&', 'printf ("')
            line = line.replace(');' , ' wait");')
            '''
            ### conditional wait examples: pthread_cond_wait(&full, &m); ###
            # extract the condition-variable name, then model the wait as a
            # plain mutex unlock and tag the line for later matching
            temp_wait = line.split()
            temp_wait = temp_wait[0].lstrip('pthread_cond_wait(').rstrip(',')
            line = line.replace('pthread_cond_wait('+temp_wait+', ', 'pthread_mutex_unlock(')
            line = line.replace('//', '// cond_wait '+temp_wait+' ')
            #print (line)
        temp_ins.append(line)
        loop_body.append(line)
    else:
        temp_ins.append(line)
file.close()
# flush the transformed source to kleer.c
for i in range(0, len(temp_ins)):
    klee.write(temp_ins[i])
klee.close()
################################### manaul insert point #####################################
# Run clang + KLEE on kleer.c, harvest the concrete inputs of the generated
# test cases, then slice one C source path per explored KLEE path.
# NOTE(review): indentation reconstructed from statement order; verify
# against the original file.
for i in range(0, int(I_num)):
    #os.system('clang -emit-llvm -g -c -w Itrigger_'+str(i+1)+'.c -o Itrigger'+str(i+1)+'.bc')
    #os.system('klee -search=dfs -write-paths Itrigger'+str(i+1)+'.bc')
    os.system('clang -emit-llvm -g -c -w kleer.c -o kleer.bc')
    os.system('klee -search=dfs -write-paths kleer.bc')
    num = subprocess.getoutput('find klee-last/ -type f |wc -l')
    # derive the number of test cases from the file count
    # NOTE(review): assumes klee-last holds 7 metadata files plus 2 files
    # per test case -- confirm against the KLEE version in use
    end = (int(num) - 7 + 2) / 2
    for j in range(1, int(end)):
        temp = subprocess.getoutput('ktest-tool --write-ints klee-last/test00000'+str(j)+'.ktest')
        tmp = temp.split()
        if "found" not in tmp:
            # the last token of ktest-tool's output is the concrete input
            '''
            if i == 0:
                ValidInputs_1.append(tmp[len(tmp)-1])
            else:
                ValidInputs_2.append(tmp[len(tmp)-1])
            '''
            ValidInputs.append(tmp[len(tmp)-1])
    Inputs_Index.append(len(ValidInputs))
    print ("valid inputs: ", ValidInputs)
    '''
    print ("valid inputs: ", ValidInputs_1)
    print ("valid inputs: ", ValidInputs_2)
    '''
    # reload the instrumented source for path slicing
    file = open('kleer.c')
    for line in file:
        ins.append(line)
    file.close()
    #print ("ins: ", ins)
    p_num = subprocess.getoutput('find klee-last/ -name *.path -type f |wc -l')
    for k in range(1, int(p_num)+1):
        # one .path file per explored path: a sequence of branch decisions
        file = open('klee-last/test00000'+str(k)+'.path')
        for line in file:
            kquery.append(line)
        file.close()
        #print ("kquery: ", kquery)
        # walk the source and keep only the statements that lie on the
        # branch decisions recorded in the .path file
        for j in range(0, len(ins)):
            #if "num" in ins[j] and "if" in ins[j]:
            if shared_data in ins[j] and "if" in ins[j]:
                if "0" in kquery[k_point]:
                    appendable = 0
                k_point += 1
            elif "}" in ins[j]:
                appendable = 1
            if appendable == 1 and "}" not in ins[j] and "if" not in ins[j]:
                if "klee" not in ins[j]:
                    exe_path.append(ins[j])
                    #print (ins[j])
                else:
                    continue
                    #print (ins[j])
        #print ("exe_path: ", exe_path)
        path = open("path.c", "w")
        for j in range(0, len(exe_path)):
            '''
            if "wait" in exe_path[j]:
                exe_path[j] = exe_path[j].replace('printf ("', 'pthread_cond_wait(&')
                exe_path[j] = exe_path[j].replace('wait");', ');')
            '''
            path.write(exe_path[j])
            ###### import valid inputs ######
            '''
            if "int main" in exe_path[j]:
                path.write(shared_data+'= '+ValidInputs[k-1]+'; //R1 \n')
            '''
        #path.write('return 0;\n}')
        path.write('}')
        path.close()
        os.system('mv path.c program/path_'+str(i+1)+'_'+str(k)+'.c')
        #os.system('clang -Os -S -emit-llvm program/path_'+str(i+1)+'_'+str(k)+'.c -o program/path_'+str(i+1)+'_'+str(k)+'.ll')
        # reset per-path state before the next .path file
        k_point = 0
        kquery = []
        exe_path = []
    ins = []
# Recombine the sliced paths into one sequential C file per (trigger, path)
# pair: statements tagged //R1..//R4 are grouped per region, written in
# region order, and the last line of each region is re-tagged "End".
# NOTE(review): indentation reconstructed from statement order; verify
# against the original file.
for i in range(0, int(I_num)):
    for j in range(1, int(p_num)+1):
        file = open('program/path_'+str(i+1)+'_'+str(j)+'.c')
        for line in file:
            if "printf" not in line:
                if "R1" in line:
                    exe_r1_path.append(line)
                elif "R2" in line:
                    exe_r2_path.append(line)
                elif "R3" in line:
                    exe_r3_path.append(line)
                elif "R4" in line:
                    exe_r4_path.append(line)
                '''
                else:
                    if "klee" not in line:
                        exe_r1_path.append(line)
                        exe_r2_path.append(line)
                '''
        file.close()
        #print (exe_r3_path)
        sequential = open('concurrent_'+str(i+1)+'_'+str(j)+'.c','w')
        #sequential.write('region 1: \n')
        #sequential.write(shared_data+'= '+ValidInputs[j-1]+';\n')
        for k in range(0 , len(exe_r1_path)):
            if k == len(exe_r1_path)-1:
                exe_r1_path[k] = exe_r1_path[k].replace('//R1', '//R1 End')
            sequential.write(exe_r1_path[k])
        Region_Index.append(len(exe_r1_path))
        #sequential.write('region 2: \n')
        for k in range(0 , len(exe_r2_path)):
            if k == len(exe_r2_path)-1:
                exe_r2_path[k] = exe_r2_path[k].replace('//R2', '//R2 End')
            sequential.write(exe_r2_path[k])
        Region_Index.append(len(exe_r2_path))
        #if num_region == 3:
        for k in range(0 , len(exe_r3_path)):
            if k == len(exe_r3_path)-1:
                exe_r3_path[k] = exe_r3_path[k].replace('//R3', '//R3 End')
            sequential.write(exe_r3_path[k])
        Region_Index.append(len(exe_r3_path))
        for k in range(0 , len(exe_r4_path)):
            if k == len(exe_r4_path)-1:
                exe_r4_path[k] = exe_r4_path[k].replace('//R4', '//R4 End')
            sequential.write(exe_r4_path[k])
        Region_Index.append(len(exe_r4_path))
        ####### set branch condition here to match over 2 regions cases #######
        sequential.close()
        # reset per-file buffers before the next path file
        exe_r1_path = []
        exe_r2_path = []
        exe_r3_path = []
        exe_r4_path = []
        os.system('mv concurrent_'+str(i+1)+'_'+str(j)+'.c exe_concurrent/')
#print ("region index: ", Region_Index)
#print ("ValidInputs: ", ValidInputs)
#print ("Inputs_Index", Inputs_Index)
'''
for i in range(1 , int(num_region)+1):
executions = open("exe_r"+str(i)+".c", "w")
if i == 1:
for j in range(0 , len(exe_r1_path)):
executions.write(exe_r1_path[j])
else:
for j in range(0 , len(exe_r2_path)):
executions.write(exe_r2_path[j])
executions.close()
#os.system('clang -Os -S -emit-llvm exe_r'+str(i)+'.c -o exe_r'+str(i)+'.ll')
#os.system('mv exe_r'+str(i)+'.ll exe_IR/')
os.system('mv exe_r'+str(i)+'.c exe_source/')
'''
'''
for i in range(1, int(num_region)+1):
ir_line =[]
counter_region = 0
region = open('exe_IR/exe_r'+str(i)+'.ll','r')
for line in region:
counter_region += 1
ir_line.append(line)
if "define" in line:
entry_region = counter_region
elif "ret" in line:
return_region = counter_region
sequential = open('concurrent_program.ll','a')
sequential.write('region'+str(i)+': \n')
for k in range(entry_region, return_region-1):
sequential.write(ir_line[k].rstrip('\n')+' ;R'+str(i)+'\n')
region.close()
sequential.close()
os.system('mv concurrent_program.ll exe_concurrent/')
for i in range(1, int(num_region)+1):
if i == 1:
for j in range(0, len(ValidInputs_1)):
file = open("program/path_"+str(i)+"_"+str(j+1)+".ll", "r")
for line in file:
program.append(line)
file.close()
path = open('exe_path'+str(i)+'_'+str(j+1)+'.ll', 'w')
for k in range(0, len(program)):
if shared_data in program[k] and "global" in program[k] and "common" in program[k]:
program[k] = program[k].replace("common local_unnamed_addr global i32 0", "global i32 "+ValidInputs_1[j]+"")
elif shared_data in program[k] and "global" in program[k] and "common" not in program[k]:
program[k] = program[k].replace(""+ValidInputs_1[j-1]+"", ""+ValidInputs_1[j]+"")
path.write(program[k])
path.close()
program = []
elif i == 2:
for j in range(0, len(ValidInputs_2)):
file = open("program/path_"+str(i)+"_"+str(j+1)+".ll", "r")
for line in file:
program.append(line)
file.close()
path = open('exe_path'+str(i)+'_'+str(j+1)+'.ll', 'w')
for k in range(0, len(program)):
if shared_data in program[k] and "global" in program[k] and "common" in program[k]:
program[k] = program[k].replace("common local_unnamed_addr global i32 0", "global i32 "+ValidInputs_2[j]+"")
elif shared_data in program[k] and "global" in program[k] and "common" not in program[k]:
program[k] = program[k].replace(""+ValidInputs_2[j-1]+"", ""+ValidInputs_2[j]+"")
path.write(program[k])
path.close()
program = []
os.system('mv exe_path* program/')
'''
| {"/scrapbooking_source.py": ["/app_r1.py"], "/src_interleaving.py": ["/scrapbooking_klee.py"], "/ins_interleaving.py": ["/scrapbooking_klee.py"], "/region2.py": ["/app_r1.py"]} |
43,614 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/gan/gan_trainer.py | import os
import json
import pickle as pkl
import torch
from ..base.base_trainer import BaseTrainer
from ...utils.config import BaseConfig, getConfigFromDict, getDictFromConfig
class GANTrainer(BaseTrainer):
    """
    A class managing GAN training. Logs, checkpoints,
    visualization, and iteration counts are managed here.
    """

    def __init__(self,
                 pathdb,
                 visualisation=None,
                 **kwargs):
        """
        Initializer for all GAN trainers.

        Args:
            pathdb (str): path to the training database.
            visualisation (module or None): visualizer exposing
                publishTensors / publishLoss / saveTensor; None disables
                visual output.
            **kwargs: forwarded to BaseTrainer.
        """
        BaseTrainer.__init__(self, pathdb, **kwargs)

        # set up visualisation for GAN training; tokens are visdom window
        # handles reused across publications so plots update in place
        self.visualisation = visualisation
        self.tokenWindowFake = None
        self.tokenWindowFakeSmooth = None  # may not need here
        self.tokenWindowReal = None
        self.tokenWindowLosses = None
        self.refVectorPath = None

        # fixed latent vectors reused for every visualization so generated
        # samples stay comparable over training
        self.nDataVisualization = 16
        self.refVectorVisualization = \
            self.model.buildNoiseData(self.nDataVisualization)

        # additional checkpoint paths
        if self.checkPointDir is not None:
            self.pathLossLog = os.path.abspath(os.path.join(self.checkPointDir,
                                                            self.modelLabel
                                                            + '_losses.pkl'))
            self.pathRefVector = os.path.abspath(os.path.join(self.checkPointDir,
                                                              self.modelLabel
                                                              + '_refVectors.pt'))

    def initModel(self):
        """
        Initialize the GAN model. To be overridden by concrete trainers.
        """
        pass

    def loadSavedTraining(self,
                          pathModel,
                          pathTrainConfig,
                          pathTmpConfig,
                          loadG=True,
                          loadD=True,
                          finetune=False):
        """
        Load a given checkpoint.

        Args:

            - pathModel (string): path to the file containing the model
                                 structure (.pt)
            - pathTrainConfig (string): path to the reference configuration
                                        file of the training. WARNING: this
                                        file must be compatible with the one
                                        pointed by pathModel
            - pathTmpConfig (string): path to the temporary file describing the
                                      state of the training when the checkpoint
                                      was saved. WARNING: this file must be
                                      compatible with the one pointed by
                                      pathModel
            - loadG (bool): load the generator weights
            - loadD (bool): load the discriminator weights
            - finetune (bool): when True, skip reading the train config and
                               load the model in finetuning mode
        """
        # Load the temp configuration
        tmpPathLossLog = None
        tmpConfig = {}

        if pathTmpConfig is not None:
            # FIX: context managers so file handles are not leaked
            with open(pathTmpConfig, 'rb') as f:
                tmpConfig = json.load(f)
            self.startScale = tmpConfig["scale"]
            self.startIter = tmpConfig["iter"]
            self.runningLoss = tmpConfig.get("runningLoss", {})

            tmpPathLossLog = tmpConfig.get("lossLog", None)

        if tmpPathLossLog is None:
            self.lossProfile = [
                {"iter": [], "scale": self.startScale}]
        elif not os.path.isfile(tmpPathLossLog):
            print("WARNING : couldn't find the loss logs at " +
                  tmpPathLossLog + " resetting the losses")
            self.lossProfile = [
                {"iter": [], "scale": self.startScale}]
        else:
            with open(tmpPathLossLog, 'rb') as f:
                self.lossProfile = pkl.load(f)
            self.lossProfile = self.lossProfile[:(self.startScale + 1)]

            # drop loss entries logged after the checkpoint iteration
            if self.lossProfile[-1]["iter"][-1] > self.startIter:
                indexStop = next(x[0] for x in enumerate(self.lossProfile[-1]["iter"])
                                 if x[1] > self.startIter)
                self.lossProfile[-1]["iter"] = self.lossProfile[-1]["iter"][:indexStop]

                for item in self.lossProfile[-1]:
                    if isinstance(self.lossProfile[-1][item], list):
                        self.lossProfile[-1][item] = \
                            self.lossProfile[-1][item][:indexStop]

        # Read the training configuration
        if not finetune:
            with open(pathTrainConfig, 'rb') as f:
                trainConfig = json.load(f)
            self.readTrainConfig(trainConfig)

        # Re-initialize the model
        self.initModel()
        self.model.load(pathModel,
                        loadG=loadG,
                        loadD=loadD,
                        finetuning=finetune)

        # Build / retrieve the reference vectors
        self.refVectorPath = tmpConfig.get("refVectors", None)

        if self.refVectorPath is None:
            self.refVectorVisualization = \
                self.model.buildNoiseData(self.nDataVisualization)
        elif not os.path.isfile(self.refVectorPath):
            print("WARNING : no file found at " + self.refVectorPath
                  + " building new reference vectors")
            self.refVectorVisualization = \
                self.model.buildNoiseData(self.nDataVisualization)
        else:
            with open(self.refVectorPath, 'rb') as f:
                self.refVectorVisualization = torch.load(f)

    def getDefaultConfig(self):
        """Return the default configuration object; overridden by subclasses."""
        pass

    def resetVisualization(self, nDataVisualization):
        """Resample the reference latent vectors with a new batch size."""
        self.nDataVisualization = nDataVisualization
        self.refVectorVisualization = \
            self.model.buildNoiseData(self.nDataVisualization)

    def saveBaseConfig(self, outPath):
        """
        Save the model basic configuration (the part that doesn't change with
        the training's progression) at the given path
        """
        outConfig = getDictFromConfig(
            self.modelConfig, self.getDefaultConfig())

        with open(outPath, 'w') as fp:
            json.dump(outConfig, fp, indent=4)

    def saveCheckpoint(self, outDir, outLabel, scale, iter):
        """
        Save a checkpoint at the given directory. Please note that the basic
        configuration won't be saved.

        This function produces 2 files:
        outDir/outLabel_tmp_config.json -> temporary config
        outDir/outLabel -> networks' weights

        And update the two followings:
        outDir/outLabel_losses.pkl -> losses util the last registered iteration
        outDir/outLabel_refVectors.pt -> reference vectors for visualization
        """
        pathModel = os.path.join(outDir, outLabel + ".pt")
        self.model.save(pathModel)

        # Tmp Configuration
        pathTmpConfig = os.path.join(outDir, outLabel + "_tmp_config.json")
        outConfig = {'scale': scale,
                     'iter': iter,
                     'lossLog': self.pathLossLog,
                     'refVectors': self.pathRefVector,
                     'runningLoss': self.runningLoss}

        # Save the reference vectors
        # FIX: context managers so file handles are not leaked
        with open(self.pathRefVector, 'wb') as f:
            torch.save(self.refVectorVisualization, f)

        with open(pathTmpConfig, 'w') as fp:
            json.dump(outConfig, fp, indent=4)

        # FIX: the original raised here and then re-tested
        # "pathLossLog is not None", which could never be False
        if self.pathLossLog is None:
            raise AttributeError("Logging mode disabled")
        with open(self.pathLossLog, 'wb') as f:
            pkl.dump(self.lossProfile, f)

        if self.visualisation is not None:
            ref_g = self.model.test(self.refVectorVisualization)
            imgSize = max(128, ref_g.size()[2])
            self.visualisation.saveTensor(ref_g, (imgSize, imgSize),
                                          os.path.join(outDir, outLabel + '.jpg'))

            ref_g_smooth = self.model.test(self.refVectorVisualization, True)
            self.visualisation.saveTensor(ref_g_smooth, (imgSize, imgSize),
                                          os.path.join(outDir, outLabel + '_avg.jpg'))

    def sendToVisualization(self, refVectorReal, scale):
        """
        Send the images generated from some reference latent vectors and a
        bunch of real examples from the dataset to the visualisation tool.
        """
        imgSize = max(128, refVectorReal.size()[2])
        envLabel = self.modelLabel + "_training"
        label = self.modelLabel

        ref_g_smooth = self.model.test(self.refVectorVisualization, True)  # test with running average generator
        self.tokenWindowFakeSmooth = \
            self.visualisation.publishTensors(ref_g_smooth,
                                              (imgSize, imgSize),
                                              label + " smooth",
                                              self.tokenWindowFakeSmooth,
                                              env=envLabel)

        ref_g = self.model.test(self.refVectorVisualization, False)  # test without running average generator
        self.tokenWindowFake = \
            self.visualisation.publishTensors(ref_g,
                                              (imgSize, imgSize),
                                              label + " fake",
                                              self.tokenWindowFake,
                                              env=envLabel)

        self.tokenWindowReal = \
            self.visualisation.publishTensors(refVectorReal,
                                              (imgSize, imgSize),
                                              label + " real",
                                              self.tokenWindowReal,
                                              env=envLabel)

        self.tokenWindowLosses = \
            self.visualisation.publishLoss(self.lossProfile[scale],
                                           self.modelLabel,
                                           self.tokenWindowLosses,
                                           env=envLabel)

    def trainOnEpoch(self,
                     dbLoader,
                     scale,
                     shiftIter=0,
                     maxIter=-1):
        """
        Train the model on one epoch.

        Args:

            - dbLoader (DataLoader): dataset on which the training will be made
            - scale (int): scale at which is the training is performed
            - shiftIter (int): shift to apply to the iteration index when
                               looking for the next update of the alpha
                               coefficient
            - maxIter (int): if > 0, iteration at which the training should stop

        Returns:

            True if the training went smoothly
            False if a diverging behavior was detected and the training had to
            be stopped
        """
        i = shiftIter

        for _, data in enumerate(dbLoader, 0):

            inputs_real = data

            # incomplete trailing batches are skipped
            if inputs_real.size()[0] < self.modelConfig.miniBatchSize:
                continue

            # Additional updates inside a scale
            inputs_real = self.inScaleUpdate(i, scale, inputs_real)

            allLosses = self.model.optimizeParameters(inputs_real)

            self.updateRunningLosses(allLosses)

            i += 1

            # Regular evaluation
            if i % self.lossIterEvaluation == 0:

                # Reinitialize the losses
                self.updateLossProfile(i)

                print('[%d : %6d] loss G : %.3f loss D : %.3f' % (scale, i,
                      self.lossProfile[-1]["lossG"][-1],
                      self.lossProfile[-1]["lossD"][-1]))

                self.resetRunningLosses()

                if self.visualisation is not None:
                    self.sendToVisualization(inputs_real, scale)

            if self.checkPointDir is not None:
                if i % self.saveIter == 0:
                    labelSave = self.modelLabel + ("_s%d_i%d" % (scale, i))
                    self.saveCheckpoint(self.checkPointDir,
                                        labelSave, scale, i)

            if i == maxIter:
                return True

        return True
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,615 | HaizhaoYang/generative_encoder | refs/heads/master | /dataloader/image_dataloader.py | import os
import numpy as np
from PIL import Image
import torch
from torch.utils.data import Dataset
import torchvision.transforms as Transforms
from models.utils.image_transform import NumpyResize, NumpyToTensor, NumpyReshape
class ImageDataset(Dataset):
    """
    A dataset class adapted to image folders.

    Each entry of *target* names a sub-folder of *pathdb*; __getitem__ loads
    the idx-th image of every sub-folder and returns them together.
    """

    def __init__(self,
                 pathdb,
                 target=("",),
                 transform=None):
        """
        Initializer for the image dataset.

        Args:
            pathdb (str): root directory of the database.
            target (sequence of str): sub-folder names to read from; the
                default ("",) reads directly from pathdb. (A tuple default
                replaces the original mutable list default.)
            transform (callable or None): optional transform applied to every
                loaded image.

        Raises:
            AttributeError: if the first target folder contains no image.
        """
        # set parameters
        self.pathdb = pathdb
        self.target = target
        self.transform = transform

        # one file list per target, keeping only recognised image extensions
        self.listImg = [[imgName for imgName in os.listdir(os.path.join(pathdb, target_name))
                         if os.path.splitext(imgName)[1] in [".jpg", ".png",
                                                             ".npy"]] for target_name in self.target]

        if len(self.listImg[0]) == 0:
            raise AttributeError("Empty dataset found")

        print("%d images found" % len(self.listImg[0]))

    def __len__(self):
        return len(self.listImg[0])

    def __getitem__(self, idx):
        images = []
        for i in range(len(self.target)):
            imgName = self.listImg[i][idx]
            imgPath = os.path.join(self.pathdb, self.target[i], imgName)
            img_i = pil_loader(imgPath)

            if self.transform is not None:
                img_i = self.transform(img_i)

            images.append(img_i)

        # BUG FIX: the original returned None for more than two targets;
        # any multi-target request now yields a tuple of images.
        if len(images) == 1:
            return images[0]
        return tuple(images)
def pil_loader(path, color=True):
    """
    Load an image from *path* as a numpy array (.npy files) or a PIL image.

    Args:
        path (str): file to load; ".npy" files are treated as raw arrays
            stored channels-first and are returned channels-last.
        color (bool): for PIL files, convert to RGB when True, grayscale
            otherwise. Ignored for ".npy" files.
    """
    if os.path.splitext(path)[1] == ".npy":
        # first array of the file, moved from C x H x W to H x W x C
        raw = np.load(path)[0]
        return np.transpose(raw, (1, 2, 0))

    # open path as file to avoid ResourceWarning
    # (https://github.com/python-pillow/Pillow/issues/835)
    with open(path, 'rb') as stream:
        image = Image.open(stream)
        return image.convert('RGB' if color else 'L')
def getDataset(path_db, targets, size, modelConfig):
    """
    Build an ImageDataset resized to *size* with the standard normalisation.

    Args:
        path_db (str): root directory of the image database.
        targets (sequence of str): sub-folders to load (see ImageDataset).
        size: target (height, width) handed to NumpyResize.
        modelConfig: configuration object; only dimOutput is read here
            (1 = grayscale, otherwise RGB).
    """
    # in image datasets, we do resize, numpy to tensor, then normalize
    transformList = [NumpyResize(size),
                     NumpyToTensor()]

    # handle rgb vs grayscale images
    # BUG FIX: "(0.5)" is a parenthesised float, not a 1-tuple; Normalize
    # expects one value per channel, hence "(0.5,)".
    if modelConfig.dimOutput == 1:
        transformList = [Transforms.Grayscale(1)] + transformList + [Transforms.Normalize((0.5,), (0.5,))]
    else:
        transformList = transformList + [Transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]

    transform = Transforms.Compose(transformList)

    return ImageDataset(path_db,
                        target=targets,
                        transform=transform)
def standardTransform(size, dimOutput=3, transform_type='image'):
    """
    Build the standard (input, output) transform pair for a media type.

    Args:
        size: target (height, width) handed to NumpyResize.
        dimOutput (int): 1 for grayscale, otherwise RGB.
        transform_type (str): only 'image' is supported; any other value
            returns None (kept for backward compatibility).

    Returns:
        (inTransform, outTransform): inTransform maps raw images to
        normalised tensors in [-1, 1]; outTransform maps such tensors back
        to PIL images.
    """
    if transform_type == 'image':
        # in image datasets, we do resize, numpy to tensor, then normalize
        transformList = [NumpyResize(size),
                         NumpyToTensor()]

        # BUG FIX: "(0.5)" / "(-1.)" / "(2)" are parenthesised scalars, not
        # 1-tuples; Normalize expects one value per channel.
        if dimOutput == 1:
            transformList = [Transforms.Grayscale(1)] + transformList + [Transforms.Normalize((0.5,), (0.5,))]
        else:
            transformList = transformList + [Transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]

        inTransform = Transforms.Compose(transformList)

        if dimOutput == 1:
            outTransform = Transforms.Compose([Transforms.Normalize((-1.,), (2,)),
                                               Transforms.ToPILImage()])
        else:
            outTransform = Transforms.Compose([Transforms.Normalize((-1., -1., -1.), (2, 2, 2)),
                                               Transforms.ToPILImage()])

        return inTransform, outTransform
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,616 | HaizhaoYang/generative_encoder | refs/heads/master | /test_generative_encoder.py | """
@author: Yong Zheng Ong
the main function for running the ge model
"""
import importlib
import argparse
import sys
if __name__ == "__main__":
    # argparser
    parser = argparse.ArgumentParser(description='Testing script', add_help=False)
    parser.add_argument('evaluation_name', type=str,
                        help='Name of the evaluation method to launch. To get \
                        the arguments specific to an evaluation method please \
                        use: eval.py evaluation_name -h')
    parser.add_argument('evaluation_id', type=str,
                        help='Identifier of the evaluation to launch. The result will be saved in "result/evaluation_id')
    # args for generative_encoder
    parser.add_argument('-position', type=int, required=False)  # file name
    # NOTE(review): passing -with_ae on the command line flips the flag to
    # False (store_false with default=True) -- confirm this inversion is
    # intended.
    parser.add_argument('-with_ae', default=True, action='store_false')
    parser.add_argument('-gan_name', type=str, required=False)
    parser.add_argument('-ae_name', type=str, required=False)
    args = parser.parse_args()

    # validity check for args
    if args.evaluation_name in ['celeba']:
        assert args.position is not None, "position field should not be empty!"
        # NOTE(review): with_ae is always a bool here, so this assert can
        # never fire.
        assert args.with_ae is not None, "with_ae field should not be empty!"
        module = importlib.import_module("tests.generative_encoder.generate_images")
    else:
        raise ValueError("evaluation name provided is invalid")

    print("Running " + args.evaluation_name)

    module.test(parser)
43,617 | HaizhaoYang/generative_encoder | refs/heads/master | /config.py | """
file containing constants for configurations
"""
# list of available models implemented currently
# maps model name -> (module path under models.networks, trainer class name)
AVAILABLE_MODELS = {
    # GAN Based Models
    "PGAN": ("progressive_gan.trainer", "ProgressiveGANTrainer"),
    # AE Based Models
    "VAE": ("vae.trainer", "VAETrainer")
}

# list of available fields for dbType implemented currently
# BUG FIX: '("image")' is a parenthesised string, not a tuple, so membership
# tests matched ANY substring of "image" (e.g. "mag"); the trailing comma
# makes it a real 1-tuple.
AVAILABLE_DBTYPES = (
    "image",
)
43,618 | HaizhaoYang/generative_encoder | refs/heads/master | /visualization/image_visualizer.py | """
@author: Yong Zheng Ong
the main visualizer package for images
"""
import visdom
import torch
import torchvision.transforms as Transforms
import torchvision.utils as vutils
import numpy as np
import random
# shared visdom client; connects to a running visdom server at import time
vis = visdom.Visdom()
def resizeTensor(data, out_size_image):
    """
    Postprocess an image batch before publishing/saving: clamp to [-1, 1],
    de-normalise to [0, 1] and resize every image to *out_size_image*.

    Args:
        data (Tensor): (batch_size, num_channels, height, width) raw output.
        out_size_image (tuple): expected (height, width) of the output.

    Returns:
        Tensor of shape (batch_size, num_channels, *out_size_image).
    """
    out_data_size = (data.size()[0], data.size()[
        1], out_size_image[0], out_size_image[1])

    outdata = torch.empty(out_data_size)
    data = torch.clamp(data, min=-1, max=1)

    # interpolation: 0 = NEAREST by default, 2 = BILINEAR when downscaling
    # BUG FIX: the original compared the target size against data.size()[0]
    # and data.size()[1] (batch and channel dims); the spatial dims are
    # indices 2 and 3.
    interpolationMode = 0
    if out_size_image[0] < data.size()[2] and out_size_image[1] < data.size()[3]:
        interpolationMode = 2

    # handle single channel image
    # (also fixed "(-1.)"/"(2)" scalars into proper 1-tuples for Normalize)
    if out_data_size[1] == 1:
        transform = Transforms.Compose([Transforms.Normalize((-1.,), (2,)),
                                        Transforms.ToPILImage(),
                                        Transforms.Resize(
                                            out_size_image, interpolation=interpolationMode),
                                        Transforms.ToTensor()])
    else:
        transform = Transforms.Compose([Transforms.Normalize((-1., -1., -1.), (2, 2, 2)),
                                        Transforms.ToPILImage(),
                                        Transforms.Resize(
                                            out_size_image, interpolation=interpolationMode),
                                        Transforms.ToTensor()])

    for img in range(out_data_size[0]):
        outdata[img] = transform(data[img])

    return outdata
def publishTensors(data, out_size_image, caption="", window_token=None, env="main", nrow=16):
    """
    Push a batch of images to the visdom server as one grid.

    Args:
        data (Tensor): (batch_size, num_channels, height, width) raw images.
        out_size_image (tuple): (height, width) of the visualization window.

    Returns:
        The visdom window token of the published grid.
    """
    global vis
    # resize/de-normalise the batch before handing it to visdom
    prepared = resizeTensor(data, out_size_image)
    return vis.images(prepared,
                      opts=dict(caption=caption),
                      win=window_token,
                      env=env,
                      nrow=nrow)
def saveTensor(data, out_size_image, path):
    """
    Write a batch of images to *path* as a single grid image.

    Args:
        data (Tensor): (batch_size, num_channels, height, width) raw images.
        out_size_image (tuple): (height, width) each image is resized to.
        path (str): destination file.
    """
    # resize/de-normalise, then let torchvision assemble and save the grid
    prepared = resizeTensor(data, out_size_image)
    vutils.save_image(prepared, path)
def publishLoss(data, name="", window_tokens=None, env="main"):
    """
    Plot every loss curve contained in *data* on the visdom server.

    Args:
        data (dict): maps each loss name to its list of values; the special
            keys "iter" (x axis) and "scale" (plot title) are metadata.
        window_tokens (dict or None): visdom window per loss name, reused
            across calls so curves update in place.

    Returns:
        The (possibly newly created) window_tokens dict.
    """
    if window_tokens is None:
        window_tokens = {key: None for key in data}

    for key, plot in data.items():
        # metadata keys are not curves
        if key in ("scale", "iter"):
            continue

        # drop missing samples while keeping x and y aligned
        nItems = len(plot)
        ys = np.array([plot[x] for x in range(nItems) if plot[x] is not None])
        xs = np.array([data["iter"][x] for x in range(nItems) if plot[x] is not None])

        opts = {'title': key + (' scale %d loss over time' % data["scale"]),
                'legend': [key], 'xlabel': 'iteration', 'ylabel': 'loss'}
        window_tokens[key] = vis.line(X=xs, Y=ys, opts=opts,
                                      win=window_tokens[key], env=env)

    return window_tokens
def delete_env(name):
    """Delete the visdom environment called *name* on the server."""
    vis.delete_env(name)
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,619 | HaizhaoYang/generative_encoder | refs/heads/master | /train.py | """
@author: Yong Zheng Ong
the main function for training models
"""
import os
import sys
import importlib
import argparse
# choosing CUDA environments
# pin device enumeration order to the PCI bus and expose only GPU 0
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="0"
import json # for loading of configurations
from config import * # import global variables
from models.utils.utils import loadmodule, getLastCheckPoint
from models.utils.config import getConfigOverrideFromParser, updateParserWithConfig
def getTrainer(name):
    """
    Resolve *name* to its trainer class via the AVAILABLE_MODELS registry.

    Raises:
        AttributeError: if *name* is not a registered model.
    """
    try:
        module_name, class_name = AVAILABLE_MODELS[name]
    except KeyError:
        raise AttributeError("Invalid module name")
    return loadmodule("models.networks." + module_name,
                      class_name,
                      prefix='')
if __name__ == "__main__":
    # build parser
    parser = argparse.ArgumentParser(description='Training script')
    parser.add_argument('model_name', type=str,
                        help='Name of the model to launch, available models are\
                        {}. To get all possible option for a model\
                        please run train.py $MODEL_NAME -overrides'.format(", ".join(AVAILABLE_MODELS.keys())))
    parser.add_argument('--no_vis', help=' Disable all visualizations',
                        action='store_true')
    parser.add_argument('--restart', help=' If a checkpoint is detected, do \
                        not try to load it',
                        action='store_true')
    parser.add_argument('-n', '--name', help="Model's name",
                        type=str, dest="name", default="default")
    parser.add_argument('-d', '--dir', help='Output directory',
                        type=str, dest="dir", default='output_networks')
    # BUG FIX: help text was copy-pasted from --name ("Model's name")
    parser.add_argument('-c', '--config', help="Path to the training configuration file",
                        type=str, dest="configPath")
    parser.add_argument('-s', '--save_iter', help="If it applies, frequence at\
                        which a checkpoint should be saved. In the case of a\
                        evaluation test, iteration to work on.",
                        type=int, dest="saveIter", default=10000)
    parser.add_argument('-e', '--eval_iter', help="If it applies, frequence at\
                        which a checkpoint should be saved",
                        type=int, dest="evalIter", default=100)

    # retrieve the model we want
    baseArgs, unknown = parser.parse_known_args()
    trainerModule = getTrainer(baseArgs.model_name)

    # build the output directory if needed
    if not os.path.isdir(baseArgs.dir):
        os.mkdir(baseArgs.dir)

    # add the model-specific overrides to the parser, then re-parse
    parser = updateParserWithConfig(parser, trainerModule._defaultConfig)
    kwargs = vars(parser.parse_args())
    configOverride = getConfigOverrideFromParser(
        kwargs, trainerModule._defaultConfig)

    # -overrides: print the per-model options and exit
    if kwargs['overrides']:
        parser.print_help()
        sys.exit()

    # load checkpoint data
    modelLabel = kwargs["name"]
    restart = kwargs["restart"]
    checkPointDir = os.path.join(kwargs["dir"], modelLabel)
    checkPointData = getLastCheckPoint(checkPointDir, modelLabel)

    if not os.path.isdir(checkPointDir):
        os.mkdir(checkPointDir)

    # training configurations
    configPath = kwargs.get("configPath", None)
    if configPath is None:
        raise ValueError("You need to input a configuration file")
    with open(kwargs["configPath"], 'rb') as file:
        trainingConfig = json.load(file)

    # model configuration: command-line overrides win over the config file
    modelConfig = trainingConfig.get("config", {})
    for item, val in configOverride.items():
        modelConfig[item] = val
    trainingConfig["config"] = modelConfig

    # setup visualization module for the database type
    vis_module = None
    if baseArgs.no_vis:
        print("Visualization disabled")
    else:
        if trainingConfig["dbType"] in AVAILABLE_DBTYPES:
            vis_module = importlib.import_module("visualization.{}_visualizer".format(trainingConfig["dbType"]))
        else:
            raise NotImplementedError("Visualizer for given dbType {} is not implemented".format(trainingConfig["dbType"]))

    # setup dataloader module for the database type
    dat_module = None
    if trainingConfig["dbType"] in AVAILABLE_DBTYPES:
        dat_module = importlib.import_module("dataloader.{}_dataloader".format(trainingConfig["dbType"]))
    else:
        raise NotImplementedError("Dataloader for given dbType {} is not implemented".format(trainingConfig["dbType"]))

    print("Running " + baseArgs.model_name)

    # path to the image dataset (removed from the config before it is
    # forwarded as keyword arguments)
    pathDB = trainingConfig["pathDB"]
    trainingConfig.pop("pathDB", None)

    # load trainer
    Trainer = trainerModule(pathDB,
                            useGPU=True,
                            visualisation=vis_module,
                            dataloader=dat_module,
                            lossIterEvaluation=kwargs["evalIter"],
                            checkPointDir=checkPointDir,
                            saveIter=kwargs["saveIter"],
                            modelLabel=modelLabel,
                            **trainingConfig)

    # if a checkpoint is found, load it
    if not restart and checkPointData is not None:
        trainConfig, pathModel, pathTmpData = checkPointData
        print(f"Model found at path {pathModel}, pursuing the training")
        Trainer.loadSavedTraining(pathModel, trainConfig, pathTmpData)

    Trainer.train()
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,620 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/gan/loss_criterions/base_loss_criterions.py | import torch
import torch.nn.functional as F
class BaseLossWrapper:
    """Abstract base for GAN loss criterions.

    Concrete subclasses are expected to provide:
      * ``sizeDecisionLayer`` -- width of the discriminator's decision layer
      * ``getCriterion``      -- the actual loss computation
    Note: the discriminator's output activation is applied inside the loss
    itself, not inside the network.
    """

    def __init__(self, device):
        # Device on which loss tensors should live.
        self.device = device

    def getCriterion(self, input, status):
        """Build the loss for *input* given its target *status*.

        Args:
            input (Tensor): decision tensor produced by the model's
                discriminator.
            status (bool): True if this tensor should have been detected
                as a real input, False otherwise.
        """
        pass
class WGANGP(BaseLossWrapper):
    """WGAN-GP loss (https://arxiv.org/pdf/1704.00028.pdf).

    The generator uses a linear (identity) output activation, and the
    discriminator emits a single scalar critic score.
    """

    def __init__(self, device):
        self.generationActivation = None  # linear output for the generator
        self.sizeDecisionLayer = 1        # single critic score
        BaseLossWrapper.__init__(self, device)

    def getCriterion(self, input, status):
        # Wasserstein critic objective: the score is maximised on real
        # samples and minimised on fakes, hence the sign flip.
        score_sum = input[:, 0].sum()
        return -score_sum if status else score_sum
43,621 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/vae/trainer.py | import os
from .standard_configurations.vae_config import _C
from .vae import VAE
from ..ae.ae_trainer import AETrainer
class VAETrainer(AETrainer):
    """Trainer for a VAE.

    Handles logs, checkpoints, visualisation and iteration bookkeeping on
    top of the generic auto-encoder trainer.
    """

    _defaultConfig = _C

    def getDefaultConfig(self):
        return VAETrainer._defaultConfig

    def __init__(self, pathdb, **kwargs):
        """Initialise the underlying AE trainer and register the loss profile."""
        AETrainer.__init__(self, pathdb, **kwargs)
        self.lossProfile.append({"iter": [], "scale": 0})

    def initModel(self):
        """Instantiate the VAE model from the current configuration."""
        self.model = VAE(useGPU=self.useGPU,
                         config=dict(vars(self.modelConfig)))

    def train(self):
        """Run the full training loop, then save a final checkpoint."""
        # Resume the iteration counter when restarting from a checkpoint.
        shift = self.startIter if self.startIter > 0 else 0

        if self.checkPointDir is not None:
            baseConfigPath = os.path.join(
                self.checkPointDir,
                self.modelLabel + "_train_config.json")
            self.saveBaseConfig(baseConfigPath)

        # Upper bound on the total number of iterations.
        maxShift = int(self.modelConfig.nEpoch * len(self.getDBLoader(0)))

        for _ in range(self.modelConfig.nEpoch):
            loader = self.getDBLoader(0)
            self.trainOnEpoch(loader, 0, shiftIter=shift)
            shift += len(loader)
            if shift > maxShift:
                break

        label = "%s_s%d_i%d" % (self.modelLabel, 0, shift)
        self.saveCheckpoint(self.checkPointDir, label, 0, shift)
43,622 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/vae/vae.py | import torch.optim as optim
from ..ae.base_AE import BaseAE
from ...utils.config import BaseConfig
from ...utils.utils import finiteCheck
from .networks.vae_net import ENet, DNet
class VAE(BaseAE):
    """
    Variational auto-encoder built on top of BaseAE.

    The encoder (netE) maps an image to a latent distribution (mu, logvar)
    plus a sampled latent; the decoder (netD) reconstructs an image from
    that latent.
    """
    def __init__(self,
                 config=None,
                 **kwargs):
        """
        Initialize VAE.

        Args:
            config (dict): must contain "dimE", "dimD" and "imageSize";
                the remaining keys are consumed by BaseAE.__init__.
        """
        # Create the config holder first so the attributes below can be set
        # before BaseAE.__init__ runs.
        if not 'config' in vars(self):
            self.config = BaseConfig()
        self.config.dimE = config["dimE"]  # encoder base feature width
        self.config.dimD = config["dimD"]  # decoder base feature width
        self.config.imageSize = config["imageSize"]  # square image side length
        BaseAE.__init__(self, config=config, **kwargs)
    def getNetD(self):
        """Build the decoder network."""
        dnet = DNet(self.config,
                    generationActivation=self.lossCriterion.generationActivation)
        return dnet
    def getNetE(self):
        """Build the encoder network."""
        enet = ENet(self.config)
        return enet
    def getOptimizerD(self):
        """Adam optimizer over the decoder's trainable parameters."""
        return optim.Adam(filter(lambda p: p.requires_grad, self.netD.parameters()),
                          betas=[0.5, 0.999], lr=self.config.learningRate)
    def getOptimizerE(self):
        """Adam optimizer over the encoder's trainable parameters."""
        return optim.Adam(filter(lambda p: p.requires_grad, self.netE.parameters()),
                          betas=[0.5, 0.999], lr=self.config.learningRate)
    def getOptimizerAE(self):
        """Single Adam optimizer jointly covering encoder and decoder."""
        params = []
        for param in self.netE.parameters():
            if param.requires_grad:
                params.append(param)
        for param in self.netD.parameters():
            if param.requires_grad:
                params.append(param)
        return optim.Adam(params,
                          betas=[0.5, 0.999], lr=self.config.learningRate)
    def optimizeParameters(self, input_batch, input_target):
        """
        Run one joint optimisation step of encoder + decoder (VAE).

        Args:
            input_batch (torch.tensor): input batch of real data
            input_target (torch.tensor): reconstruction targets

        Returns:
            dict: loss values for logging under key "lossAE".
        """
        allLosses = {}
        # Retrieve the input data
        self.real_input = input_batch.to(self.device)
        self.target_input = input_target.to(self.device)
        n_samples = self.real_input.size()[0]
        # Update the encoder and discriminator
        self.optimizerAE.zero_grad()
        # #1 Predicted data (require latent and actual)
        predLatent, predMu, predVar = self.netE(self.real_input) # in VAE, latent layer consist of mu and var
        predReal = self.netD(predLatent)
        # #2 Compute loss between real and target (reconstruction + KL)
        lossD = self.lossCriterion.getCriterion(predReal, predMu, predVar, self.target_input)
        allLosses["lossAE"] = lossD.item()
        lossD.backward(retain_graph=True)
        # Guard against non-finite gradients before stepping.
        finiteCheck(self.getOriginalD().parameters())
        self.optimizerAE.step()
        # Logs: aggregate every "lossAE*" entry into a single value.
        lossD = 0
        for key, val in allLosses.items():
            if key.find("lossAE") == 0:
                lossD += val
        allLosses["lossAE"] = lossD
        return allLosses
    def getSize(self):
        """Return the (height, width) of images handled by this model."""
        size = self.config.imageSize
        return (size, size)
    def test(self, input, getAvG=False, toCPU=True):
        """
        Reconstruct data: encode *input*, then decode the sampled latent.

        Args:
            input (torch.tensor): batch of data to reconstruct
            getAvG (bool): use the moving-average networks instead of the
                current ones
            toCPU (bool): move the result back to the CPU
        """
        input = input.to(self.device)
        if getAvG:
            if toCPU:
                return self.avgD(self.avgE(input)[0]).cpu()
            else:
                return self.avgD(self.avgE(input)[0])
        elif toCPU:
            return self.netD(self.netE(input)[0]).detach().cpu()
        else:
            return self.netD(self.netE(input)[0]).detach()
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,623 | HaizhaoYang/generative_encoder | refs/heads/master | /tests/digital_rock_images/generate_digital_rock_images.py | """
@author: Yong Zheng Ong
This code implements the GE framework
"""
import os
import sys
import importlib
import argparse
import json
import pickle as pkl
import numpy as np
from PIL import Image
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="1"
import torch
import torchvision.transforms as Transforms
from models.utils.utils import getLastCheckPoint, loadmodule, \
parse_state_name, getNameAndPackage
from models.utils.image_transform import NumpyResize, NumpyToTensor
from models.utils.config import getConfigOverrideFromParser, \
updateParserWithConfig
from dataloader.image_dataloader import pil_loader, standardTransform
import json
def test(parser):
    """Run the generative-encoder search for one digital-rock slice.

    Loads a trained PGAN generator (and, when ``--with_ae`` is set, a VAE
    encoder), then optimises a latent vector so that the generated image
    matches the test slice. Saves the reference image, the best
    reconstruction, the per-iteration error curve and the best latent
    vector under ./results.

    Args:
        parser (argparse.ArgumentParser): parser providing
            digital_rock_position, with_ae, evaluation_id, gan_name and
            (optionally) ae_name.
    """
    args = parser.parse_args()
    key = '{}_0'.format(args.digital_rock_position)
    image_to_test = 'dataset/digital_rock_images_test/slice_{}.jpg'.format(key)
    with_ae = args.with_ae
    evaluation_id = args.evaluation_id
    if not os.path.exists('./results/{}'.format(evaluation_id)):
        os.makedirs('./results/{}'.format(evaluation_id))
    # load config data
    with open('config_digital_rock.json', 'rb') as file:
        trainingConfig = json.load(file)
    if not os.path.exists('./results/plots'):
        os.makedirs('./results/plots')
    # get required fields
    modelConfig = trainingConfig.get("config")
    dimOutput = modelConfig["dimOutput"]
    # ------------------------------------------------------------------
    # Load the pre-trained GAN generator.
    # ------------------------------------------------------------------
    print('loading GAN model...')
    default_dir = "output_networks"
    config_gan = {
        'name': args.gan_name,  # model name (str)
        'module': 'PGAN',
        'scale': 6,             # scale to evaluate (int)
        'iter': 200000,         # iteration to evaluate (int)
    }
    # get checkpoint data
    checkPointDir = os.path.join(default_dir, config_gan["name"])
    checkpointData = getLastCheckPoint(
        checkPointDir, config_gan["name"], scale=config_gan["scale"], iter=config_gan["iter"])
    if checkpointData is None:
        print(config_gan["scale"], config_gan["iter"])
        if config_gan["scale"] is not None or config_gan["iter"] is not None:
            raise FileNotFoundError("Not checkpoint found for model "
                                    + config_gan["name"] + " at directory " + default_dir +
                                    " for scale " + str(config_gan["scale"]) +
                                    " at iteration " + str(config_gan["iter"]))
        raise FileNotFoundError(
            "Not checkpoint found for model " + config_gan["name"] + " at directory "
            + default_dir)
    modelConfig, pathModel, _ = checkpointData
    with open(modelConfig, 'rb') as file:
        configData = json.load(file)
    modelPackage, modelName = getNameAndPackage(config_gan["module"])
    modelType = loadmodule(modelPackage, modelName)
    gan_model = modelType(useGPU=True,
                          storeAVG=True,
                          **configData)
    if config_gan["scale"] is None or config_gan["iter"] is None:
        _, config_gan["scale"], config_gan["iter"] = parse_state_name(pathModel)
    print("Checkpoint found at scale %d, iter %d" % (config_gan["scale"], config_gan["iter"]))
    gan_model.load(pathModel, loadD=False)
    if with_ae:
        # --------------------------------------------------------------
        # Load the pre-trained VAE encoder.
        # --------------------------------------------------------------
        print('loading AE model...')
        default_dir = "output_networks"
        config_ae = {
            'name': args.ae_name,  # model name (str)
            'module': 'VAE',
            'scale': 0,            # scale to evaluate (int)
            'iter': 32800,         # iteration to evaluate (int)
        }
        # get checkpoint data
        checkPointDir = os.path.join(default_dir, config_ae["name"])
        checkpointData = getLastCheckPoint(
            checkPointDir, config_ae["name"], scale=config_ae["scale"], iter=config_ae["iter"])
        if checkpointData is None:
            print(config_ae["scale"], config_ae["iter"])
            if config_ae["scale"] is not None or config_ae["iter"] is not None:
                raise FileNotFoundError("Not checkpoint found for model "
                                        + config_ae["name"] + " at directory " + default_dir +
                                        " for scale " + str(config_ae["scale"]) +
                                        " at iteration " + str(config_ae["iter"]))
            # BUGFIX: this message previously reported config_gan["name"].
            raise FileNotFoundError(
                "Not checkpoint found for model " + config_ae["name"] + " at directory "
                + default_dir)
        modelConfig, pathModel, _ = checkpointData
        with open(modelConfig, 'rb') as file:
            configData = json.load(file)
        modelPackage, modelName = getNameAndPackage(config_ae["module"])
        modelType = loadmodule(modelPackage, modelName)
        ae_model = modelType(useGPU=True,
                             storeAVG=False,
                             **configData)
        if config_ae["scale"] is None or config_ae["iter"] is None:
            _, config_ae["scale"], config_ae["iter"] = parse_state_name(pathModel)
        print("Checkpoint found at scale %d, iter %d" % (config_ae["scale"], config_ae["iter"]))
        ae_model.load(pathModel, loadD=False)
    # load the image
    size = gan_model.getSize()
    print("size", size)
    # build standard transform
    inTransform, outTransform = standardTransform(size, dimOutput)
    image_real = inTransform(pil_loader(image_to_test)).view(1, dimOutput, *size).to('cuda:0')
    print("image shape:", image_real.size(), "| min:", np.min(image_real.detach().cpu().numpy()), "| max:", np.max(image_real.detach().cpu().numpy()))
    if with_ae:
        encoded_real = ae_model.netE(image_real)[0].detach()
        print("vector shape:", encoded_real.size(), "| min:", np.min(encoded_real.cpu().numpy()), "| max:", np.max(encoded_real.cpu().numpy()))
    transform = outTransform
    # Save the (clamped) reference image for visual comparison.
    image_real_save = torch.clamp(image_real.cpu(), min=-1, max=1)
    transform(image_real_save[0]).save("./results/{}/bulk_to_test_{}.jpg".format(evaluation_id, key))
    num_to_find = 1000
    # get random start vector from 1000 points
    start_choices = gan_model.buildNoiseData(num_to_find)
    start = start_choices[0:1, :]
    score = float('inf')  # lower is better; any real error beats this
    # find best start vector
    for i in range(num_to_find):
        start_generation = gan_model.avgG(start_choices[i:i + 1, :])
        error = torch.nn.MSELoss()(start_generation, image_real).cpu().detach().item()
        if error < score:
            score = error
            start = start_choices[i:i + 1, :]
    print("final best: ", score, " | start shape: ", start.shape)

    class start_vector(torch.nn.Module):
        """Wraps the latent vector as a trainable parameter."""

        def __init__(self, start):
            super(start_vector, self).__init__()
            self.vector = torch.nn.Parameter(start, requires_grad=True)

        def forward(self):
            return self.vector

    sv = start_vector(start).to('cuda:0')
    ge_optimizer = torch.optim.Adam(sv.parameters(), lr=0.01)
    best = 100
    best_vector = sv.vector.cpu().detach().numpy()
    errors = []
    threshold = 0.000
    lamb = 0.001  # weight of the L1 sparsity regulariser on the latent
    i = 0
    while best > threshold:
        ge_optimizer.zero_grad()
        output = gan_model.avgG(sv())
        if with_ae:
            encoded = ae_model.netE(output)[0]
            reconstruction = torch.nn.MSELoss()(output, image_real)
            # BUGFIX: the L1 regulariser must act on the latent tensor sv(),
            # not on the Module sv itself (torch.zeros_like on a Module raises).
            encoded = torch.nn.MSELoss()(encoded, encoded_real) + lamb * torch.nn.L1Loss()(sv(), torch.zeros_like(sv()).to('cuda:0'))
            loss = encoded
        else:
            reconstruction = torch.nn.MSELoss()(output, image_real)
            loss = reconstruction
        loss.backward()
        ge_optimizer.step()
        errors.append(reconstruction.cpu().detach().item())
        if (i + 1) % 100 == 0:
            # Keep the best reconstruction seen so far.
            # (The former "if best is None" branch was dead: best starts at 100.)
            if best >= reconstruction.cpu().detach().item():
                best = reconstruction.cpu().detach().item()
                transform(torch.clamp(output.detach().cpu(), min=-1, max=1)[0]).save("./results/{}/bulk_test_{}.jpg".format(evaluation_id, key))
                best_vector = sv.vector.cpu().detach().numpy()
        i += 1
        if i == 10000:
            break
    print("best: ", best)
    np.savetxt("./results/plots/{}_{}.csv".format(evaluation_id, key), errors, delimiter=", ", fmt='% s')
    np.save("./results/{}/vector_{}.npy".format(evaluation_id, key), best_vector)
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,624 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/ae/loss_criterions/base_loss_criterions.py | import torch
import torch.nn.functional as F
class BaseLossWrapper:
    """Base class for auto-encoder loss criterions.

    Subclasses must define:
      sizeDecisionLayer -- size of the decision layer of the discriminator
      getCriterion      -- how the loss is actually computed
    The discriminator's activation function is computed within the loss.
    """

    def __init__(self, device):
        self.device = device  # target device for loss tensors

    def getCriterion(self, input, status):
        """Compute the loss for *input* given its expected *status*.

        Args:
            input (Tensor): decision tensor built by the model's
                discriminator.
            status (bool): whether *input* should have been detected as a
                real sample (True) or not (False).
        """
        pass
class AE_MSE(BaseLossWrapper):
    """
    MSE-based loss wrapper for a plain auto-encoder.

    The decoder's output activation is tanh (applied by the model via
    ``generationActivation``); only the first ``sizeDecisionLayer``
    channels of the prediction are compared against the target.
    """
    def __init__(self, device):
        # torch.tanh replaces the deprecated F.tanh (identical behavior;
        # torch.nn.functional.tanh has been deprecated since PyTorch 0.4.1).
        self.generationActivation = torch.tanh
        self.sizeDecisionLayer = 3
        BaseLossWrapper.__init__(self, device)

    def getCriterion(self, input, mu, var, target):
        """Mean-squared reconstruction error; *mu* and *var* are unused here
        (kept for signature compatibility with the VAE criterion)."""
        # reconstruction loss
        recon_loss = F.mse_loss(input[:, :self.sizeDecisionLayer], target)
        return recon_loss
class VAE_MSE(BaseLossWrapper):
    """MSE reconstruction plus KL-divergence loss for a VAE."""

    def __init__(self, device):
        self.generationActivation = None  # linear decoder output
        self.sizeDecisionLayer = 3
        BaseLossWrapper.__init__(self, device)

    def getCriterion(self, input, mu, var, target):
        """Return recon MSE plus the KL term scaled by 1/(batch * latent_dim).

        Args:
            input (Tensor): decoder output.
            mu (Tensor): latent means, shape (batch, latent_dim).
            var (Tensor): latent log-variances, same shape as *mu*.
            target (Tensor): reconstruction target.
        """
        # Normalisation constant for the KL term.
        kld_weight = mu.size()[1] * mu.size()[0]
        recon_loss = F.mse_loss(input, target)
        # KL(N(mu, exp(var)) || N(0, 1)), summed over latent dims and
        # averaged over the batch.
        kld_per_sample = -0.5 * torch.sum(1 + var - mu ** 2 - var.exp(), dim=1)
        kld_loss = torch.mean(kld_per_sample, dim=0)
        return recon_loss + (1 / kld_weight) * kld_loss
43,625 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/gan/base_GAN.py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from copy import deepcopy
import torch
import torch.nn as nn
from ..base.base_NET import BaseNET
from ...utils.config import BaseConfig, updateConfig
from .loss_criterions import base_loss_criterions
from .loss_criterions.gradient_losses import WGANGPGradientPenalty
from ...utils.utils import loadPartOfStateDict, finiteCheck, \
loadStateDictCompatible
class BaseGAN(BaseNET):
    """Abstract class: the basic framework for GAN training.

    Manages the generator (netG), discriminator (netD), their optimizers,
    a moving-average copy of the generator (avgG) and the loss criterion.
    Concrete subclasses supply networks and optimizers through
    getNetG / getNetD / getOptimizerG / getOptimizerD.
    """
    def __init__(self,
                 config=None,
                 **kwargs):
        """
        Initialize the BaseGAN.

        In addition to the base parameters, the config dict must supply:
          - dimLatentVector: size of the latent noise vector
          - lossMode: loss criterion name (currently only 'WGANGP')
          - lambdaGP: weight of the WGAN-GP gradient penalty
          - epsilonD: weight of the drift penalty on D's real output
        """
        BaseNET.__init__(self, config=config, **kwargs)
        self.valid_losses = ['WGANGP']
        if config["lossMode"] not in self.valid_losses:
            raise ValueError(
                "lossMode should be one of the following : {}".format(self.valid_losses))
        # Latent vector dimension
        self.config.noiseVectorDim = config["dimLatentVector"]
        self.config.latentVectorDim = self.config.noiseVectorDim
        # Loss criterion
        self.config.lossCriterion = config["lossMode"]
        self.lossCriterion = getattr(
            base_loss_criterions, config["lossMode"])(self.device)
        # WGAN-GP gradient penalty weight
        self.config.lambdaGP = config["lambdaGP"]
        # Weight on D's output (drift penalty)
        self.config.epsilonD = config["epsilonD"]
        # Initialize the generator and the discriminator
        self.netD = self.getNetD()
        self.netG = self.getNetG()
        # Move the networks to the gpu
        self.updateSolversDevice()

    def test(self, input, getAvG=False, toCPU=True):
        """
        Generate some data given the input latent vector.

        Args:
            input (torch.tensor): input latent vector
            getAvG (bool): use the moving-average generator
            toCPU (bool): move the result back to the CPU
        """
        input = input.to(self.device)
        if getAvG:
            if toCPU:
                return self.avgG(input).cpu()
            else:
                return self.avgG(input)
        elif toCPU:
            return self.netG(input).detach().cpu()
        else:
            return self.netG(input).detach()

    def buildAvG(self):
        """
        Create and upload a moving average generator.
        """
        self.avgG = deepcopy(self.getOriginalG())
        for param in self.avgG.parameters():
            param.requires_grad = False
        if self.useGPU:
            self.avgG = nn.DataParallel(self.avgG)
            self.avgG.to(self.device)

    def optimizeParameters(self, input_batch):
        """
        Perform a single optimization step: D update, then G update, then
        an EMA update of the average generator.

        Args:
            input_batch (torch.tensor): input batch of real data

        Returns:
            dict: individual and aggregated loss values for logging.
        """
        allLosses = {}
        # Retrieve the input data
        self.real_input = input_batch.to(self.device)
        n_samples = self.real_input.size()[0]
        # A: Update the discriminator
        self.optimizerD.zero_grad()
        # #1 Real data
        predRealD = self.netD(self.real_input, False)
        lossD = self.lossCriterion.getCriterion(predRealD, True)
        allLosses["lossD_real"] = lossD.item()
        # #2 Fake data (detached: no gradient flows into G here)
        inputLatent = self.buildNoiseData(n_samples)
        predFakeG = self.netG(inputLatent).detach()
        predFakeD = self.netD(predFakeG, False) # TODO: investigate the False
        lossDFake = self.lossCriterion.getCriterion(predFakeD, False)
        allLosses["lossD_fake"] = lossDFake.item()
        lossD += lossDFake
        # #3 WGANGP gradient loss (backpropagated inside the helper)
        if self.config.lambdaGP > 0:
            allLosses["lossD_Grad"] = WGANGPGradientPenalty(self.real_input,
                                                            predFakeG,
                                                            self.netD,
                                                            self.config.lambdaGP,
                                                            backward=True)
        lossD.backward(retain_graph=True)
        # finiteCheck(self.getOriginalD().parameters()) # TODO: investigate this
        self.optimizerD.step()
        # #4 Epsilon loss
        # NOTE(review): computed after optimizerD.step(), so it only affects
        # the logged value, never the gradients -- confirm this is intended.
        if self.config.epsilonD > 0:
            lossEpsilon = (predRealD[:, 0] ** 2).sum() * self.config.epsilonD
            lossD += lossEpsilon
            allLosses["lossD_Epsilon"] = lossEpsilon.item()
        # Logs: aggregate all discriminator losses
        lossD = 0
        for key, val in allLosses.items():
            if key.find("lossD") == 0:
                lossD += val
        allLosses["lossD"] = lossD
        # B: Update the generator
        self.optimizerG.zero_grad()
        self.optimizerD.zero_grad()
        # #1 Image generation
        inputNoise = self.buildNoiseData(n_samples)
        predFakeG = self.netG(inputNoise)
        # #2 Status evaluation
        predFakeD, phiGFake = self.netD(predFakeG, True) # TODO: investigate the True
        lossGFake = self.lossCriterion.getCriterion(predFakeD, True)
        allLosses["lossG_fake"] = lossGFake.item()
        lossGFake.backward(retain_graph=True)
        # finiteCheck(self.getOriginalG().parameters()) # TODO: investigate this
        self.optimizerG.step()
        # Logs: aggregate all generator losses
        lossG = 0
        for key, val in allLosses.items():
            if key.find("lossG") == 0:
                lossG += val
        allLosses["lossG"] = lossG
        # Exponential moving average of the generator weights.
        for p, avg_p in zip(self.getOriginalG().parameters(),
                            self.getOriginalAvgG().parameters()):
            # add_(tensor, alpha=...) replaces the deprecated
            # add_(scalar, tensor) call signature (same arithmetic).
            avg_p.mul_(0.999).add_(p.data, alpha=0.001)
        return allLosses

    def updateSolversDevice(self, buildAvG=True):
        """
        Move the current networks and solvers to the GPU.
        This function must be called each time netG or netD is modified
        """
        if buildAvG:
            self.buildAvG()
        if not isinstance(self.netD, nn.DataParallel) and self.useGPU:
            self.netD = nn.DataParallel(self.netD)
        if not isinstance(self.netG, nn.DataParallel) and self.useGPU:
            self.netG = nn.DataParallel(self.netG)
        self.netD.to(self.device)
        self.netG.to(self.device)
        # Optimizers must be rebuilt after the networks change device.
        self.optimizerD = self.getOptimizerD()
        self.optimizerG = self.getOptimizerG()
        self.optimizerD.zero_grad()
        self.optimizerG.zero_grad()

    def buildNoiseData(self, n_samples):
        """
        Build a batch of latent vectors for the generator.

        Args:
            n_samples (int): number of vector in the batch
        """
        inputLatent = torch.randn(
            n_samples, self.config.noiseVectorDim).to(self.device)
        return inputLatent

    def getOriginalG(self):
        r"""
        Retrieve the original G network. Use this function
        when you want to modify G after the initialization
        """
        if isinstance(self.netG, nn.DataParallel):
            return self.netG.module
        return self.netG

    def getOriginalAvgG(self):
        r"""
        Retrieve the original avG network. Use this function
        when you want to modify avG after the initialization
        """
        if isinstance(self.avgG, nn.DataParallel):
            return self.avgG.module
        return self.avgG

    def getOriginalD(self):
        r"""
        Retrieve the original D network. Use this function
        when you want to modify D after the initialization
        """
        if isinstance(self.netD, nn.DataParallel):
            return self.netD.module
        return self.netD

    def getNetG(self):
        r"""
        The generator should be defined here.
        """
        pass

    def getNetD(self):
        r"""
        The discrimator should be defined here.
        """
        pass

    def getOptimizerD(self):
        r"""
        Optimizer of the discriminator.
        """
        pass

    def getOptimizerG(self):
        r"""
        Optimizer of the generator.
        """
        pass

    def getStateDict(self, saveTrainTmp=False):
        """
        Get the model's parameters as a serialisable dict.

        Args:
            saveTrainTmp (bool): also include the temporary training state.
        """
        # Get the generator's state
        stateG = self.getOriginalG().state_dict()
        # Get the discrimator's state
        stateD = self.getOriginalD().state_dict()
        out_state = {'config': self.config,
                     'netG': stateG,
                     'netD': stateD}
        # Average GAN
        out_state['avgG'] = self.getOriginalAvgG().state_dict()
        if saveTrainTmp:
            out_state['tmp'] = self.trainTmp
        return out_state

    def save(self, path, saveTrainTmp=False):
        """
        Save the model at the given location.

        All parameters included in the self.config class will be saved as well.
        Args:
            - path (string): file where the model should be saved
            - saveTrainTmp (bool): set to True if you want to conserve
                the training parameters
        """
        torch.save(self.getStateDict(saveTrainTmp=saveTrainTmp), path)

    def load(self,
             path="",
             in_state=None,
             loadG=True,
             loadD=True,
             loadConfig=True,
             finetuning=False):
        """
        Load a model saved with the @method save() function.

        Args:
            - path (string): file where the model is stored; only read
                when *in_state* is not provided
            - in_state (dict): optional pre-loaded state dict
        """
        # BUGFIX: in_state, loadConfig and finetuning were previously
        # ignored (path was always re-loaded, True/False were hardcoded).
        if in_state is None:
            in_state = torch.load(path)
        self.load_state_dict(in_state,
                             loadG=loadG,
                             loadD=loadD,
                             loadConfig=loadConfig,
                             finetuning=finetuning)

    def load_state_dict(self,
                        in_state,
                        loadG=True,
                        loadD=True,
                        loadConfig=True,
                        finetuning=False):
        """
        Load a model from a state dict produced by getStateDict().

        Args:
            - in_state (dict): state dict containing the model
            - loadG / loadD (bool): restore the generator / discriminator
            - loadConfig (bool): restore the stored configuration
            - finetuning (bool): re-initialise the format/decision layers
                instead of loading them (transfer learning)
        """
        # Step one : load the configuration
        if loadConfig:
            updateConfig(self.config, in_state['config'])
            self.lossCriterion = getattr(
                base_loss_criterions, self.config.lossCriterion)(self.device)
        # Re-initialize G and D with the loaded configuration
        buildAvG = True
        if loadG:
            self.netG = self.getNetG()
            if finetuning:
                loadPartOfStateDict(
                    self.netG, in_state['netG'], ["formatLayer"])
                self.getOriginalG().initFormatLayer(self.config.latentVectorDim)
            else:
                # Replace me by a standard loadStatedict for open-sourcing TODO
                loadStateDictCompatible(self.netG, in_state['netG'])
                if 'avgG' in in_state:
                    print("Average network found !")
                    self.buildAvG()
                    # Replace me by a standard loadStatedict for open-sourcing
                    loadStateDictCompatible(self.getOriginalAvgG(), in_state['avgG'])
                    buildAvG = False
        if loadD:
            self.netD = self.getNetD()
            if finetuning:
                loadPartOfStateDict(
                    self.netD, in_state['netD'], ["decisionLayer"])
                self.getOriginalD().initDecisionLayer(self.lossCriterion.sizeDecisionLayer)
            else:
                # Replace me by a standard loadStatedict for open-sourcing TODO
                loadStateDictCompatible(self.netD, in_state['netD'])
        # NOTE(review): 'tmp' is only restored when loadD is False -- looks
        # suspicious; confirm against the original training flow.
        elif 'tmp' in in_state.keys():
            self.trainTmp = in_state['tmp']
        # Don't forget to reset the machinery !
        self.updateSolversDevice(buildAvG)
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,626 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/base/base_NET.py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from copy import deepcopy
import torch
import torch.nn as nn
from ...utils.config import BaseConfig , updateConfig
class BaseNET():
    """Abstract base class: common scaffolding for network training.

    Selects the compute device, holds the configuration objects and
    declares the interface (optimizeParameters / save / load / ...) that
    concrete models implement.
    """

    def __init__(self,
                 config=None,
                 useGPU=True,
                 **kwargs):
        """Initialise shared configuration and pick the compute device.

        Args:
            config (dict): must provide "dimOutput", "baseLearningRate"
                and "dataType".
            useGPU (bool): request CUDA; silently falls back to CPU when
                no GPU is available.
        """
        if 'config' not in vars(self):
            self.config = BaseConfig()
        if 'trainTmp' not in vars(self):
            self.trainTmp = BaseConfig()
        self.useGPU = useGPU and torch.cuda.is_available()
        self.device = torch.device("cuda:0" if self.useGPU else "cpu")
        self.n_devices = torch.cuda.device_count() if self.useGPU else 1
        # Output image dimension
        self.config.dimOutput = config["dimOutput"]
        # Actual learning rate
        self.config.learningRate = config["baseLearningRate"]
        # Data type
        self.config.dataType = config["dataType"]

    def test(self, input, toCPU=True):
        """Run the model on *input* (identity in this base class)."""
        input = input.to(self.device)
        return input.cpu() if toCPU else input

    def optimizeParameters(self):
        """Run one optimisation step; return a dict of losses for logging."""
        allLosses = {}
        return allLosses

    def updateSolversDevice(self):
        """Move networks and solvers to the current device.

        Must be called each time the network is modified.
        """
        pass

    def getStateDict(self):
        """Return the model's parameters as a serialisable dict."""
        pass

    def save(self):
        """Save the model at the given location."""
        pass

    def updateConfig(self, config):
        """Merge *config* into the current configuration and refresh solvers.

        Typically if config = {"learningRate": 0.1} only the learning rate
        will be changed.
        """
        updateConfig(self.config, config)
        self.updateSolversDevice()

    def load(self,
             path=''):
        """Load a model saved with the save() method.

        Args:
            - path (string): file where the model is stored
        """
        pass

    def load_state_dict(self,
                        in_state):
        """Load a model from a state dict produced by getStateDict().

        Args:
            - in_state (dict): state dict containing the model
        """
        pass
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,627 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/vae/networks/vae_net.py | from collections import OrderedDict
import math
import numpy as np
import torch
import torch.nn as nn
def weights_init(m):
    """DCGAN-style in-place weight initialisation, dispatched on class name.

    Conv* layers get N(0, 0.02) weights; BatchNorm* layers get N(1, 0.02)
    weights and zero bias; Linear layers get N(0, 1/sqrt(fan_in)) weights
    and zero bias. Any other module is left untouched.
    """
    name = type(m).__name__
    if 'Conv' in name:
        m.weight.data.normal_(0.0, 0.02)
    elif 'BatchNorm' in name:
        m.weight.data.normal_(1.0, 0.02)
        m.bias.data.fill_(0)
    elif 'Linear' in name:
        fan_in = m.in_features
        m.weight.data.normal_(0.0, 1 / np.sqrt(fan_in))
        m.bias.data.fill_(0)
def var_init(m):
    """Zero-initialise Linear layers in place.

    Used for the VAE variance head, where starting at zero keeps the
    sampled latents stable early in training. Non-Linear modules are
    left untouched.
    """
    if 'Linear' in type(m).__name__:
        m.weight.data.fill_(0)
        m.bias.data.fill_(0)
class ENet(nn.Module):
    """Convolutional VAE encoder.

    Downsamples a square input image (power-of-two side length) to a 2x2
    feature map, projects it to a latent mean and log-variance, and samples
    a latent vector via the reparameterisation trick.
    """

    def __init__(self, config):
        """Build the conv stack from *config*.

        Expects config to provide dimOutput, dimE, latentVectorDim,
        imageSize and dataType.
        """
        super(ENet, self).__init__()
        self.imageSize = config.imageSize
        self.dataType = config.dataType
        # The stride-2 stack only lands exactly on a 2x2 map when the side
        # length is a power of two.
        if (self.imageSize & (self.imageSize - 1)) != 0:
            raise ValueError("self.imageSize should be a power of 2!")
        depthModel = int(math.log2(self.imageSize)) - 1
        # Channel widths double at every downsampling stage.
        self.hidden_dims = [config.dimE]
        for _ in range(depthModel - 1):
            self.hidden_dims.append(self.hidden_dims[-1] * 2)
        layers = OrderedDict()
        in_channels = config.dimOutput  # start with input dimension
        # Each stage halves the spatial resolution (kernel 4, stride 2,
        # pad 1); the final feature map is [-1, hidden_dims[-1], 2, 2].
        for index, h_dim in enumerate(self.hidden_dims):
            layers["conv" + str(index)] = nn.Conv2d(in_channels, h_dim,
                                                    4, 2, 1, bias=False)
            layers["batchNorm" + str(index)] = nn.BatchNorm2d(h_dim)
            layers["relu" + str(index)] = nn.LeakyReLU(0.2, inplace=True)
            in_channels = h_dim
        self.dimFeatureMap = in_channels
        self.main = nn.Sequential(layers)
        self.main.apply(weights_init)
        self.initMuLayer(config.latentVectorDim)
        self.initVarLayer(config.latentVectorDim)

    def initMuLayer(self, sizeDecisionLayer):
        """Linear head mapping the flattened 2x2 map to the latent mean.

        (Both dataType branches were identical, so a single path suffices.)
        """
        self.muLayer = nn.Linear(
            self.dimFeatureMap * 2 * 2, sizeDecisionLayer)
        self.muLayer.apply(weights_init)
        self.sizeDecisionLayer = sizeDecisionLayer

    def initVarLayer(self, sizeDecisionLayer):
        """Linear head mapping the flattened 2x2 map to the log-variance.

        Zero-initialised for stability, regardless of dataType.
        """
        self.varLayer = nn.Linear(
            self.dimFeatureMap * 2 * 2, sizeDecisionLayer)
        self.varLayer.apply(var_init)
        self.sizeDecisionLayer = sizeDecisionLayer

    def reparameterize(self, mu, logvar):
        """Sample z ~ N(mu, exp(logvar)) via the reparameterisation trick."""
        std = torch.exp(0.5 * logvar)
        noise = torch.randn_like(std)
        return mu + noise * std

    def forward(self, input):
        """Return (sampled latent, mu, logvar) for a batch of images."""
        features = torch.flatten(self.main(input), start_dim=1)
        mu = self.muLayer(features)
        logvar = self.varLayer(features)
        return self.reparameterize(mu, logvar), mu, logvar
class DNet(nn.Module):
    """Convolutional VAE decoder.

    Maps a latent vector to a 2x2 feature map through a linear layer, then
    upsamples with stride-2 transposed convolutions back to the full image
    resolution.
    """

    def __init__(self,
                 config,
                 generationActivation=None):
        """
        Parameters (read from ``config``):
            self.config.latentVectorDim,
            self.config.dimOutput,
            self.config.dimD,
            imageSize=self.config.imageSize,
            dataType=self.config.dataType
        """
        super(DNet, self).__init__()
        self.imageSize = config.imageSize
        self.dataType = config.dataType
        if (self.imageSize & (self.imageSize - 1)) != 0:
            # check power of 2
            raise ValueError("self.imageSize should be a power of 2!")
        depthModel = int(math.log2(self.imageSize)) - 1
        # build hidden_dims (widest first: mirrors the encoder, reversed)
        self.hidden_dims = [config.dimD]
        for _ in range(depthModel - 1):
            self.hidden_dims.append(self.hidden_dims[-1] * 2)
        self.hidden_dims.reverse()

        self.initFormatLayer(config.latentVectorDim)

        sequence = OrderedDict([])
        # build the conv-nn based on the imageSize.
        # the final layer size is [-1, dimOutput, imageSize, imageSize]
        for index in range(len(self.hidden_dims) - 1):
            sequence["convTranspose" +
                     str(index)] = nn.ConvTranspose2d(
                         self.hidden_dims[index], self.hidden_dims[index + 1], 4, 2, 1, bias=False)
            sequence["batchNorm" + str(index)] = nn.BatchNorm2d(self.hidden_dims[index + 1])
            sequence["relu" + str(index)] = nn.LeakyReLU(0.2, inplace=True)

        if self.dataType in ["image", "1d_signal"]:
            sequence["outlayer"] = nn.ConvTranspose2d(
                config.dimD, config.dimOutput, 4, 2, 1, bias=False)
            self.outputAcctivation = generationActivation
        elif self.dataType in ["stft_signal"]:
            sequence["outlayer"] = nn.ConvTranspose2d(
                config.dimD, 2, 4, 2, 1, bias=False)  # in stft, output is of channel 2
            self.outputAcctivation = generationActivation
        else:
            raise NotImplementedError("given datatype {} is not implemented yet!".format(self.dataType))

        self.main = nn.Sequential(sequence)
        self.main.apply(weights_init)

    def initFormatLayer(self, dimLatentVector):
        """Create the linear layer reshaping the latent vector to a 2x2 map.

        NOTE(review): both branches are identical; the ``stft_signal``
        special-case looks like a placeholder — confirm.
        """
        if self.dataType == "stft_signal":
            self.formatLayer = nn.Linear(
                dimLatentVector, self.hidden_dims[0] * 2 * 2)
            self.formatLayer.apply(weights_init)
        else:
            self.formatLayer = nn.Linear(
                dimLatentVector, self.hidden_dims[0] * 2 * 2)
            self.formatLayer.apply(weights_init)

    def forward(self, input):
        """Decode latent ``input`` into an image-shaped tensor."""
        x = self.formatLayer(input)
        # reshape the flat linear output into a (hidden_dims[0], 2, 2) map
        # NOTE(review): both branches are identical — confirm intended.
        if self.dataType == "stft_signal":
            x = x.view(-1, self.hidden_dims[0], 2, 2)
        else:
            x = x.view(-1, self.hidden_dims[0], 2, 2)
        x = self.main(x)
        if self.outputAcctivation is None:
            return x
        return self.outputAcctivation(x)
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,628 | HaizhaoYang/generative_encoder | refs/heads/master | /datasets.py | """
@author: Yong Zheng Ong
the dataset processing function
"""
import argparse
import json
import os
import pickle
import math
import numpy as np
import imageio
import matplotlib.pyplot as plt
from PIL import Image
from models.utils.utils import printProgressBar
from models.utils.image_transform import NumpyResize
from dataloader.image_dataloader import pil_loader
def saveImage(path, image):
    """Write ``image`` (a numpy array) to ``path`` via :func:`imageio.imwrite`."""
    return imageio.imwrite(path, image)
def celebaSetup(inputPath,
                outputPath,
                pathConfig="config_celeba_cropped.json"):
    """Crop every ``.jpg`` in ``inputPath`` to the 128x128 CelebA face window
    centred at (89, 121) and save the result under ``outputPath``.
    """
    jpg_names = [name for name in os.listdir(inputPath)
                 if os.path.splitext(name)[1] == ".jpg"]
    # face-centre coordinates of the standard CelebA crop
    cx = 89
    cy = 121
    total = len(jpg_names)
    if not os.path.isdir(outputPath):
        os.mkdir(outputPath)
    for position, name in enumerate(jpg_names):
        printProgressBar(position, total)
        source_path = os.path.join(inputPath, name)
        cropped = np.array(pil_loader(source_path))[cy - 64: cy + 64, cx - 64: cx + 64]
        saveImage(os.path.join(outputPath, name), cropped)
    printProgressBar(total, total)
def digitalrockSetup(inputPath,
                     outputPath,
                     pathConfig="config_digital_rock.json"):
    """Copy every ``.jpg`` in ``inputPath`` into ``outputPath``, loading each
    one as a single-channel (grayscale) image, with a progress bar.
    """
    jpg_names = [name for name in os.listdir(inputPath)
                 if os.path.splitext(name)[1] == ".jpg"]
    total = len(jpg_names)
    if not os.path.isdir(outputPath):
        os.mkdir(outputPath)
    for position, name in enumerate(jpg_names):
        printProgressBar(position, total)
        # load the image as grayscale and re-save it unchanged
        grey = np.array(pil_loader(os.path.join(inputPath, name), color=False))
        saveImage(os.path.join(outputPath, name), grey)
    printProgressBar(total, total)
def lsunSetup(inputPath,
              outputPath,
              pathConfig="config_lsun_cropped.json"):
    """Resize every ``.jpg`` in ``inputPath`` to 256x256 and save it under
    ``outputPath``, with a progress bar.
    """
    jpg_names = [name for name in os.listdir(inputPath)
                 if os.path.splitext(name)[1] == ".jpg"]
    total = len(jpg_names)
    if not os.path.isdir(outputPath):
        os.mkdir(outputPath)
    for position, name in enumerate(jpg_names):
        printProgressBar(position, total)
        resized = np.array(pil_loader(os.path.join(inputPath, name)).resize((256, 256)))
        saveImage(os.path.join(outputPath, name), resized)
    printProgressBar(total, total)
def resizeDataset(inputPath, outputPath, maxSize):
    """Write progressively resized copies of a dataset into per-scale folders.

    For every size in (64, 128, 512, 1024) up to ``maxSize``, a subfolder of
    ``outputPath`` named after the size is filled with resized copies of the
    ``.jpg``/``.npy`` files found in ``inputPath``.

    Fixes over the previous version: removed the dead ``index = 0``
    assignment and renamed the inner loop variable, which used to shadow the
    outer loop's ``index``.

    :param inputPath: directory holding the source images
    :param outputPath: directory receiving one subfolder per resolution
    :param maxSize: largest resolution (inclusive) to generate
    :return: (datasetProfile, localPath) — datasetProfile maps a scale id
             (as string) to its folder; localPath is the folder of the
             largest generated resolution
    :raises AttributeError: if maxSize is below the smallest resolution
    """
    sizes = [64, 128, 512, 1024]
    scales = [0, 5, 6, 8]  # scale ids paired index-wise with ``sizes``

    imgList = [f for f in os.listdir(inputPath) if os.path.splitext(f)[
        1] in [".jpg", ".npy"]]

    nImgs = len(imgList)

    if maxSize < sizes[0]:
        raise AttributeError("Maximum resolution too low")

    if not os.path.isdir(outputPath):
        os.mkdir(outputPath)

    datasetProfile = {}
    localPath = None
    for scaleIndex, size in enumerate(sizes):
        if size > maxSize:
            break
        localPath = os.path.join(outputPath, str(size))
        if not os.path.isdir(localPath):
            os.mkdir(localPath)
        datasetProfile[str(scales[scaleIndex])] = localPath
        print("Resolution %d x %d" % (size, size))
        resizeModule = NumpyResize((size, size))
        for imgIndex, item in enumerate(imgList):
            printProgressBar(imgIndex, nImgs)
            path = os.path.join(inputPath, item)
            img = pil_loader(path)
            img = resizeModule(img)
            path = os.path.splitext(os.path.join(localPath, item))[0] + ".jpg"
            saveImage(path, img)
        printProgressBar(nImgs, nImgs)
    return datasetProfile, localPath
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Testing script')
    parser.add_argument('dataset_name', type=str,
                        choices=['celeba_cropped', 'digital_rock'],
                        help='Name of the dataset.')
    parser.add_argument('dataset_path', type=str,
                        help='Path to the input dataset')
    parser.add_argument('-o', help="If it applies, output dataset (mandadory \
                        for celeba_cropped)",
                        type=str, dest="output_dataset")
    parser.add_argument('-f', action='store_true',
                        dest="fast_training",
                        help="Store several resized versions of a dataset for \
                        a faster training. Advised for HD datasets.")
    parser.add_argument('-m', dest='model_type',
                        type=str, default='PGAN',
                        choices=['PGAN', 'DCGAN'],
                        help="Model type. Default is progressive growing \
                        (PGAN)")
    args = parser.parse_args()

    # Training configuration skeleton; dumped to config_<dataset>.json below.
    config = {"pathDB": args.dataset_path}
    config["config"] = {}
    moveLastScale = False
    keepOriginalDataset = True

    if args.dataset_name == 'celeba_cropped':
        if args.output_dataset is None:
            raise AttributeError(
                "Please provide and output path to dump celebaCropped")
        if args.model_type == 'PGAN':
            config["config"]["maxIterAtScale"] = [48000, 96000, 96000, 96000,
                                                 96000, 200000]
        maxSize = 128
        print("Cropping dataset...")
        celebaSetup(args.dataset_path, args.output_dataset)
        config["pathDB"] = args.output_dataset
        config["dbType"] = "image"
        config["targets"] = [""]
        config["config"]["dataType"] = "image"
        config["config"]["dimOutput"] = 3
        config["config"]["imageSize"] = 128
        moveLastScale = True

    # implement dataset loader for digital_rock_images
    if args.dataset_name == 'digital_rock':
        if args.model_type == 'PGAN':
            config["config"]["maxIterAtScale"] = [48000, 96000, 96000, 96000,
                                                 96000, 96000, 200000]
        # configuration for VAE depth: 2 ** (depth + 3) = maxSize
        config["config"]["depth"] = 5
        maxSize = 256
        if args.output_dataset is None:
            raise AttributeError(
                "Please provide and output path to dump digital rock images")
        print("Building dataset...")
        digitalrockSetup(args.dataset_path, args.output_dataset)
        config["pathDB"] = args.output_dataset
        config["dbType"] = "image"
        config["targets"] = [""]
        config["config"]["dataType"] = "image"
        config["config"]["dimOutput"] = 1
        config["config"]["imageSize"] = 256
        moveLastScale = True

    if args.fast_training:
        if args.output_dataset is None:
            raise AttributeError(
                "Please provide and output path to dump intermediate datasets")
        # highest progressive-GAN scale index for the chosen resolution
        maxScale = int(math.log(maxSize, 2)) - 2
        if moveLastScale:
            # Intermediate resolutions were written next to the full-size
            # images; move the full-size ones into their own subfolder.
            datasetProfile, _ = resizeDataset(
                args.output_dataset, args.output_dataset, maxSize / 2)
            print("Moving the last dataset...")
            lastScaleOut = os.path.join(args.output_dataset, str(maxSize))
            if not os.path.isdir(lastScaleOut):
                os.mkdir(lastScaleOut)
            for img in [f for f in os.listdir(args.output_dataset)
                        if os.path.splitext(f)[1] == ".jpg"]:
                pathIn = os.path.join(args.output_dataset, img)
                pathOut = os.path.join(lastScaleOut, img)
                os.rename(pathIn, pathOut)
            datasetProfile[maxScale] = lastScaleOut
        elif keepOriginalDataset:
            # Keep the source folder itself as the highest resolution.
            datasetProfile, _ = resizeDataset(
                args.dataset_path, args.output_dataset, maxSize / 2)
            datasetProfile[maxScale] = args.dataset_path
            lastScaleOut = args.dataset_path
        else:
            datasetProfile, lastScaleOut = resizeDataset(
                args.dataset_path, args.output_dataset, maxSize)
        config["datasetProfile"] = datasetProfile
        config["pathDB"] = lastScaleOut

    pathConfig = "config_" + args.dataset_name + ".json"
    with open(pathConfig, 'w') as file:
        json.dump(config, file, indent=2)
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,629 | HaizhaoYang/generative_encoder | refs/heads/master | /test_digital_rock_images.py | """
@author: Yong Zheng Ong
the main function for running the ge model
"""
import importlib
import argparse
import sys
if __name__ == "__main__":
    # argparser (add_help disabled: the selected test module consumes the
    # parser itself and adds its own arguments/help)
    parser = argparse.ArgumentParser(description='Testing script', add_help=False)
    parser.add_argument('evaluation_name', type=str,
                        help='Name of the evaluation method to launch. To get \
                        the arguments specific to an evaluation method please \
                        use: eval.py evaluation_name -h')
    parser.add_argument('evaluation_id', type=str,
                        help='Identifier of the evaluation to launch. The result will be saved in "result/evaluation_id')
    # args for generative_encoder
    # NOTE(review): argparse ``type=bool`` treats any non-empty string as
    # True (e.g. "-with_ae False" is truthy) — confirm this is intended.
    parser.add_argument('-digital_rock_position', type=int, required=False)
    parser.add_argument('-with_ae', type=bool, required=False)
    parser.add_argument('-gan_name', type=str, required=False)
    parser.add_argument('-ae_name', type=str, required=False)
    args = parser.parse_args()

    # validity check for args
    if args.evaluation_name == 'generate_digital_rock_images':
        assert args.digital_rock_position is not None, "digital_rock_position field should not be empty!"
        assert args.with_ae is not None, "with_ae field should not be empty!"
        # import the evaluation module lazily so only the selected test loads
        module = importlib.import_module("tests.digital_rock_images.generate_digital_rock_images")
    else:
        raise ValueError("evaluation name provided is invalid")

    print("Running " + args.evaluation_name)
    # the module's test() receives the parser to parse its extra arguments
    module.test(parser)
43,630 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/base/base_trainer.py | import os
import json
import pickle as pkl
import torch
import torchvision.transforms as Transforms
from ...utils.config import BaseConfig, getConfigFromDict, getDictFromConfig
class BaseTrainer():
    """
    A class to manage model training.

    Subclasses implement the model construction (``initModel``), the default
    configuration (``getDefaultConfig``) and the training loop
    (``trainOnEpoch`` / ``train``); this base class handles configuration
    loading, running-loss bookkeeping and data loading.
    """

    def __init__(self,
                 pathdb,
                 dataloader=None,
                 dbType="image",
                 targets=[""],
                 useGPU=True,
                 config=None,
                 lossIterEvaluation=1,  # TODO: change back to 200
                 saveIter=5000,
                 checkPointDir=None,
                 modelLabel=""):
        """
        Initializer for all trainers.

        Args:
            pathdb (str): path to the training dataset
            dataloader: object exposing ``getDataset(path, targets, size, config)``
            dbType (str): dataset type label (e.g. "image")
            targets (list): target labels forwarded to the dataloader
                (NOTE(review): mutable default argument — shared across
                instances; confirm no caller mutates it)
            useGPU (bool): train on GPU if True
            config (dict): training configuration overrides
            lossIterEvaluation (int): iterations between loss reports
            saveIter (int): iterations between checkpoints
            checkPointDir (str): checkpoint output directory
            modelLabel (str): label used for checkpoint naming
        """
        # Parameters
        # Training dataset parameters
        self.path_db = pathdb
        self.db_type = dbType
        self.targets = targets
        # set up dataloader for training
        self.dataloader = dataloader

        if config is None:
            config = {}

        # Load training configuration
        self.readTrainConfig(config)

        # Model Initialization
        self.useGPU = useGPU
        if not self.useGPU:
            self.numWorkers = 1

        # Internal state
        self.runningLoss = {}
        self.startScale = 0
        self.startIter = 0
        self.lossProfile = []

        self.initModel()

        # set checkpoint parameters
        self.checkPointDir = checkPointDir
        self.modelLabel = modelLabel
        self.saveIter = saveIter
        self.pathLossLog = None

        # Loss printing
        self.lossIterEvaluation = lossIterEvaluation

    def readTrainConfig(self, config):
        """
        Load a permanent configuration describing a model.
        Variables described here should remain constant through the training.
        """
        self.modelConfig = BaseConfig()
        getConfigFromDict(self.modelConfig, config, self.getDefaultConfig())

    def getDefaultConfig(self):
        """The default config to load should be implemented here (subclass hook)."""
        pass

    def initModel(self):
        """The model should be initialized here (subclass hook; must set ``self.model``)."""
        pass

    def updateRunningLosses(self, allLosses):
        # Accumulate [sum, count] per loss name so averages can be reported.
        for name, value in allLosses.items():
            if name not in self.runningLoss:
                self.runningLoss[name] = [0, 0]
            self.runningLoss[name][0] += value
            self.runningLoss[name][1] += 1

    def resetRunningLosses(self):
        # Drop all accumulated loss sums/counts.
        self.runningLoss = {}

    def updateLossProfile(self, iter):
        """Append the averaged running losses for ``iter`` to the current
        loss profile, padding with ``None`` where a loss was not observed."""
        nPrevIter = len(self.lossProfile[-1]["iter"])
        self.lossProfile[-1]["iter"].append(iter)

        newKeys = set(self.runningLoss.keys())
        existingKeys = set(self.lossProfile[-1].keys())
        toComplete = existingKeys - newKeys

        for item in newKeys:
            if item not in existingKeys:
                # back-fill earlier iterations for a loss seen for the first time
                self.lossProfile[-1][item] = [None for x in range(nPrevIter)]
            value, stack = self.runningLoss[item]
            self.lossProfile[-1][item].append(value / float(stack))

        for item in toComplete:
            if item in ["scale", "iter"]:
                continue
            self.lossProfile[-1][item].append(None)

    def getDBLoader(self, scale):
        """
        Load the training dataset for the given scale.

        Args:
            - scale (int): scale at which we are working

        Returns:
            A dataset with properly resized inputs.
        """
        # prepare parameters for the dataloader
        # size
        size = self.model.getSize()
        print("size", size)
        print("loading {} dataset".format(self.db_type))
        dataset = self.dataloader.getDataset(self.path_db, self.targets, size, self.modelConfig)
        print("%d images detected" % int(len(dataset)))
        return torch.utils.data.DataLoader(dataset,
                                           batch_size=self.modelConfig.miniBatchSize,
                                           shuffle=True, num_workers=self.model.n_devices)

    def inScaleUpdate(self, iter, scale, inputs_real):
        # Subclass hook: transform the real inputs at each iteration (no-op here).
        return inputs_real

    def trainOnEpoch(self,
                     dbLoader,
                     scale,
                     shiftIter=0,
                     maxIter=-1):
        """Train for one epoch (subclass hook)."""
        pass

    def train(self):
        """Run the full training loop (subclass hook)."""
        pass
43,631 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/ae/base_AE.py | from copy import deepcopy
import torch
import torch.nn as nn
from ..base.base_NET import BaseNET
from ...utils.config import BaseConfig, updateConfig
from .loss_criterions import base_loss_criterions
from ...utils.utils import loadPartOfStateDict, finiteCheck, \
loadStateDictCompatible
class BaseAE(BaseNET):
    """Abstract class: the basic framework for AE training.

    Concrete subclasses provide the encoder (``netE``), the decoder
    (``netD``) and their optimizers; this base class wires device placement,
    moving-average copies of both networks, and (de)serialization.
    """

    def __init__(self,
                 config=None,
                 **kwargs):
        """
        Initialize the BaseAE
        In addition to base parameters, AE additionally requires
        1. lossMode ('AE_MSE' or 'VAE_MSE')
        2. dimLatentVector (latent space size)
        """
        BaseNET.__init__(self, config=config, **kwargs)

        self.valid_losses = ['AE_MSE', 'VAE_MSE']
        if config["lossMode"] not in self.valid_losses:
            raise ValueError(
                "lossMode should be one of the following : {}".format(self.valid_losses))

        # Latent vector dimension
        self.config.latentVectorDim = config["dimLatentVector"]

        # Loss criterion
        self.config.lossCriterion = config["lossMode"]
        self.lossCriterion = getattr(
            base_loss_criterions, config["lossMode"])(self.device)

        # Initialize the encoder and the decoder
        self.netD = self.getNetD()
        self.netE = self.getNetE()

        # Move the networks to the gpu
        self.updateSolversDevice()

    def test(self, input, getAvG=False, toCPU=True):
        """
        Run a full encode/decode (reconstruction) pass on ``input``.

        Args:
            input (torch.tensor): batch of real data to reconstruct
            getAvG (bool): use the moving-average networks instead of the
                current ones
            toCPU (bool): move the result back to the CPU
        """
        input = input.to(self.device)
        if getAvG:
            if toCPU:
                return self.avgD(self.avgE(input)).cpu()
            else:
                return self.avgD(self.avgE(input))
        elif toCPU:
            return self.netD(self.netE(input)).detach().cpu()
        else:
            return self.netD(self.netE(input)).detach()

    def buildAvG(self):
        """
        Create and upload a moving average encoder and decoder.
        The averaged copies never receive gradients.
        """
        # generate decoder
        self.avgD = deepcopy(self.getOriginalD())
        for param in self.avgD.parameters():
            param.requires_grad = False

        if self.useGPU:
            self.avgD = nn.DataParallel(self.avgD)
            self.avgD.to(self.device)

        # generate encoder
        self.avgE = deepcopy(self.getOriginalE())
        for param in self.avgE.parameters():
            param.requires_grad = False

        if self.useGPU:
            self.avgE = nn.DataParallel(self.avgE)
            self.avgE.to(self.device)

    def optimizeParameters(self, input_batch, input_target):
        """
        Update the model using the given inputs (subclass hook).
        The optimization flow should be described here.

        Args:
            input_batch (torch.tensor): input batch of real data
            input_target (torch.tensor): reconstruction target for the batch
        """
        pass

    def updateSolversDevice(self, buildAvG=True):
        """
        Move the current networks and solvers to the GPU.
        This function must be called each time netE or netD is modified
        """
        if buildAvG:
            self.buildAvG()

        if not isinstance(self.netD, nn.DataParallel) and self.useGPU:
            self.netD = nn.DataParallel(self.netD)
        if not isinstance(self.netE, nn.DataParallel) and self.useGPU:
            self.netE = nn.DataParallel(self.netE)

        self.netD.to(self.device)
        self.netE.to(self.device)

        # Rebuild the optimizers against the (possibly re-wrapped) networks
        self.optimizerD = self.getOptimizerD()
        self.optimizerE = self.getOptimizerE()
        self.optimizerAE = self.getOptimizerAE()
        self.optimizerD.zero_grad()
        self.optimizerE.zero_grad()
        self.optimizerAE.zero_grad()

    def getOriginalE(self):
        r"""
        Retrieve the original E network. Use this function
        when you want to modify E after the initialization
        """
        if isinstance(self.netE, nn.DataParallel):
            return self.netE.module
        return self.netE

    def getOriginalD(self):
        r"""
        Retrieve the original D network. Use this function
        when you want to modify D after the initialization
        """
        if isinstance(self.netD, nn.DataParallel):
            return self.netD.module
        return self.netD

    def getOriginalAvgD(self):
        r"""
        Retrieve the original (unwrapped) moving-average D network.
        """
        if isinstance(self.avgD, nn.DataParallel):
            return self.avgD.module
        return self.avgD

    def getOriginalAvgE(self):
        r"""
        Retrieve the original (unwrapped) moving-average E network.
        """
        if isinstance(self.avgE, nn.DataParallel):
            return self.avgE.module
        return self.avgE

    def getNetE(self):
        r"""
        The encoder should be defined here (subclass hook).
        """
        pass

    def getNetD(self):
        r"""
        The decoder should be defined here (subclass hook).
        """
        pass

    def getOptimizerD(self):
        r"""
        Optimizer of the decoder.
        """
        pass

    def getOptimizerE(self):
        r"""
        Optimizer of the encoder.
        """
        pass

    def getOptimizerAE(self):
        r"""
        Optimizer of the autoencoder.
        """
        pass

    def getStateDict(self, saveTrainTmp=False):
        """
        Get the model's parameters
        """
        # Get the encoder's state
        stateE = self.getOriginalE().state_dict()

        # Get the decoder's state
        stateD = self.getOriginalD().state_dict()

        out_state = {'config': self.config,
                     'netE': stateE,
                     'netD': stateD}

        # Average AE
        out_state['avgE'] = self.getOriginalAvgE().state_dict()
        out_state['avgD'] = self.getOriginalAvgD().state_dict()

        if saveTrainTmp:
            out_state['tmp'] = self.trainTmp

        return out_state

    def save(self, path, saveTrainTmp=False):
        """
        Save the model at the given location.

        All parameters included in the self.config class will be saved as well.
        Args:
            - path (string): file where the model should be saved
            - saveTrainTmp (bool): set to True if you want to conserve
                                    the training parameters
        """
        torch.save(self.getStateDict(saveTrainTmp=saveTrainTmp), path)

    def load(self,
             path="",
             in_state=None,
             loadE=True,
             loadD=True,
             loadConfig=True,
             finetuning=False):
        """
        Load a model saved with the @method save() function

        Args:
            - path (string): file where the model is stored

        NOTE(review): the ``in_state`` argument is immediately overwritten by
        ``torch.load(path)``, and ``loadConfig``/``finetuning`` are forwarded
        as hard-coded True/False rather than the caller's values — confirm
        whether this is intended.
        """
        in_state = torch.load(path)
        self.load_state_dict(in_state,
                             loadE=loadE,
                             loadD=loadD,
                             loadConfig=True,
                             finetuning=False)

    def load_state_dict(self,
                        in_state,
                        loadE=True,
                        loadD=True,
                        loadConfig=True,
                        finetuning=False):
        """
        Load a model saved with the @method save() function

        Args:
            - in_state (dict): state dict containing the model
        """
        # Step one : load the configuration
        if loadConfig:
            updateConfig(self.config, in_state['config'])
            self.lossCriterion = getattr(
                base_loss_criterions, self.config.lossCriterion)(self.device)

        # Re-initialize E and D with the loaded configuration
        buildAvG = True

        if loadE:
            self.netE = self.getNetE()
            if finetuning:
                loadPartOfStateDict(
                    self.netE, in_state['netE'], ["formatLayer"])
                self.getOriginalE().initFormatLayer(self.config.latentVectorDim)
            else:
                # Replace me by a standard loadStatedict for open-sourcing TODO
                loadStateDictCompatible(self.netE, in_state['netE'])
                if 'avgE' in in_state:
                    print("Average network found !")
                    self.buildAvG()
                    # Replace me by a standard loadStatedict for open-sourcing
                    loadStateDictCompatible(self.getOriginalAvgE(), in_state['avgE'])
                    buildAvG = False

        if loadD:
            self.netD = self.getNetD()
            if finetuning:
                loadPartOfStateDict(
                    self.netD, in_state['netD'], ["decisionLayer"])
                self.getOriginalD().initDecisionLayer(self.lossCriterion.sizeDecisionLayer)
            else:
                # Replace me by a standard loadStatedict for open-sourcing TODO
                loadStateDictCompatible(self.netD, in_state['netD'])
                if 'avgD' in in_state:
                    print("Average network found !")
                    self.buildAvG()
                    # Replace me by a standard loadStatedict for open-sourcing
                    loadStateDictCompatible(self.getOriginalAvgD(), in_state['avgD'])
                    buildAvG = False

        elif 'tmp' in in_state.keys():
            self.trainTmp = in_state['tmp']

        # Don't forget to reset the machinery !
        self.updateSolversDevice(buildAvG)
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,632 | HaizhaoYang/generative_encoder | refs/heads/master | /models/networks/vae/standard_configurations/vae_config.py | from ....utils.config import BaseConfig
# Default configuration for VAETrainer
_C = BaseConfig()

############################################################

# Data Type (selects the network variant, e.g. 'image')
_C.dataType = 'image'

# Image Size (side length in pixels; the networks require a power of 2)
_C.imageSize = 128

# Mini batch size
_C.miniBatchSize = 16

# Dimension of the latent vector
_C.dimLatentVector = 256

# Dimension of the output image (number of channels)
_C.dimOutput = 3

# Dimension of the encoder (base channel count)
_C.dimE = 32

# Dimension of the discrimator (base channel count of the decoder)
_C.dimD = 32

# Loss mode
_C.lossMode = 'VAE_MSE'

# Base learning rate
_C.baseLearningRate = 0.0002

# Number of epochs
_C.nEpoch = 500
43,633 | HaizhaoYang/generative_encoder | refs/heads/master | /models/utils/image_transform.py | import torch
import torchvision.transforms as Transforms
import numpy as np
from PIL import Image
# The equivalent of some torchvision.transforms operations but for numpy array
# instead of PIL images
class NumpyResize(object):
    """Resize a numpy image (or PIL image) to a fixed size.

    Numpy equivalent of ``torchvision.transforms.Resize``: the input is
    converted to a PIL image if needed, bilinearly resized, and returned as
    a numpy array.
    """

    def __init__(self, size):
        # (width, height) tuple passed straight to PIL's resize
        self.size = size

    def __call__(self, img):
        """
        Args:
            img (np array): image to be resized

        Returns:
            np array: resized image
        """
        if not isinstance(img, Image.Image):
            img = Image.fromarray(img)
        return np.array(img.resize(self.size, resample=Image.BILINEAR))

    def __repr__(self):
        # Bug fix: previously referenced the nonexistent attribute self.p,
        # so repr() raised AttributeError.
        return self.__class__.__name__ + '(size={})'.format(self.size)
class NumpyReshape(object):
    """Reshape a numpy array to a fixed shape and wrap it as a PIL image."""

    def __init__(self, size):
        # target shape passed to np.reshape
        self.size = size

    def __call__(self, img):
        """
        Args:
            img (np array): image to be reshaped

        Returns:
            PIL.Image.Image: reshaped image
        """
        img = Image.fromarray(np.reshape(img, self.size))
        return img

    def __repr__(self):
        # Bug fix: previously referenced the nonexistent attribute self.p,
        # so repr() raised AttributeError.
        return self.__class__.__name__ + '(size={})'.format(self.size)
class NumpyToTensor(object):
    """Convert a numpy array into a torch tensor.

    1-D arrays become float32 tensors directly; 2-D arrays receive a
    trailing channel axis before the torchvision conversion; higher-rank
    arrays are handed to ``to_tensor`` unchanged.
    """

    def __init__(self):
        return

    def __call__(self, img):
        """
        Turn a numpy object into a tensor.
        """
        rank = len(img.shape)
        if rank == 1:
            return torch.from_numpy(np.array(img, np.float32, copy=False))
        if rank == 2:
            img = img.reshape(img.shape[0], img.shape[1], 1)
        return Transforms.functional.to_tensor(img)
| {"/models/networks/gan/gan_trainer.py": ["/models/networks/base/base_trainer.py"], "/dataloader/image_dataloader.py": ["/models/utils/image_transform.py"], "/train.py": ["/config.py"], "/models/networks/vae/trainer.py": ["/models/networks/vae/standard_configurations/vae_config.py", "/models/networks/vae/vae.py"], "/models/networks/vae/vae.py": ["/models/networks/ae/base_AE.py", "/models/networks/vae/networks/vae_net.py"], "/tests/digital_rock_images/generate_digital_rock_images.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/gan/base_GAN.py": ["/models/networks/base/base_NET.py"], "/datasets.py": ["/models/utils/image_transform.py", "/dataloader/image_dataloader.py"], "/models/networks/ae/base_AE.py": ["/models/networks/base/base_NET.py"]} |
43,645 | RayM4/HangMan | refs/heads/master | /main.py | ################################
# Main Application #
################################
import random
from list_reader import get_default_list
from defaults import DEFAULT_LIVES, DEFAULT_LIFE_RENDER
class Hangman:
    """State machine for a console hangman game.

    Tracks the secret word, the letters guessed so far, and the number of
    wrong guesses (lives used).  ``guess`` drives one turn; once a round is
    won or lost, the next call to ``guess`` starts a fresh round.
    """

    def __init__(self):
        self.word_bank = get_default_list()
        self.current_word = self.__get_random_word()
        self.guesses = {}
        self.current_guess_string = self.__get_guess_string()
        self.max_lives = DEFAULT_LIVES
        self.current_lives = 0

    def render(self):
        """Draw the gallows figure, the guess summary and the masked word."""
        self.__draw_man()
        print('\n')
        self.__draw_guesses()
        print(self.current_guess_string)

    def update(self):
        """Redraw the board and announce a loss or a win if the round ended."""
        self.render()
        if self.__check_game_over():
            print("GAME OVER")
            print("Word was: " + self.current_word)
            print("Press any key to continue")
        if self.__check_win():
            print('You Win')
            print("Press any key to continue")

    def guess(self, character):
        """Process one guessed character, or restart after a finished round."""
        if self.__check_game_over() or self.__check_win():
            self.__reset()
        else:
            self.__check_guess(character)
        self.update()

    # internal helper functions
    def __get_guess_string(self):
        # Mask every letter that has not yet been guessed correctly.
        return ''.join(
            ' ' + letter if self.guesses.get(letter) else ' _'
            for letter in self.current_word)

    def __get_random_word(self):
        return random.choice(self.word_bank)

    def __draw_man(self):
        stages = [DEFAULT_LIFE_RENDER.get(stage)
                  for stage in range(self.current_lives)]
        print(''.join(stages))

    def __draw_guesses(self):
        correct = '[ '
        wrong = '[ '
        for letter, was_right in self.guesses.items():
            if was_right:
                correct += letter + ' '
            else:
                wrong += letter + ' '
        print('Correct: ' + correct + ']')
        print('Wrong: ' + wrong + ']')

    def __check_guess(self, character):
        letter = str(character).lower()
        hit = self.current_word.find(letter) > -1
        self.guesses[letter] = hit
        if not hit:
            self.current_lives += 1
        self.current_guess_string = self.__get_guess_string()

    def __check_game_over(self):
        return self.current_lives >= self.max_lives

    def __check_win(self):
        return '_' not in self.current_guess_string

    def __reset(self):
        # Start a brand-new round and print a separator between games.
        self.word_bank = get_default_list()
        self.current_word = self.__get_random_word()
        self.guesses = {}
        self.current_guess_string = self.__get_guess_string()
        self.max_lives = DEFAULT_LIVES
        self.current_lives = 0
        print("------------------------------")
if __name__ == "__main__":
    # Interactive loop: render once, then feed each typed character to the game.
    game = Hangman()
    game.render()
    while True:
        typed = input('Input: ')
        game.guess(typed)
| {"/main.py": ["/list_reader.py"]} |
43,646 | RayM4/HangMan | refs/heads/master | /list_reader.py | ################################
# Reader to generate word bank #
################################
from defaults import DEFAULT_WORD_BANK
def get_default_list():
    """Return the default word bank shipped with the game."""
    return DEFAULT_WORD_BANK
| {"/main.py": ["/list_reader.py"]} |
43,647 | RovisLab/GFPNet | refs/heads/master | /modelling_utils.py | from open3d.open3d import geometry
import numpy as np
def find_primitive_points_dependencies(primitive_cloud):
    """
    Link every point of the primitive cloud to its two closest neighbours.

    For each point a KNN search (K=2) is run on a KD-tree of the cloud and the
    two nearest neighbours are stored as a NEXT_PREV_POINT_DEP record.

    :param primitive_cloud: [in] open3d point cloud of the primitive.
    :return: [out] list of NEXT_PREV_POINT_DEP entries, one per matched point.
    """
    from io_primitive import NEXT_PREV_POINT_DEP
    from open3d.open3d import geometry

    neighbour_count = 2
    kd_tree = geometry.KDTreeFlann()
    kd_tree.set_geometry(primitive_cloud)

    dependencies = []
    for query_point in np.asarray(primitive_cloud.points):
        (found, neighbour_ids, _squared_dists) = kd_tree.search_knn_vector_3d(query_point, neighbour_count)
        if found > 0:
            dependencies.append(NEXT_PREV_POINT_DEP(
                primitive_cloud.points[neighbour_ids[0]],
                primitive_cloud.points[neighbour_ids[1]],
                neighbour_ids[0],
                neighbour_ids[1]))
    return dependencies
def compute_econt(point1, point2, point_cloud):
    """
    Contour energy: squared deviation of the two points' separation from the
    cloud's mean point spacing.

    :param point1: [in] First point
    :param point2: [in] Second point
    :param point_cloud: [in] Source point cloud (supplies the mean spacing)
    :return: [out] E_cont value
    """
    dx = point2[0] - point1[0]
    dy = point2[1] - point1[1]
    dz = point2[2] - point1[2]
    separation = np.sqrt(dx ** 2 + dy ** 2 + dz ** 2)
    return (euclidian_distance(point_cloud) - separation) ** 2
def compute_ecurv(point1, point2, point3):
    """
    Curvature energy of a point triple: squared norm of the discrete second
    difference (point1 - 2*point2 + point3).

    :param point1: [in] First point
    :param point2: [in] Middle point
    :param point3: [in] Last point
    :return: [out] Curvature energy value
    """
    return sum(
        (point1[axis] - 2 * point2[axis] + point3[axis]) ** 2
        for axis in range(3)
    )
def euclidian_distance_2points(pointA, pointB):
    """Euclidean distance between two points.

    Generalized: np.subtract accepts plain lists/tuples as well as ndarrays;
    the original ``pointA - pointB`` raised TypeError for non-array inputs.
    """
    return np.linalg.norm(np.subtract(pointA, pointB))
def euclidian_distance(point_cloud):
    """Mean edge length of the closed polyline through the cloud's points.

    Consecutive points are connected in storage order, with the last point
    wrapping back to the first.

    :param point_cloud: [in] cloud exposing an ordered ``.points`` sequence
    :return: [out] average consecutive-point distance
    """
    pts = point_cloud.points
    count = len(pts)
    assert count > 0  # cloud has points
    total = 0
    for idx in range(count):
        cur = pts[idx]
        nxt = pts[(idx + 1) % count]  # modulo closes the loop at the last point
        total += np.sqrt((cur[0] - nxt[0]) ** 2 +
                         (cur[1] - nxt[1]) ** 2 +
                         (cur[2] - nxt[2]) ** 2)
    return total / count
def number_of_neighbors(ptCheckedPoint, observation_cloud, radius, returnWhat="count"):
    """
    Radius search around a point of the observation cloud.

    :param ptCheckedPoint: [in] query point
    :param observation_cloud: [in] cloud to search in
    :param radius: [in] search radius
    :param returnWhat: [in] 'count' for the neighbour count, 'indexes' for ids
    :return: [out] neighbour count, index list, or None for any other mode
    """
    from open3d.open3d import geometry
    kd_tree = geometry.KDTreeFlann()
    kd_tree.set_geometry(observation_cloud)
    (found, neighbour_ids, _squared_dists) = kd_tree.search_radius_vector_3d(ptCheckedPoint, radius)
    if returnWhat == 'count':
        return found
    if returnWhat == 'indexes':
        return neighbour_ids
def dist2point(point1, point2):
    """
    Euclidean distance between two 3D points.

    :param point1: [in] First point
    :param point2: [in] Second point
    :return: [out] Distance between points
    """
    squared = sum((point2[axis] - point1[axis]) ** 2 for axis in range(3))
    return np.sqrt(squared)
def active_contour_modelling(srcPrimitive, objectROI, search_radius, steps, step_dist, visualizer):
    """
    Method used for modelling the point cloud according to the Active Contours principle.

    Each unmodified control point of the primitive is slid along its normal, in
    both directions, looking for the position that minimises a weighted energy:
    alpha * E_cont (spacing) + beta * E_curv (curvature) minus gama times the
    neighbour support found in the observation cloud. A winning position is
    propagated to surrounding points via transform_neighbor_points_for().

    :param srcPrimitive: [in] Object containing the primitive point cloud.
    :param objectROI: [in] Object containing the target, extracted object from scene point cloud.
    :param search_radius: [in] Radius used when counting observation-cloud neighbours.
    :param steps: [in] Max distance by which a primitive point can be moved along its normal.
    :param step_dist: [in] Step increment added to the travelled distance per inner iteration.
    :param visualizer: [in] Visualizer for viewing live modellation.
    :return: None - srcPrimitive is updated in place.
    """
    import time
    from io_primitive import PPoint
    from open3d.open3d import visualization
    TIME_START = time.time()
    # One bookkeeping record per primitive point. f_eng=1000 is a large
    # sentinel initial energy so any admissible candidate improves on it.
    # NOTE(review): assumes PPoint exposes f_eng as .functional_energy and
    # neighborsCount as .nOfNeighbors (both read below) - confirm in io_primitive.
    primitive_points_list = [PPoint(idx=i,
                                    isModified=False,
                                    x=srcPrimitive.point_cloud.points[i][0],
                                    y=srcPrimitive.point_cloud.points[i][1],
                                    z=srcPrimitive.point_cloud.points[i][2],
                                    f_eng=1000,
                                    neighborsCount=0,
                                    isControlPoint=False) for i in range(0, len(srcPrimitive.point_cloud.points))]
    total_energy_temp = 0
    treshold = 0.3  # max distance a point may drift from its initial position
    number_of_iterations = 1
    # Contour topology: next/previous neighbour ids for every primitive point.
    srcPrimitive.allPrimitivePointsNeighboursDependinces = find_primitive_points_dependencies(srcPrimitive.point_cloud)
    for iteration in range(number_of_iterations):
        count = 0  # how many points were actually moved this iteration
        for i in range(0, srcPrimitive.cloud_size):
            # Keep the live view responsive while modelling.
            visualizer.update_geometry()
            visualizer.poll_events()
            visualizer.update_renderer()
            # Only untouched control points take part in the optimisation.
            if not srcPrimitive.primitiveModelledVertices[i].isModified and srcPrimitive.primitiveModelledVertices[i].isControlPoint:
                # NOTE(review): the accumulator is reset per point, so the
                # "Energy" printed below reflects only the last processed
                # point, not the whole iteration - confirm intent.
                total_energy_temp = 0
                # Curvature at the current point, from its prev/next neighbours.
                e_curv_temp = compute_ecurv(
                    srcPrimitive.point_cloud.points[srcPrimitive.allPrimitivePointsNeighboursDependinces[i].prevPointID],
                    srcPrimitive.point_cloud.points[i],
                    srcPrimitive.point_cloud.points[srcPrimitive.allPrimitivePointsNeighboursDependinces[i].nextPointID])
                # High-curvature points weight the observation term stronger.
                th = 1 * 10**(-10)
                if e_curv_temp > th:
                    alpha = 0.1
                    beta = 0.3
                    gama = 0.8
                else:
                    alpha = 0.1
                    beta = 0.3
                    gama = 0.6
                # Baseline energy at the point's current position.
                functional_energy = alpha * compute_econt(srcPrimitive.point_cloud.points[i],
                                                          srcPrimitive.point_cloud.points[
                                                              srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                  i].nextPointID],
                                                          srcPrimitive.point_cloud) + \
                                    beta * compute_ecurv(srcPrimitive.point_cloud.points[i],
                                                         srcPrimitive.point_cloud.points[
                                                             srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                 i].nextPointID],
                                                         srcPrimitive.point_cloud.points[
                                                             srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                 srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                     i].nextPointID].nextPointID]) - \
                                    gama * number_of_neighbors(srcPrimitive.point_cloud.points[i],
                                                               objectROI.point_cloud, search_radius)
                # Candidate positions travelling along the -normal / +normal.
                point_NEG_dir = [srcPrimitive.point_cloud.points[i][0],
                                 srcPrimitive.point_cloud.points[i][1],
                                 srcPrimitive.point_cloud.points[i][2]]
                point_POS_dir = [srcPrimitive.point_cloud.points[i][0],
                                 srcPrimitive.point_cloud.points[i][1],
                                 srcPrimitive.point_cloud.points[i][2]]
                # NOTE(review): these are ALIASES of the candidate lists, not
                # copies - the "look one step further" updates below therefore
                # also move point_POS_dir / point_NEG_dir themselves. Confirm
                # this is intentional.
                temp_pos_dir = point_POS_dir
                temp_neg_dir = point_NEG_dir
                for iteration_step in np.arange(step_dist, steps, step_dist):
                    # Advance both candidates along the point's normal.
                    point_POS_dir[0] = point_POS_dir[0] + srcPrimitive.point_cloud.normals[i][0] * iteration_step
                    point_POS_dir[1] = point_POS_dir[1] + srcPrimitive.point_cloud.normals[i][1] * iteration_step
                    point_POS_dir[2] = point_POS_dir[2] + srcPrimitive.point_cloud.normals[i][2] * iteration_step
                    point_NEG_dir[0] = point_NEG_dir[0] - srcPrimitive.point_cloud.normals[i][0] * iteration_step
                    point_NEG_dir[1] = point_NEG_dir[1] - srcPrimitive.point_cloud.normals[i][1] * iteration_step
                    point_NEG_dir[2] = point_NEG_dir[2] - srcPrimitive.point_cloud.normals[i][2] * iteration_step
                    # Energies at the shifted candidate positions.
                    functional_energy_POS = alpha * compute_econt(point_POS_dir,
                                                                  srcPrimitive.point_cloud.points[
                                                                      srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                          i].nextPointID],
                                                                  srcPrimitive.point_cloud) + \
                                            beta * compute_ecurv(point_POS_dir,
                                                                 srcPrimitive.point_cloud.points[
                                                                     srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                         i].nextPointID],
                                                                 srcPrimitive.point_cloud.points[
                                                                     srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                         srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                             i].nextPointID].nextPointID]) - \
                                            gama * number_of_neighbors(point_POS_dir,
                                                                       objectROI.point_cloud, search_radius)
                    functional_energy_NEG = alpha * compute_econt(point_NEG_dir,
                                                                  srcPrimitive.point_cloud.points[
                                                                      srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                          i].nextPointID],
                                                                  srcPrimitive.point_cloud) + \
                                            beta * compute_ecurv(point_NEG_dir,
                                                                 srcPrimitive.point_cloud.points[
                                                                     srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                         i].nextPointID],
                                                                 srcPrimitive.point_cloud.points[
                                                                     srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                         srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                             i].nextPointID].nextPointID]) - \
                                            gama * number_of_neighbors(point_NEG_dir,
                                                                       objectROI.point_cloud, search_radius)
                    # Observation support and drift of both candidates.
                    POS_neigh = number_of_neighbors(point_POS_dir, objectROI.point_cloud, search_radius)
                    NEG_neigh = number_of_neighbors(point_NEG_dir, objectROI.point_cloud, search_radius)
                    POS_dist_to_init_pos = dist2point(point_POS_dir, srcPrimitive.point_cloud.points[i])
                    NEG_dist_to_init_pos = dist2point(point_NEG_dir, srcPrimitive.point_cloud.points[i])
                    # Accept a candidate when it lowers the energy, stays close
                    # to the original position, and gains observation support.
                    if functional_energy_POS < primitive_points_list[i].functional_energy and \
                            POS_dist_to_init_pos < treshold and \
                            POS_neigh > primitive_points_list[i].nOfNeighbors:
                        functional_energy = functional_energy_POS
                        primitive_points_list[i].functional_energy = functional_energy_POS
                        primitive_points_list[i].index = i
                        primitive_points_list[i].x = point_POS_dir[0]
                        primitive_points_list[i].y = point_POS_dir[1]
                        primitive_points_list[i].z = point_POS_dir[2]
                        primitive_points_list[i].nOfNeighbors = POS_neigh
                        primitive_points_list[i].isModified = True
                        # Probe one extra step further along the normal; keep
                        # it when it gains even more observation support.
                        temp_pos_dir[0] += srcPrimitive.point_cloud.normals[i][0] * iteration_step
                        temp_pos_dir[1] += srcPrimitive.point_cloud.normals[i][1] * iteration_step
                        temp_pos_dir[2] += srcPrimitive.point_cloud.normals[i][2] * iteration_step
                        if number_of_neighbors(temp_pos_dir, objectROI.point_cloud, search_radius) > POS_neigh:
                            functional_energy_POS = alpha * compute_econt(temp_pos_dir,
                                                                          srcPrimitive.point_cloud.points[
                                                                              srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                                  i].nextPointID],
                                                                          srcPrimitive.point_cloud) + \
                                                    beta * compute_ecurv(temp_pos_dir,
                                                                         srcPrimitive.point_cloud.points[
                                                                             srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                                 i].nextPointID],
                                                                         srcPrimitive.point_cloud.points[
                                                                             srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                                 srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                                     i].nextPointID].nextPointID]) - \
                                                    gama * number_of_neighbors(temp_pos_dir,
                                                                               objectROI.point_cloud, search_radius)
                            primitive_points_list[i].functional_energy = functional_energy_POS
                            primitive_points_list[i].x = temp_pos_dir[0]
                            primitive_points_list[i].y = temp_pos_dir[1]
                            primitive_points_list[i].z = temp_pos_dir[2]
                            primitive_points_list[i].nOfNeighbors = number_of_neighbors(temp_pos_dir, objectROI.point_cloud, search_radius)
                        break
                    elif functional_energy_NEG < primitive_points_list[i].functional_energy and \
                            NEG_dist_to_init_pos < treshold and \
                            NEG_neigh > primitive_points_list[i].nOfNeighbors:
                        functional_energy = functional_energy_NEG
                        primitive_points_list[i].functional_energy = functional_energy_NEG
                        primitive_points_list[i].index = i
                        primitive_points_list[i].x = point_NEG_dir[0]
                        primitive_points_list[i].y = point_NEG_dir[1]
                        primitive_points_list[i].z = point_NEG_dir[2]
                        primitive_points_list[i].nOfNeighbors = NEG_neigh
                        primitive_points_list[i].isModified = True
                        # Same one-extra-step probe, in the negative direction.
                        temp_neg_dir[0] -= srcPrimitive.point_cloud.normals[i][0] * iteration_step
                        temp_neg_dir[1] -= srcPrimitive.point_cloud.normals[i][1] * iteration_step
                        temp_neg_dir[2] -= srcPrimitive.point_cloud.normals[i][2] * iteration_step
                        if number_of_neighbors(temp_neg_dir, objectROI.point_cloud, search_radius) > NEG_neigh:
                            functional_energy_NEG = alpha * compute_econt(temp_neg_dir,
                                                                          srcPrimitive.point_cloud.points[
                                                                              srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                                  i].nextPointID],
                                                                          srcPrimitive.point_cloud) + \
                                                    beta * compute_ecurv(temp_neg_dir,
                                                                         srcPrimitive.point_cloud.points[
                                                                             srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                                 i].nextPointID],
                                                                         srcPrimitive.point_cloud.points[
                                                                             srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                                 srcPrimitive.allPrimitivePointsNeighboursDependinces[
                                                                                     i].nextPointID].nextPointID]) - \
                                                    gama * number_of_neighbors(temp_neg_dir,
                                                                               objectROI.point_cloud, search_radius)
                            primitive_points_list[i].functional_energy = functional_energy_NEG
                            primitive_points_list[i].index = i
                            primitive_points_list[i].x = temp_neg_dir[0]
                            primitive_points_list[i].y = temp_neg_dir[1]
                            primitive_points_list[i].z = temp_neg_dir[2]
                            primitive_points_list[i].nOfNeighbors = number_of_neighbors(temp_neg_dir, objectROI.point_cloud, search_radius)
                            primitive_points_list[i].isModified = True
                        break
                total_energy_temp += functional_energy
                srcPrimitive.primitiveModelledVertices[i].isModified = True
                # If a better position was found, move the point and drag its
                # neighbourhood with it.
                if primitive_points_list[i].isModified:
                    count += 1
                    transform_neighbor_points_for(i,
                                                  [primitive_points_list[i].x,
                                                   primitive_points_list[i].y,
                                                   primitive_points_list[i].z],
                                                  srcPrimitive,
                                                  visualizer)
        print('For iteration {0} - {1} points modified || Energy = {2}'.format(iteration, count, total_energy_temp))
    TIME_FINISH = time.time()
    print('*********************TIME*********************')
    print('Time for {0} : {1}'.format(iteration, TIME_FINISH - TIME_START))
def transform_neighbor_points_for(original_point_index,
                                  best_point_position,
                                  primitive, visualizer):
    """
    Drag the neighbourhood of a moved primitive point along with it.

    Points within an influence radius of the new position are shifted by the
    same displacement, attenuated linearly with their distance, and the moved
    point itself is then written back to the cloud.

    :param original_point_index: [in] index of the point that was moved.
    :param best_point_position: [in] the point's new (optimal) position.
    :param primitive: [in] primitive object whose cloud is updated in place.
    :param visualizer: [in] visualizer refreshed after the neighbourhood update.
    """
    from parameters import ModellingParameters
    from io_primitive import PRIMITIVE_NEIGHBOR_POINT
    neighbors = list()
    # Influence radius scales with how far the point travelled.
    affected_area = euclidian_distance_2points(primitive.point_cloud.points[original_point_index],
                                               best_point_position) * ModellingParameters.CAR.MODELLING_AFFECTED_AREA_FACTOR
    # Displacement from the old position to the new one (old - new).
    dx = primitive.point_cloud.points[original_point_index][0] - best_point_position[0]
    dy = primitive.point_cloud.points[original_point_index][1] - best_point_position[1]
    dz = primitive.point_cloud.points[original_point_index][2] - best_point_position[2]
    kd_tree = geometry.KDTreeFlann()
    if affected_area > 0:
        kd_tree.set_geometry(primitive.point_cloud)
        (neighbors_count, pointIdxRadiusSearch, pointRadiusSquareDistance) = kd_tree.search_radius_vector_3d(best_point_position, affected_area)
        if neighbors_count > 0:
            for i in range(0, len(pointIdxRadiusSearch)):
                # NOTE(review): 'is not' compares object identity; if indexing
                # the cloud yields fresh array objects each time, this test is
                # always True and the moved point is NOT excluded - confirm.
                if primitive.point_cloud.points[original_point_index] is not primitive.point_cloud.points[pointIdxRadiusSearch[i]]:
                    # Offset of this neighbour from the point's OLD position.
                    tempdx = primitive.point_cloud.points[original_point_index][0] - primitive.point_cloud.points[pointIdxRadiusSearch[i]][0]
                    tempdy = primitive.point_cloud.points[original_point_index][1] - primitive.point_cloud.points[pointIdxRadiusSearch[i]][1]
                    tempdz = primitive.point_cloud.points[original_point_index][2] - primitive.point_cloud.points[pointIdxRadiusSearch[i]][2]
                    temp_dist = np.sqrt((tempdx ** 2) + (tempdy ** 2) + (tempdz ** 2))
                    tempNeighbor = PRIMITIVE_NEIGHBOR_POINT(
                        pointIdxRadiusSearch[i],
                        primitive.point_cloud.points[pointIdxRadiusSearch[i]],
                        temp_dist,
                        tempdx,
                        tempdy,
                        tempdz)
                    neighbors.append(tempNeighbor)
            # NOTE(review): max() raises ValueError when 'neighbors' is empty
            # (every candidate filtered above) - confirm that cannot happen.
            dist_max = max(neighbor.dist for neighbor in neighbors)
            for neighbor in neighbors:
                if neighbor.dist != 0:
                    # Linear falloff: close neighbours move nearly the full
                    # displacement, the farthest barely move at all.
                    neighbor.ptNeighborhood[0] = neighbor.ptNeighborhood[0] - dx * (1 - neighbor.dist / dist_max)
                    neighbor.ptNeighborhood[1] = neighbor.ptNeighborhood[1] - dy * (1 - neighbor.dist / dist_max)
                    neighbor.ptNeighborhood[2] = neighbor.ptNeighborhood[2] - dz * (1 - neighbor.dist / dist_max)
                    primitive.point_cloud.points[neighbor.position_in_primitive_vect] = neighbor.ptNeighborhood
                    primitive.primitiveModelledVertices[neighbor.position_in_primitive_vect].isModified = True
            visualizer.update_geometry()
            visualizer.poll_events()
            visualizer.update_renderer()
    # Finally commit the moved point itself.
    primitive.primitiveModelledVertices[original_point_index].isModified = True
    primitive.point_cloud.points[original_point_index] = best_point_position
43,648 | RovisLab/GFPNet | refs/heads/master | /io_utils.py | import h5py
import numpy as np
from open3d.open3d import io, geometry
from path_utils import PATHS
def read_off_file(filename):
    """
    Method used for reading off files, holding points representing the point cloud.

    Lines with six fields are parsed as point + normal; lines with three
    fields as point only. Parsing stops at the first face line (leading '3')
    or at EOF. Blank lines are skipped.

    Fixes vs. the original: the file handle is closed via a context manager,
    a blank line no longer crashes ``np.array`` with a non-numeric token, and
    the dead ``x != '\\n'`` list-vs-string comparison is removed.

    :param filename: [in] Path to .off file.
    :return: [out] tuple (points, normals) of lists of float32 arrays
    """
    points = []
    normals = []
    with open(filename) as f:
        f.readline()  # ignore the 'OFF' at the first line
        f.readline()  # ignore the second (counts) line
        for new_line in f:
            fields = new_line.split()
            if not fields:
                continue  # tolerate blank lines instead of crashing
            if fields[0] == '3':
                break  # face section reached: vertex data is done
            if len(fields) == 6:
                points.append(np.array(fields[0:3], dtype='float32'))
                normals.append(np.array(fields[3:6], dtype='float32'))
            else:
                points.append(np.array(fields[0:3], dtype='float32'))
    return points, normals
def load_h5(h5_filename):
    """Load the 'data' and 'label' datasets from an HDF5 file.

    Uses an explicit read mode and a context manager: the original leaked the
    file handle and relied on h5py's deprecated default open mode.

    :param h5_filename: [in] path to the .h5 file
    :return: [out] tuple (data, label) read fully into memory
    """
    with h5py.File(h5_filename, 'r') as f:
        data = f['data'][:]
        label = f['label'][:]
    return (data, label)
def createH5_from(data_path, labels_path, files_format = 'off', h5_filename='train_fileh5'):
    """
    Collect every cloud file under data_path/labels_path and write them into
    one HDF5 file with 'data' and 'label' datasets under PATHS.NETWORK.root.

    :param data_path: [in] directory containing the training clouds.
    :param labels_path: [in] directory containing the label clouds.
    :param files_format: [in] file extension to glob for (default 'off').
    :param h5_filename: [in] output file name (without the .h5 extension).
    """
    from pathlib import Path
    FILE_FORMAT = files_format
    # script to read all clouds and write in h5
    hf = h5py.File(PATHS.NETWORK.root +'{0}.h5'.format(h5_filename), 'w')
    train_x_clouds_path = Path(data_path).glob('*.{0}'.format(FILE_FORMAT))
    train_x_labels_path = Path(labels_path).glob('*.{0}'.format(FILE_FORMAT))
    point_cloud_points = []
    labels = []
    for cloud_path in train_x_clouds_path:
        cloud_path = str(cloud_path)
        # NOTE(review): read_off_file returns a (points, normals) TUPLE and the
        # whole tuple is appended, so each dataset row is a ragged tuple rather
        # than a point array - confirm h5py accepts this shape as intended.
        cloud_points_with_normals = read_off_file(cloud_path)
        point_cloud_points.append(cloud_points_with_normals)
    for label_path in train_x_labels_path:
        label_path = str(label_path)
        label_points_with_normals = read_off_file(label_path)
        labels.append(label_points_with_normals)
    hf.create_dataset('data', data=point_cloud_points)
    hf.create_dataset('label', data=labels)
    hf.close()
    print('{0} CREATED SUCCESSFULLY - can be found at : {1}.'.format(h5_filename, PATHS.NETWORK.root + h5_filename))
def createH5_GPF(train_X, train_Y, h5_filename='train_fileh5'):
    """Write already-prepared training samples and labels to an HDF5 file.

    :param train_X: [in] training samples, stored under the 'data' dataset.
    :param train_Y: [in] labels, stored under the 'label' dataset.
    :param h5_filename: [in] output file name (without the .h5 extension).
    """
    hf = h5py.File(PATHS.NETWORK.root +'{0}.h5'.format(h5_filename), 'w')
    for dataset_name, payload in (('data', train_X), ('label', train_Y)):
        hf.create_dataset(dataset_name, data=payload)
    hf.close()
    print('{0} CREATED SUCCESSFULLY - can be found at : {1}.'.format(h5_filename,PATHS.NETWORK.root + h5_filename))
43,649 | RovisLab/GFPNet | refs/heads/master | /path_utils.py | import os
from pathlib import Path
basepath = os.path.dirname(__file__)
DIRECTORY_PATH = os.path.abspath(os.path.join(basepath))
class PATHS(object):
    '''
    Constant variables representing the paths to the folders used in the code.
    '''
    class KITTI(object):
        '''
        All these paths, are related to the kitti dataset folder, downloaded from ki
        @object_dir - path to kitti dataset directory.
        @IMG_ROOT - path to .png or .img files that are found in the image_2 folder.
        @PC_ROOT - path to velodyne pointclouds
        @CALIB_ROOT - path to calibration data, found in the folder calib.
        '''
        basepath = os.path.dirname(__file__)
        # "Dataset" "\\kitti" is implicit string concatenation -> "Dataset\kitti".
        DIRECTORY_PATH = os.path.abspath(os.path.join(basepath, "Dataset" "\\kitti"))
        IMG_ROOT = os.path.join(DIRECTORY_PATH, "image_2\\")
        PC_ROOT = os.path.join(DIRECTORY_PATH, "velodyne\\")
        CALIB_ROOT = os.path.join(DIRECTORY_PATH, "calib\\")
        # KITTI annotation class names used when filtering labels.
        CAR_CLASS = 'Car'
        PEDESTRIAN_CLASS = 'Pedestrian'
        CYCLIST_CLASS = 'Cyclist'
        TRUCK_CLASS = 'Truck'
    class NETWORK(object):
        # NOTE(review): a join component starting with "\\" makes os.path.join
        # on Windows discard everything before it (root becomes drive-relative
        # "\network"), unlike CAR.root below which joins plain "primitives",
        # "car\\". Confirm whether "\\network" should be just "network".
        root = os.path.join(DIRECTORY_PATH, "dataset", "\\network")
        TRAIN_X = os.path.join(root, "train_x\\")
        TRAIN_Y = os.path.join(root, "train_y\\")
    class PATH_TO_PRIMITIVES(object):
        # NOTE(review): same leading-"\\" join caveat as NETWORK.root.
        root = os.path.join(DIRECTORY_PATH, "dataset", "\\primitives")
        BOTTLE = os.path.join(root, "bottle\\")
        GLASSES = os.path.join(root, "glasses\\")
        HAT = os.path.join(root, "hat\\")
        KNIFE = os.path.join(root, "knife\\")
        MUG = os.path.join(root, "mug\\")
        OTHER = os.path.join(root, "other\\")
        SOFA = os.path.join(root, "sofa\\")
        HUMAN = os.path.join(root, "human\\")
        class CAR(object):
            root = os.path.join(DIRECTORY_PATH, "dataset", "primitives", "car\\")
43,650 | RovisLab/GFPNet | refs/heads/master | /transformations_utils.py | import time
import numpy as np
from open3d.open3d import registration
from open3d.open3d.geometry import voxel_down_sample, estimate_normals, KDTreeSearchParamHybrid, KDTreeSearchParamKNN, PointCloud, KDTreeFlann
from open3d.open3d.registration import registration_ransac_based_on_feature_matching, \
TransformationEstimationPointToPoint, CorrespondenceCheckerBasedOnEdgeLength, CorrespondenceCheckerBasedOnDistance, \
RANSACConvergenceCriteria, compute_fpfh_feature, registration_fast_based_on_feature_matching, \
FastGlobalRegistrationOption, registration_icp, TransformationEstimationPointToPlane, ICPConvergenceCriteria
from io_observation import IoObservation
from parameters import ModellingParameters
def compute_centroid(points):
    """Return the centroid [x, y, z] of a sequence of 3D points."""
    n = len(points)
    axis_sums = [sum(coords) for coords in zip(*points)]
    return [axis_sums[0] / n, axis_sums[1] / n, axis_sums[2] / n]
def delaunay_triangulation(points_array):
    """Triangulate points via a 2D Delaunay of their (x, y) projection and
    wrap the result in a cleaned open3d TriangleMesh."""
    import numpy as np
    from scipy.spatial import Delaunay
    from open3d.open3d import geometry, utility

    xy = np.array([[point[0], point[1]] for point in points_array])
    triangulation = Delaunay(xy)
    mesh = geometry.TriangleMesh()
    mesh.vertices = utility.Vector3dVector(points_array)
    mesh.triangles = utility.Vector3iVector(triangulation.simplices)
    mesh.remove_duplicated_triangles()
    mesh.remove_degenerate_triangles()
    return mesh
def downsample_cloud_random(pointcloud, number_of_points):
    '''
    Method used for cloud downsampling by selecting random points.
    :param point_cloud_in: [in] Input point cloud.
    :param number_of_points: [in] Desired number of points.
    :return: [out] Downsampled point cloud.
    '''
    # NOTE(review): despite the name, this APPENDS the randomly selected
    # points back onto the same input cloud via push_back, so the cloud GROWS
    # by number_of_points instead of being reduced - confirm intent before use.
    cloud_size = len(pointcloud.points)
    # Random, non-repeating indices into the existing points.
    index = np.random.choice(cloud_size, number_of_points, replace=False)
    for i in range(len(index)):
        cloud_point = [pointcloud.points[index[i]][0],
                       pointcloud.points[index[i]][1],
                       pointcloud.points[index[i]][2]]
        pointcloud.points.push_back(cloud_point)
    return pointcloud
def preprocess_point_cloud(pcd, voxel_size):
    '''
    Estimate normals and FPFH features for a point cloud.
    @ The FPFH feature is a 33-dimensional vector that describes the local geometric property of a point.
    NOTE: voxel downsampling is currently disabled - the cloud passes through unchanged.
    :param pcd: [in] Open3D point cloud
    :param voxel_size: [in] Base size used to derive the two search radii.
    :return: [out] the (pass-through) cloud along with its FPFH features.
    '''
    print(":: Downsample with a voxel size %.3f." % voxel_size)
    pcd_down = pcd  # voxel_down_sample(pcd, voxel_size) intentionally bypassed
    normal_radius = voxel_size * 1.5
    print(":: Estimate normal with search radius %.3f." % normal_radius)
    estimate_normals(pcd_down, KDTreeSearchParamHybrid(
        radius=normal_radius, max_nn=30))
    feature_radius = voxel_size * 5
    print(":: Compute FPFH feature with search radius %.3f." % feature_radius)
    pcd_fpfh = compute_fpfh_feature(pcd_down,
                                    KDTreeSearchParamHybrid(radius=feature_radius, max_nn=100))
    return pcd_down, pcd_fpfh
def downsample_points(points, number_of_points):
    '''
    Randomly resample a list of points to a fixed size.

    Samples without replacement when enough points exist, with replacement
    otherwise.

    BUG FIX: the original copied coordinate [0] into all three output slots,
    producing [x, x, x] instead of [x, y, z].

    :param points: [in] input points (sequences of at least 3 coordinates).
    :param number_of_points: [in] desired number of output points.
    :return: [out] list of [x, y, z] points.
    '''
    if number_of_points < len(points):
        index = np.random.choice(len(points), number_of_points, replace=False)
    else:
        index = np.random.choice(len(points), number_of_points, replace=True)
    downsampled_points = []
    for i in index:
        downsampled_points.append([points[i][0], points[i][1], points[i][2]])
    return downsampled_points
def execute_global_registration(source, target, normals_radius, treshold):
    """
    RANSAC-based global registration of two clouds using FPFH features.

    :param source: source pointcloud
    :param target: target pointcloud
    :param normals_radius: radius for fast point feature histogram estimation
    :param treshold: max correspondence distance for the RANSAC matching
    :return: registration result containing transformation matrix, inlier_rmse
    """
    source_features = compute_fpfh_feature(source, KDTreeSearchParamHybrid(radius=normals_radius, max_nn=100))
    target_features = compute_fpfh_feature(target, KDTreeSearchParamHybrid(radius=normals_radius, max_nn=100))
    print(":: RANSAC registration on downsampled point clouds.")
    checkers = [CorrespondenceCheckerBasedOnEdgeLength(0.9),
                CorrespondenceCheckerBasedOnDistance(treshold)]
    return registration_ransac_based_on_feature_matching(
        source, target,
        source_features, target_features,
        treshold,
        TransformationEstimationPointToPoint(False), 4,
        checkers,
        RANSACConvergenceCriteria(4000000, 1000))
def execute_fast_global_registration(source_down, target_down,
                                     source_fpfh, target_fpfh, treshold):
    '''
    Fast feature-based global registration of two downsampled clouds.
    :param source_down: [in] Source downsampled point cloud.
    :param target_down: [in] Target downsampled point cloud.
    :param source_fpfh: [in] Source FPFH features.
    :param target_fpfh: [in] Target FPFH features.
    :param treshold: [in] Maximum correspondence distance for the matching.
    :return: [out] Registration result.
    '''
    print(":: Apply fast global registration with distance threshold %.3f" % treshold)
    matching_option = FastGlobalRegistrationOption(maximum_correspondence_distance=treshold)
    return registration_fast_based_on_feature_matching(
        source_down, target_down, source_fpfh, target_fpfh, matching_option)
def refine_registration(source, target, treshold, transformation):
    '''
    Point-to-plane ICP refinement after a global registration has completed.
    :param source: [in] source pointcloud
    :param target: [in] target pointcloud
    :param treshold: [in] max correspondence distance for the ICP step
    :param transformation: [in] initial transformation matrix
    :return: [out] ICP result object (fitness, inlier_rmse, transformation)
    '''
    # Point-to-plane estimation requires normals on both clouds.
    for cloud in (source, target):
        estimate_normals(cloud, KDTreeSearchParamKNN())
    return registration_icp(source, target, treshold, transformation,
                            TransformationEstimationPointToPlane(),
                            ICPConvergenceCriteria(max_iteration=1))
def get_diagonal(pointcloud):
    '''
    Length of the diagonal of the cloud's axis-aligned bounding box (AABB).
    :param pointcloud: [in] cloud exposing get_min_bound / get_max_bound
    :return: [out] length of the AABB diagonal
    '''
    lo = pointcloud.get_min_bound()
    hi = pointcloud.get_max_bound()
    squared_deltas = [(lo[axis] - hi[axis]) ** 2 for axis in range(3)]
    return np.sqrt(sum(squared_deltas))
def upsample_cloud_kd_tree(pointcloud, number_of_points):
    '''
    Upsample a cloud by inserting the midpoint between a random point and its
    closest neighbour until the desired size is reached.

    BUG FIXES vs. the original:
    - the original called the non-existent ``kdtree.nearestKSearch`` (a PCL
      C++ API) after already running ``search_knn_vector_3d``, which raised
      AttributeError on every call;
    - the new point was built from the x coordinate only ([x, x, x]);
    - the "point in between" formula extrapolated (p + (p - q)/2) instead of
      taking the midpoint (p + q)/2.

    :param pointcloud: [in] Input point cloud.
    :param number_of_points: [in] Desired number of points of the point cloud.
    :return: [out] Upsampled point cloud.
    '''
    cloud_size = len(pointcloud.points)
    points_to_be_added_count = number_of_points - cloud_size
    kdtree = KDTreeFlann()
    kdtree.set_geometry(pointcloud)
    K = 5
    for _ in range(0, points_to_be_added_count):
        index = np.random.choice(cloud_size, 1, replace=False)
        cloud_point = pointcloud.points[index[0]]
        # Find the nearest non-identical neighbour among the K closest.
        (count, pointIdxNKNSearch, pointNKNSquareDistance) = kdtree.search_knn_vector_3d(cloud_point, K)
        if count == 0:
            continue
        closest_neighbor_index = None
        distance = 1000
        for j in range(len(pointIdxNKNSearch)):
            if pointNKNSquareDistance[j] < distance and pointNKNSquareDistance[j] != 0:
                distance = pointNKNSquareDistance[j]
                closest_neighbor_index = j
        if closest_neighbor_index is None:
            continue  # every candidate coincided with the query point
        neighbor_point = pointcloud.points[pointIdxNKNSearch[closest_neighbor_index]]
        # Insert the true midpoint between the sampled point and its neighbour.
        new_point = [(cloud_point[0] + neighbor_point[0]) / 2,
                     (cloud_point[1] + neighbor_point[1]) / 2,
                     (cloud_point[2] + neighbor_point[2]) / 2]
        pointcloud.points.push_back(new_point)
    return pointcloud
def normalize_cloud(pointcloud):
    """Shift the cloud's points to the origin, then scale each axis by the
    resulting maximum so coordinates fall into [0, 1]."""
    origin = pointcloud.get_min_bound()
    shifted_points = translate_point_cloud(pointcloud.points, origin, action='translate')
    extent = np.amax(shifted_points, axis=0)
    return translate_point_cloud(shifted_points, extent, action='normalize')
def translate_point_cloud(cloud_points_array, translationPoint_as_array, action):
    '''
    Translate or normalize point-cloud points, in place, by a reference point.

    :param cloud_points_array: [in] points to transform (mutated in place)
    :param translationPoint_as_array: [in] reference point (per-axis values)
    :param action: [in] 'translate' subtracts the point; 'normalize' divides by it
    :return: [out] the transformed points (same object as the input)
    :raises ValueError: for an unknown action (the original silently returned None)
    '''
    if action not in ('translate', 'normalize'):
        raise ValueError("action must be 'translate' or 'normalize', got %r" % (action,))
    for point in cloud_points_array:
        if action == 'translate':
            point[0] -= translationPoint_as_array[0]
            point[1] -= translationPoint_as_array[1]
            point[2] -= translationPoint_as_array[2]
        else:
            point[0] /= translationPoint_as_array[0]
            point[1] /= translationPoint_as_array[1]
            point[2] /= translationPoint_as_array[2]
    return cloud_points_array
def scale_pointcloud(pointcloud, scale):
    '''
    Scale every point of a cloud, in place, by a scalar factor.
    :param pointcloud: [in] cloud whose .points are mutated
    :param scale: [in] multiplicative factor applied to each coordinate
    :return: [out] the same cloud object, scaled
    '''
    for idx in range(len(pointcloud.points)):
        for axis in range(3):
            pointcloud.points[idx][axis] *= scale
    return pointcloud
def calculate_distance_from(points, origin):
    '''
    Component-wise offset vector from an origin to a point.
    :param points: [in] point
    :param origin: [in] reference origin
    :return: [out] [dx, dy, dz] offset vector
    '''
    return [points[axis] - origin[axis] for axis in range(3)]
def normalize_points(points, norm_point):
    ''' Divide every point by *norm_point*, component-wise and in place.

    :param points: [in/out] indexable sequence of mutable 3D points
    :param norm_point: [in] reference (x, y, z) divisor
    :return: [out] the mutated *points*
    '''
    for idx in range(len(points)):
        for axis in (0, 1, 2):
            points[idx][axis] /= norm_point[axis]
    return points
def translate_points(points, translationPoint_as_array):
    '''
    Subtract a reference point from every point, component-wise and in place.

    :param points: [in/out] indexable sequence of mutable 3D points
    :param translationPoint_as_array: [in] reference (x, y, z) translation point
    :return: [out] the mutated *points*
    '''
    for idx in range(len(points)):
        for axis in (0, 1, 2):
            points[idx][axis] -= translationPoint_as_array[axis]
    return points
def scale_points(points, scale):
    '''
    Multiply every point by a scalar factor, component-wise and in place.

    :param points: [in/out] indexable sequence of mutable 3D points
    :param scale: [in] scalar factor applied to x, y and z
    :return: [out] the mutated *points*
    '''
    for idx in range(len(points)):
        for axis in (0, 1, 2):
            points[idx][axis] *= scale
    return points
def icp_align_clouds(source, target, threshold, show_on_visualizer=False, max_iterations=50):
    """Align `source` onto `target`: coarse global registration, then ICP.

    :param source: wrapper holding an open3d cloud in `.point_cloud`; its
        cloud is transformed in place
    :param target: wrapper holding the reference cloud (not transformed)
    :param threshold: ICP max correspondence distance; also seeds the
        normal-estimation radii
    :param show_on_visualizer: when True, render every refinement step in an
        open3d window
    :param max_iterations: number of single-step ICP refinements
    :return: tuple (source.point_cloud, target.point_cloud) after alignment

    NOTE(review): `execute_global_registration`, `estimate_normals`,
    `registration_icp`, `KDTreeSearchParamHybrid`, etc. are assumed to come
    from a star import outside this excerpt — confirm they resolve.
    """
    from open3d.open3d import registration, visualization
    # Coarse alignment first; `treshold` (sic) matches the helper's own
    # parameter spelling.
    result = execute_global_registration(source.point_cloud, target.point_cloud, normals_radius=threshold*10, treshold=threshold)
    estimate_normals(cloud=source.point_cloud, search_param=KDTreeSearchParamHybrid(threshold, 30))
    estimate_normals(cloud=target.point_cloud, search_param=KDTreeSearchParamHybrid(threshold, 30))
    if show_on_visualizer:
        vis = visualization.Visualizer()
        vis.create_window("ICP ALIGNMENT", 800, 600)
        vis.add_geometry(source.point_cloud)
        vis.add_geometry(target.point_cloud)
        # Apply the coarse transform, then refine one ICP iteration at a time
        # so each intermediate pose can be drawn.
        source.point_cloud.transform(result.transformation)
        for i in range(max_iterations):
            reg_p2l = registration_icp(source.point_cloud, target.point_cloud, threshold,
                                       np.identity(4), TransformationEstimationPointToPoint(),
                                       ICPConvergenceCriteria(max_iteration=1))
            # Keep only the rotation about z (upper-left 2x2) plus the full
            # translation column; the out-of-plane rotation terms are zeroed
            # so the cloud is never tilted away from the z axis.
            trans_matrix_z = [[reg_p2l.transformation[0][0], reg_p2l.transformation[0][1], 0.0, reg_p2l.transformation[0][3]],
                              [reg_p2l.transformation[1][0], reg_p2l.transformation[1][1], 0.0, reg_p2l.transformation[1][3]],
                              [0.0, 0.0, 1, reg_p2l.transformation[2][3]],
                              [0.0, 0.0, 0.0, 1]]
            source.point_cloud.transform(trans_matrix_z)
            vis.update_geometry()
            vis.poll_events()
            vis.update_renderer()
        vis.run()
        vis.destroy_window()
    else:
        # Headless path: apply the full, unconstrained ICP update each step.
        # NOTE(review): this branch does NOT force the z-axis constraint used
        # above — confirm the asymmetry is intended.
        source.point_cloud.transform(result.transformation)
        for i in range(max_iterations):
            reg_p2l = registration_icp(source.point_cloud, target.point_cloud, threshold,
                                       np.identity(4), TransformationEstimationPointToPoint(),
                                       ICPConvergenceCriteria(max_iteration=1))
            source.point_cloud.transform(reg_p2l.transformation)
    return source.point_cloud, target.point_cloud
def compute_point_cloud_control_points(srcPrimitive):
    '''
    Method used for computing the point cloud control points.
    It rotates the pointcloud and searches for points at sharp edges and corners.
    :param srcPrimitive: [in] Object holding the primitive point cloud.
    :return: [out] List of indexes of the control points inside the point cloud.
    '''
    indexes_of_control_points = []
    # Sweep 0/120/240-degree rotations about each axis; after each pose,
    # record the running highest/lowest points along y as candidates.
    # NOTE(review): rotations accumulate — the cloud is never reset between
    # poses, so each rotate() composes with the previous one. Confirm intended.
    for x in range(0, 360, 120):
        for y in range(0, 360, 120):
            for z in range(0, 360, 120):
                rotation_point = [x, y, z]
                srcPrimitive.point_cloud.rotate(rotation_point)
                hPointIndex = 0
                lPointIndex = 0
                # NOTE(review): the `.y` attribute access assumes point objects
                # with named fields; open3d cloud points are plain 3-element
                # arrays (index [1]) — confirm this path actually runs.
                for e in range(0, len(srcPrimitive.point_cloud.points)):
                    if srcPrimitive.point_cloud.points[e].y > srcPrimitive.point_cloud.points[hPointIndex].y:
                        hPointIndex = e
                        indexes_of_control_points.append(hPointIndex)
                    elif srcPrimitive.point_cloud.points[e].y < srcPrimitive.point_cloud.points[lPointIndex].y:
                        lPointIndex = e
                        indexes_of_control_points.append(lPointIndex)
    # Deduplicate the candidate indices; set() does not preserve order.
    indexes_of_control_points = list(set(indexes_of_control_points))
    return indexes_of_control_points
def get_sample_for_direction(prim_point_idx, object_cloud, primitive_object, visualization=None, usePrimitiveNNPoints = False):
    """March from one primitive vertex along its normal — first outwards
    (+normal), then inwards (-normal) — gathering nearest neighbours at each
    step from the observed cloud (and optionally from the primitive itself).

    :param prim_point_idx: index of the primitive vertex to sample around
    :param object_cloud: observed cloud wrapper exposing `.point_cloud.xyz`
        and `.normals.normals`
    :param primitive_object: primitive wrapper with the same layout
    :param visualization: unused here; kept for call-site compatibility
    :param usePrimitiveNNPoints: when True, also collect the primitive's own
        neighbours at every step
    :return: list with up to two arrays (one per direction), each row being a
        point concatenated with its normal, deduplicated via unique_rows
    """
    from common.data_processing.kitti_IO_utils import MODELLING_PARAMS
    from common.utils.transformations.geometric_transformations import calculate_distance_from
    primitive_point = primitive_object.point_cloud.xyz[prim_point_idx]
    sample_X = []
    step_counter = 0
    # Working copy of the vertex — advanced along the normal each step.
    prim_aux = np.copy(primitive_point)
    sample_X_nn_obj = []
    sample_X_nn_prim = []
    radius_search = MODELLING_PARAMS.CAR.RADIUS_SEARCH
    # --- Pass 1: march along the +normal direction -----------------------
    while (step_counter < MODELLING_PARAMS.CAR.STEPS):
        prim_aux += primitive_object.normals.normals[prim_point_idx] * MODELLING_PARAMS.CAR.STEP_SIZE
        # Get current primitive points lidar NN
        nn_indexes_object_cloud = get_nn_indexes(prim_aux,
                                                 object_cloud,
                                                 radius_search)
        # Get current primitive points primitive NN
        nn_indexes_primitive_cloud = get_nn_indexes(prim_aux,
                                                    primitive_object,
                                                    radius_search)
        for index in nn_indexes_object_cloud:
            point = object_cloud.point_cloud.xyz[index]
            normal = object_cloud.normals.normals[index]
            sample_X_nn_obj.append((point, normal))
        cloud_points = [pair[0] for pair in sample_X_nn_obj]
        cloud_normals = [pair[1] for pair in sample_X_nn_obj]
        # ***
        #visualize_samples(cloud_points, cloud_normals, normalize=True, r=0, g=255, b=0)
        if usePrimitiveNNPoints:
            for index in nn_indexes_primitive_cloud:
                point = primitive_object.point_cloud.xyz[index]
                normal = primitive_object.normals.normals[index]
                sample_X_nn_prim.append((point, normal))
        step_counter+=1
    # NOTE(review): after pass 1 only the PRIMITIVE neighbours feed `concat`
    # (cloud_points/cloud_normals are rebuilt from sample_X_nn_prim below),
    # while the second pass uses the combined `sample` — the `sample` built
    # here is discarded. Looks unintended; confirm.
    cloud_points = [pair[0] for pair in sample_X_nn_prim]
    cloud_normals = [pair[1] for pair in sample_X_nn_prim]
    # ***
    #visualize_samples(cloud_points, cloud_normals, normalize=True, r=255, g=0, b=0)
    sample = sample_X_nn_prim + sample_X_nn_obj
    # Import kept for the commented-out debug visualisation calls above.
    from common.visualization.visualizer_parameters import visualize_samples
    if len(cloud_points) > 0:
        concat = np.concatenate((cloud_points, cloud_normals), axis=1)
        no_duplicates_points = unique_rows(concat)
        sample_X.append(no_duplicates_points)
    # --- Pass 2: reset and march along the -normal direction -------------
    step_counter = 0
    prim_aux = np.copy(primitive_point)
    sample_X_nn_obj = []
    sample_X_nn_prim = []
    while (step_counter < MODELLING_PARAMS.CAR.STEPS):
        prim_aux -= primitive_object.normals.normals[prim_point_idx] * MODELLING_PARAMS.CAR.STEP_SIZE
        # Get current primitive points lidar NN
        nn_indexes_object_cloud = get_nn_indexes(prim_aux,
                                                 object_cloud,
                                                 radius_search)
        # Get current primitive points primitive NN
        nn_indexes_primitive_cloud = get_nn_indexes(prim_aux,
                                                    primitive_object,
                                                    radius_search)
        for index in nn_indexes_object_cloud:
            point = object_cloud.point_cloud.xyz[index]
            normal = object_cloud.normals.normals[index]
            sample_X_nn_obj.append((point, normal))
        cloud_points = [pair[0] for pair in sample_X_nn_obj]
        cloud_normals = [pair[1] for pair in sample_X_nn_obj]
        # ***
        #visualize_samples(cloud_points, cloud_normals, normalize=True, r=0, g=255, b=0)
        if usePrimitiveNNPoints:
            for index in nn_indexes_primitive_cloud:
                point = primitive_object.point_cloud.xyz[index]
                normal = primitive_object.normals.normals[index]
                sample_X_nn_prim.append((point, normal))
        step_counter += 1
    cloud_points = [pair[0] for pair in sample_X_nn_prim]
    cloud_normals = [pair[1] for pair in sample_X_nn_prim]
    # ***
    #visualize_samples(cloud_points, cloud_normals, normalize=True, r=255, g=0, b=0)
    # For this direction the combined primitive + object neighbours are kept.
    sample = sample_X_nn_prim + sample_X_nn_obj
    cloud_points = [pair[0] for pair in sample]
    cloud_normals = [pair[1] for pair in sample]
    if len(cloud_points) > 0:
        concat = np.concatenate((cloud_points, cloud_normals), axis=1)
        no_duplicates_points = unique_rows(concat)
        sample_X.append(no_duplicates_points)
    return sample_X
def unique_rows(a):
    """Return the unique rows of the 2-D array *a*, lexicographically sorted."""
    contiguous = np.ascontiguousarray(a)
    # View each row as a single structured record so np.unique deduplicates
    # whole rows instead of individual scalars.
    row_view = contiguous.view([('', contiguous.dtype)] * contiguous.shape[1])
    deduped = np.unique(row_view)
    return deduped.view(contiguous.dtype).reshape((deduped.shape[0], contiguous.shape[1]))
def make_samples(object_cloud, primitive_object, primitive_modelled_object, usePrimitivePoints=False, generate_for = 'train'):
    """Build (X, Y) training pairs per primitive vertex: X = neighbour samples
    around the vertex (normalized to NORMALIZATION_CENTER), Y = the modelled
    (ground-truth) vertex position in the same frame.

    :param object_cloud: observed cloud wrapper
    :param primitive_object: primitive before modelling
    :param primitive_modelled_object: primitive after modelling (labels)
    :param usePrimitivePoints: forwarded to get_sample_for_direction
    :param generate_for: 'train' -> (sample_X, sample_Y, last before-point,
        last label point); 'test' -> (sample_X, wasVisited). Any other value
        falls through and returns None.
    """
    sample_X = []
    sample_Y = []
    primitive_norm_point = []
    primitive_norm_normal = []
    # Per-vertex flag: True once a non-empty sample was produced for it.
    wasVisited = [False for i in range(0, len(primitive_object.point_cloud.points))]
    for i in range(0, primitive_object.cloud_size):
        primitive_point_before = primitive_object.point_cloud.xyz[i]
        primitive_point_after = primitive_modelled_object.point_cloud.xyz[i]
        primitive_norm_normal = primitive_object.normals.normals[i]
        # Offset that re-centres this vertex onto the normalization centre.
        translation_point = np.asarray(calculate_distance_from(primitive_point_before, ModellingParameters.NORMALIZATION_CENTER))
        # NOTE(review): in-place `*=` may mutate the modelled cloud's stored
        # coordinates if .xyz[i] returns a view — confirm.
        primitive_point_after *= ModellingParameters.CAR.SCALE
        primitive_norm_point = np.array([primitive_point_after[0] - translation_point[0],
                                         primitive_point_after[1] - translation_point[1],
                                         primitive_point_after[2] - translation_point[2]])
        object_cloud_currentNNs = get_sample_for_direction(i, object_cloud, primitive_object, visualization=None, usePrimitiveNNPoints=usePrimitivePoints)
        # NOTE(review): each sample row is [x, y, z, nx, ny, nz]; this scales
        # the normal columns too — confirm intended.
        object_cloud_currentNNs = [point * ModellingParameters.CAR.SCALE for point in object_cloud_currentNNs]
        primitive_point_beforex = np.array([primitive_point_before[0] - translation_point[0],
                                            primitive_point_before[1] - translation_point[1],
                                            primitive_point_before[2] - translation_point[2]])
        if len(object_cloud_currentNNs) > 0:
            # Only the first direction's sample is used; translate_point_cloud
            # shifts columns 0..2 (the xyz part), leaving normals untouched.
            object_cloud_currentNNs = object_cloud_currentNNs[0]
            object_cloud_currentNNS_translated = translate_point_cloud(object_cloud_currentNNs,
                                                                       translation_point,
                                                                       action="translate")
            wasVisited[i] = True
            sample_X.append(object_cloud_currentNNS_translated)
            sample_Y.append([primitive_norm_point[0], primitive_norm_point[1], primitive_norm_point[2]])
    # NOTE(review): the two scalar-ish extras returned for 'train' are from
    # the LAST loop iteration only (and are unbound if cloud_size == 0) —
    # confirm callers expect that.
    if generate_for.lower() == 'train':
        return sample_X, sample_Y, primitive_point_beforex, primitive_norm_point
    elif generate_for.lower() == 'test':
        return sample_X, wasVisited
def upsample_point_set(sample_collection, points_count):
    """
    Bring every sample in *sample_collection* up to *points_count* points.

    :param sample_collection: iterable of raw point arrays
    :param points_count: target number of points per sample
    :return: list of numpy arrays, one resampled cloud per input sample
    """
    def _resampled(points):
        # Wrap the raw points so IoObservation's resampling logic can run.
        observation = IoObservation(points)
        observation.upsample_cloud_to(points_count)
        return np.asarray(observation.point_cloud.points)

    return [_resampled(points) for points in sample_collection]
| {"/modelling_utils.py": ["/io_primitive.py", "/parameters.py"], "/io_utils.py": ["/path_utils.py"], "/transformations_utils.py": ["/io_observation.py", "/parameters.py"], "/io_primitive.py": ["/transformations_utils.py", "/io_utils.py", "/parameters.py", "/path_utils.py"], "/preprocess_kitti.py": ["/kitti_dataset.py"], "/train_GFP.py": ["/io_observation.py", "/io_primitive.py", "/io_utils.py", "/model.py", "/parameters.py", "/path_utils.py", "/transformations_utils.py"], "/io_observation.py": ["/transformations_utils.py", "/io_utils.py"], "/test.py": ["/io_observation.py", "/io_primitive.py", "/transformations_utils.py", "/path_utils.py", "/visualization_utils.py", "/modelling_utils.py"], "/model.py": ["/custom_layers.py"]} |
43,651 | RovisLab/GFPNet | refs/heads/master | /io_primitive.py | from open3d import *
from transformations_utils import get_diagonal, delaunay_triangulation, upsample_cloud_kd_tree, downsample_cloud_random, normalize_cloud
from io_utils import read_off_file
from parameters import ModellingParameters
from path_utils import PATHS
# Class used for holding data of primitive neighbor point, when calculating it's functional and external energy.
class PPoint:
    """Per-vertex state tracked for a primitive point during modelling."""

    def __init__(self, idx, isModified, x, y, z, f_eng, neighborsCount, isControlPoint):
        '''
        Class used for holding a primitive point's properties
        :param idx: point index
        :param isModified: flag TRUE if the point was modelled using active contours
        :param x: x coordinate
        :param y: y coordinate
        :param z: z coordinate
        :param f_eng: functional energy
        :param neighborsCount: number of neighbors
        :param isControlPoint: flag to show if it's a control point
        '''
        self.index = idx
        self.x = x
        self.y = y
        self.z = z
        self.isModified = isModified
        self.functional_energy = f_eng
        self.nOfNeighbors = neighborsCount
        self.isControlPoint = isControlPoint

    def is_modified(self):
        """Return True once active-contour modelling has moved this point."""
        return self.isModified

    def get_functional_energy(self):
        """Return the stored functional energy value."""
        return self.functional_energy
class PRIMITIVE_NEIGHBOR_POINT():
    """Neighbour record used while computing a primitive point's functional
    and external energy: the neighbour's slot in the primitive vector, its
    neighbourhood, and the distance components to it."""

    def __init__(self, pos_in_primitive_vect, ptNeighborhood, dist,
                 dx, dy, dz):
        self.position_in_primitive_vect = pos_in_primitive_vect
        self.ptNeighborhood = ptNeighborhood
        # Scalar distance plus its per-axis components.
        self.dist = dist
        self.dx = dx
        self.dy = dy
        self.dz = dz
# Used for active contours modelling
class NEXT_PREV_POINT_DEP:
    """Next/previous point dependency record used for active-contours
    modelling: stores the two neighbouring points and their ids."""

    def __init__(self,
                 ptNextPoint,
                 ptPreviousPoint,
                 nextPointID,
                 prevPointID):
        self.ptNextPoint = ptNextPoint
        self.nextPointID = nextPointID
        self.ptPreviousPoint = ptPreviousPoint
        self.prevPointID = prevPointID
class IoPrimitive:
    """Wrapper around a primitive (template) point cloud loaded from disk.

    Bundles the open3d cloud with its mesh, control points and the modelling
    parameters used while fitting the primitive to an observed object.
    """

    def __init__(self, path_to_primitive):
        """
        :param path_to_primitive: path to the primitive file (.off/.pcd/.ply);
            `filename` parsing assumes Windows-style backslash separators.
        """
        self.point_cloud = geometry.PointCloud()
        # Pristine copy of the loaded points, used by reset_points().
        self.aux_cloud = geometry.PointCloud()
        self.primitiveModelledVertices = []
        self.allPrimitivePointsNeighboursDependinces = list()
        self.path = path_to_primitive
        # Basename without extension (text after the last backslash).
        self.filename = str(path_to_primitive).rsplit("\\")[-1:][0].split(".")[0]
        self.cloud_down = geometry.PointCloud()
        self.cloud_fpfh = registration.RegistrationResult()
        self.mesh = geometry.TriangleMesh()
        self.mesh_lines = geometry.LineSet()
        self.control_points_idx = []
        self.primitive_center = [.0, .0, .0]
        self.cloud_size = 0
        self.height = 0
        self.width = 0
        self.scale = 0
        self.count = 0
        # Sampling/modelling parameters; populated from
        # ModellingParameters.CAR in load_primitive().
        self.RADIUS_SEARCH = 0
        self.STEPS = 0
        self.STEP_SIZE = 0
        self.NORMALS_RADIUS = 0

    def get_scale(self):
        """Return the cloud's characteristic size (bounding-box diagonal)."""
        return get_diagonal(self.point_cloud)

    def scale_relative_to(self, dest_cloud):
        """Scale this primitive to `dest_cloud`'s size, plus a 10% margin."""
        self.scale = self.get_scale()
        dest_cloud_scale = get_diagonal(dest_cloud)
        scale_ratio = dest_cloud_scale / self.scale * 1.1
        self.scale_with_factor(scale_ratio)

    def reset_points(self):
        """Restore the point cloud from the pristine copy taken at load time."""
        self.point_cloud.points = Vector3dVector(self.aux_cloud.points)

    def scale_with_factor(self, factor):
        """Multiply every point coordinate by `factor`, in place.

        :return: the scaled point cloud
        """
        cloud_size = len(self.point_cloud.points)
        for i in range(0, cloud_size):
            self.point_cloud.points[i][0] *= factor
            self.point_cloud.points[i][1] *= factor
            self.point_cloud.points[i][2] *= factor
        return self.point_cloud

    def align_primitive_on_z_axis(self, target_cloud):
        """Brute-force 1-degree sweep about the z axis: rotate step by step,
        score each pose against `target_cloud` via registration RMSE, and
        finally apply the best-scoring rotation.

        NOTE(review): `rotz` below is unused; `rot_angle_deg` is only bound
        inside the `if` (the initial 999 threshold makes the first iteration
        bind it in practice); `eval` shadows the builtin — confirm/clean up.
        """
        rotz = [[np.cos(0.05), -np.sin(0.05), 0.0, 0.0],
                [np.sin(0.05), np.cos(0.05), 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0],
                [0.0, 0.0, 0.0, 1.0]]
        eval_total = 999
        for alpha in range(0, 360):
            # Advance the cloud by one additional degree about z.
            transformation_matrix = [[np.cos(np.deg2rad(1)), -np.sin(np.deg2rad(1)), 0.0, 0.0],
                                     [np.sin(np.deg2rad(1)), np.cos(np.deg2rad(1)), 0.0, 0.0],
                                     [0.0, 0.0, 1.0, 0.0],
                                     [0.0, 0.0, 0.0, 1.0]]
            self.point_cloud.transform(transformation_matrix)
            eval = registration.evaluate_registration(self.point_cloud,
                                                      target_cloud, 1,
                                                      transformation_matrix)
            from decimal import Decimal
            # Track the angle with the lowest inlier RMSE seen so far.
            if Decimal(eval.inlier_rmse) < Decimal(eval_total):
                eval_total = eval.inlier_rmse
                transformation_matrix = \
                    [[np.cos(np.deg2rad(alpha)), -np.sin(np.deg2rad(alpha)), 0.0, 0.0],
                     [np.sin(np.deg2rad(alpha)), np.cos(np.deg2rad(alpha)), 0.0, 0.0],
                     [0.0, 0.0, 1.0, 0.0],
                     [0.0, 0.0, 0.0, 1.0]]
                rot_angle_deg = alpha
                print(alpha)
        # Apply the winning rotation on top of the fully-swept cloud.
        self.point_cloud.transform(
            [[np.cos(np.deg2rad(rot_angle_deg)), -np.sin(np.deg2rad(rot_angle_deg)), 0.0, 0.0],
             [np.sin(np.deg2rad(rot_angle_deg)), np.cos(np.deg2rad(rot_angle_deg)), 0.0, 0.0],
             [0.0, 0.0, 1.0, 0.0],
             [0.0, 0.0, 0.0, 1.0]])

    def compute_normals(self, max_nn):
        """
        Computes pointcloud normals using the open3d library functionality.
        Uses `self.NORMALS_RADIUS` (set in load_primitive) as search radius.
        :param max_nn: [in] Maximum number of neighbours considered per point.
        :return:
        """
        estimate_normals(self.point_cloud, KDTreeSearchParamHybrid(radius=self.NORMALS_RADIUS, max_nn=max_nn))

    def load_mesh(self, compute_vertex_normals=False):
        """Read the triangle mesh at `self.path`, optionally computing vertex
        and triangle normals, and mirror its vertices into the point cloud."""
        if compute_vertex_normals:
            self.mesh = io.read_triangle_mesh(self.path)
            self.mesh.compute_vertex_normals()
            self.mesh.compute_triangle_normals()
        else:
            self.mesh = io.read_triangle_mesh(self.path)
        self.point_cloud.points = Vector3dVector(self.mesh.vertices)

    def get_delaynay_mesh(self, compute_vertex_normals=False):
        """Build the mesh by Delaunay triangulation of the current points."""
        if compute_vertex_normals:
            self.mesh = delaunay_triangulation(self.point_cloud.points)
            self.mesh.compute_vertex_normals()
            self.mesh.compute_triangle_normals()
        else:
            self.mesh = delaunay_triangulation(self.point_cloud.points)

    def smoothen_mesh(self, method=None, iterations = 1):
        """Apply one of open3d's mesh filters, selected by name.

        :param method: 'simple' | 'laplacian' | 'taubin' | 'sharpen'.
            NOTE(review): the default None crashes on `method.lower()`;
            callers must always pass a name — confirm intended.
        :param iterations: number of filter iterations
        """
        if method.lower() =='simple':
            self.mesh.filter_smooth_simple(iterations)
        elif method.lower() == 'laplacian':
            self.mesh.filter_smooth_laplacian(iterations)
        elif method.lower() == 'taubin':
            self.mesh.filter_smooth_taubin(iterations)
        elif method.lower() == 'sharpen':
            self.mesh.filter_sharpen(iterations)

    def get_mesh_lines(self, lines_color=[0, 0, 0]):
        """Build a LineSet of this mesh's triangle edges (stored in
        `self.mesh_lines`), colouring every edge with `lines_color`.

        NOTE(review): mutable default argument — safe only because the list
        is never mutated here.
        """
        mesh = self.mesh
        triangles_list = np.asarray(mesh.triangles)
        # Expand each triangle's vertex indices into explicit coordinates.
        points_list = [[[mesh.vertices[idx[0]][0], mesh.vertices[idx[0]][1], mesh.vertices[idx[0]][2]],
                        [mesh.vertices[idx[1]][0], mesh.vertices[idx[1]][1], mesh.vertices[idx[1]][2]],
                        [mesh.vertices[idx[2]][0], mesh.vertices[idx[2]][1], mesh.vertices[idx[2]][2]]] for idx in
                       triangles_list]
        n = 0
        triangle_points = []
        triangle_lines = []
        # Each triangle contributes 3 points and its 3 closing edges;
        # vertices shared between triangles are duplicated on purpose.
        for tri_point_set in points_list:
            triangle_points.extend(tri_point_set)
            lines = [[n, n + 1],
                     [n + 1, n + 2],
                     [n + 2, n]]
            triangle_lines.extend(lines)
            n += 3
        triangles_lines_set = geometry.LineSet()
        triangles_lines_set.points = utility.Vector3dVector(triangle_points)
        triangles_lines_set.lines = utility.Vector2iVector(triangle_lines)
        triangles_lines_set.colors = utility.Vector3dVector([lines_color for i in range(len(triangle_lines))])
        self.mesh_lines = triangles_lines_set

    def load_primitive(self, normalize=False):
        """ Method used for reading the off file that holds the pcd points and populate the point_cloud and normal points inside this object."""
        try:
            # Extension = text after the last '.' of the basename.
            file_extension = (str(self.path).rsplit("\\", 1)[1]).rsplit(".", 1)[1]
            if file_extension.lower() == "off":
                if normalize:
                    points_array, normals_array = read_off_file(self.path)
                    normalized_points = normalize_cloud(points_array)
                    self.point_cloud.points = Vector3dVector(normalized_points)
                else:
                    points_array, normals_array = read_off_file(self.path)
                    self.point_cloud.points = Vector3dVector(points_array)
            elif file_extension == 'pcd':
                io.read_point_cloud(self.path, self.point_cloud)
            elif file_extension == 'ply':
                self.mesh = io.read_triangle_mesh(self.path)
                assert len(self.mesh.vertices) > 0
                self.point_cloud.points = Vector3dVector(self.mesh.vertices)
            # Every loaded vertex starts out flagged as a control point.
            self.load_primitive_control_points(set_all=True)
            self.cloud_size = len(self.point_cloud.points)
            # Keep an untouched copy so reset_points() can undo modelling.
            self.aux_cloud.points = Vector3dVector(np.copy(self.point_cloud.points))
            self.RADIUS_SEARCH = ModellingParameters.CAR.RADIUS_SEARCH
            self.STEPS = ModellingParameters.CAR.STEPS
            self.STEP_SIZE = ModellingParameters.CAR.STEP_SIZE
            self.NORMALS_RADIUS = self.STEPS * self.RADIUS_SEARCH * 15
        except Exception as e:
            # NOTE(review): broad catch turns any load failure into a print;
            # downstream code then operates on an empty cloud — confirm.
            print('Exception at reading off file.', e)

    def load_primitive_control_points(self, set_all = False):
        """Populate `primitiveModelledVertices`; with set_all=True every
        vertex becomes a control point, otherwise indices are read from the
        `<filename>_cp.txt` sidecar file.

        NOTE(review): in the file-based branch the membership check uses the
        leftover loop variables `i` and `pt` from the preceding vertex loop
        (it runs inside the `for index in lines` loop), so at most the LAST
        vertex can be flagged — looks buggy; confirm. The file handle is
        also never closed and the bare `except` hides parse errors.
        """
        if set_all:
            for i in range(0, len(self.point_cloud.points)):
                pt = PPoint(i, False,
                            self.point_cloud.points[i][0],
                            self.point_cloud.points[i][1],
                            self.point_cloud.points[i][2],
                            None, 0, True)
                self.primitiveModelledVertices.append(pt)
        else:
            final_path = PATHS.PATH_TO_PRIMITIVES.CAR.root + self.filename + "_cp.txt" # cp stands for control points
            file = open(final_path, "r")
            assert file is not None
            try:
                lines = file.read().split(" ")
                for i in range(0, len(self.point_cloud.points)):
                    pt = PPoint(i, False,
                                self.point_cloud.points[i][0],
                                self.point_cloud.points[i][1],
                                self.point_cloud.points[i][2],
                                None, 0, True)
                    self.primitiveModelledVertices.append(pt)
                for index in lines:
                    self.control_points_idx.append(index)
                    if str(i) in set(self.control_points_idx):
                        pt.isControlPoint = True
            except:
                print('File may not exist!')

    def upsample_cloud_to(self, number_of_points):
        """
        Method used for upsampling the cloud to a certain number of points.
        Downsamples randomly instead when the cloud is already larger.
        :param number_of_points: [in] Desired number of points
        :return: [out] Sets the cloud inside this object
        """
        if len(self.point_cloud.points) < number_of_points:
            upsample_cloud_kd_tree(self.point_cloud, number_of_points)
        else:
            self.point_cloud = downsample_cloud_random(self.point_cloud, number_of_points)
| {"/modelling_utils.py": ["/io_primitive.py", "/parameters.py"], "/io_utils.py": ["/path_utils.py"], "/transformations_utils.py": ["/io_observation.py", "/parameters.py"], "/io_primitive.py": ["/transformations_utils.py", "/io_utils.py", "/parameters.py", "/path_utils.py"], "/preprocess_kitti.py": ["/kitti_dataset.py"], "/train_GFP.py": ["/io_observation.py", "/io_primitive.py", "/io_utils.py", "/model.py", "/parameters.py", "/path_utils.py", "/transformations_utils.py"], "/io_observation.py": ["/transformations_utils.py", "/io_utils.py"], "/test.py": ["/io_observation.py", "/io_primitive.py", "/transformations_utils.py", "/path_utils.py", "/visualization_utils.py", "/modelling_utils.py"], "/model.py": ["/custom_layers.py"]} |
43,652 | RovisLab/GFPNet | refs/heads/master | /preprocess_kitti.py | from kitti_utils import align_img_and_pc, load_calib, load_velodyne_points, project_velo_points_in_img, prepare_velo_points
from kitti_dataset import KITTIPaths
# Aliases for the dataset locations configured in KITTIPaths.
IMG_ROOT = KITTIPaths.IMG_ROOT
PC_ROOT = KITTIPaths.PC_ROOT
CALIB_ROOT = KITTIPaths.CALIB_ROOT

# Align every KITTI frame's image with its point cloud and persist the
# calibrated points; 7481 frames is the KITTI object-detection train split.
for frame in range(0, 7481):
    img_dir = IMG_ROOT + '%06d.png' % frame
    pc_dir = PC_ROOT + '%06d.bin' % frame
    calib_dir = CALIB_ROOT + '%06d.txt' % frame
    points = align_img_and_pc(img_dir, pc_dir, calib_dir)
    # Bug fix: `PC_ROOT + frame` raised TypeError (str + int concatenation).
    # NOTE(review): using str(frame) keeps the literal intent ("0.bin", not
    # zero-padded), which also avoids overwriting the "%06d.bin" input cloud
    # — confirm the desired output naming/destination.
    output_name = PC_ROOT + str(frame) + '.bin'
    # Keep only x, y, z, reflectance and store as float32.
    points[:, :4].astype('float32').tofile(output_name)
| {"/modelling_utils.py": ["/io_primitive.py", "/parameters.py"], "/io_utils.py": ["/path_utils.py"], "/transformations_utils.py": ["/io_observation.py", "/parameters.py"], "/io_primitive.py": ["/transformations_utils.py", "/io_utils.py", "/parameters.py", "/path_utils.py"], "/preprocess_kitti.py": ["/kitti_dataset.py"], "/train_GFP.py": ["/io_observation.py", "/io_primitive.py", "/io_utils.py", "/model.py", "/parameters.py", "/path_utils.py", "/transformations_utils.py"], "/io_observation.py": ["/transformations_utils.py", "/io_utils.py"], "/test.py": ["/io_observation.py", "/io_primitive.py", "/transformations_utils.py", "/path_utils.py", "/visualization_utils.py", "/modelling_utils.py"], "/model.py": ["/custom_layers.py"]} |
43,653 | RovisLab/GFPNet | refs/heads/master | /custom_layers.py | import tensorflow as tf
from keras.backend import backend as K
from keras.engine.base_layer import Layer
class _MergeCustom(Layer):
    """Generic merge layer for elementwise merge functions.

    Used to implement `Sum`, `Average`, etc. Subclasses provide the actual
    combine step by overriding `_merge_function`.

    NOTE(review): this module imports `from keras.backend import backend as K`,
    which binds the `backend()` *function* rather than the backend module, so
    every `K.ndim`/`K.shape`/`K.reshape` call below would raise
    AttributeError. The intended import is likely `from keras import backend
    as K` — confirm.

    # Arguments
        **kwargs: standard layer keyword arguments.
    """
    def __init__(self, **kwargs):
        super(_MergeCustom, self).__init__(**kwargs)
        self.supports_masking = True
    def _merge_function(self, inputs):
        # Subclass hook: combine the (already shape-aligned) input tensors.
        raise NotImplementedError
    def _compute_elemwise_op_output_shape(self, shape1, shape2):
        """Computes the output shape of an elementwise operation carried out
        on two tensors with shapes `shape1` and `shape2` (broadcasting dims
        of size 1).
        # Arguments
            shape1: tuple or None. Shape of the first tensor
            shape2: tuple or None. Shape of the second tensor
        # Returns
            tuple or None — the expected output shape.

        NOTE(review): unlike upstream Keras, incompatible non-1 dimensions do
        NOT raise here — the first shape's dimension silently wins (final
        `else` branch). Confirm that is intended.
        """
        if None in [shape1, shape2]:
            return None
        elif len(shape1) < len(shape2):
            # Normalize so shape1 is the longer (or equal) shape.
            return self._compute_elemwise_op_output_shape(shape2, shape1)
        elif not shape2:
            return shape1
        output_shape = list(shape1[:-len(shape2)])
        for i, j in zip(shape1[-len(shape2):], shape2):
            if i is None or j is None:
                output_shape.append(None)
            elif i == 1:
                output_shape.append(j)
            elif j == 1:
                output_shape.append(i)
            else:
                output_shape.append(i)
        return tuple(output_shape)
    def build(self, input_shape):
        # Used purely for shape validation.
        if not isinstance(input_shape, list):
            raise ValueError('A merge layer should be called '
                             'on a list of inputs.')
        if len(input_shape) < 2:
            raise ValueError('A merge layer should be called '
                             'on a list of at least 2 inputs. '
                             'Got ' + str(len(input_shape)) + ' inputs.')
        # All known batch sizes must agree across the inputs.
        batch_sizes = [s[0] for s in input_shape if s is not None]
        batch_sizes = set(batch_sizes)
        batch_sizes -= set([None])
        if len(batch_sizes) > 1:
            raise ValueError('Can not merge tensors with different '
                             'batch sizes. Got tensors with shapes : ' +
                             str(input_shape))
        if input_shape[0] is None:
            output_shape = None
        else:
            output_shape = input_shape[0][1:]
        for i in range(1, len(input_shape)):
            if input_shape[i] is None:
                shape = None
            else:
                shape = input_shape[i][1:]
            output_shape = self._compute_elemwise_op_output_shape(output_shape,
                                                                  shape)
        # If the inputs have different ranks, we have to reshape them
        # to make them broadcastable.
        if None not in input_shape and len(set(map(len, input_shape))) == 1:
            self._reshape_required = False
        else:
            self._reshape_required = True
    def call(self, inputs):
        """Align the input ranks if needed, then apply `_merge_function`."""
        if not isinstance(inputs, list):
            raise ValueError('A merge layer should be called '
                             'on a list of inputs.')
        if self._reshape_required:
            reshaped_inputs = []
            input_ndims = list(map(K.ndim, inputs))
            if None not in input_ndims:
                # If ranks of all inputs are available,
                # we simply expand each of them at axis=1
                # until all of them have the same rank.
                max_ndim = max(input_ndims)
                for x in inputs:
                    x_ndim = K.ndim(x)
                    for _ in range(max_ndim - x_ndim):
                        x = K.expand_dims(x, 1)
                    reshaped_inputs.append(x)
                return self._merge_function(reshaped_inputs)
            else:
                # Transpose all inputs so that batch size is the last dimension.
                # (batch_size, dim1, dim2, ... ) -> (dim1, dim2, ... , batch_size)
                transposed = False
                for x in inputs:
                    x_ndim = K.ndim(x)
                    if x_ndim is None:
                        # Rank unknown statically: flatten to 2-D, swap axes,
                        # then restore the (rotated) shape dynamically.
                        x_shape = K.shape(x)
                        batch_size = x_shape[0]
                        new_shape = K.concatenate([x_shape[1:],
                                                   K.expand_dims(batch_size)])
                        x_transposed = K.reshape(x, K.stack([batch_size,
                                                             K.prod(x_shape[1:])]))
                        x_transposed = K.permute_dimensions(x_transposed, (1, 0))
                        x_transposed = K.reshape(x_transposed, new_shape)
                        reshaped_inputs.append(x_transposed)
                        transposed = True
                    elif x_ndim > 1:
                        dims = list(range(1, x_ndim)) + [0]
                        reshaped_inputs.append(K.permute_dimensions(x, dims))
                        transposed = True
                    else:
                        # We don't transpose inputs if they are
                        # 1D vectors or scalars.
                        reshaped_inputs.append(x)
                y = self._merge_function(reshaped_inputs)
                y_ndim = K.ndim(y)
                if transposed:
                    # If inputs have been transposed,
                    # we have to transpose the output too.
                    if y_ndim is None:
                        y_shape = K.shape(y)
                        y_ndim = K.shape(y_shape)[0]
                        batch_size = y_shape[y_ndim - 1]
                        new_shape = K.concatenate([K.expand_dims(batch_size),
                                                   y_shape[:y_ndim - 1]])
                        y = K.reshape(y, (-1, batch_size))
                        y = K.permute_dimensions(y, (1, 0))
                        y = K.reshape(y, new_shape)
                    elif y_ndim > 1:
                        dims = [y_ndim - 1] + list(range(y_ndim - 1))
                        y = K.permute_dimensions(y, dims)
                return y
        else:
            return self._merge_function(inputs)
    def compute_output_shape(self, input_shape):
        """Fold the per-input shapes through the elementwise-shape rule and
        re-attach the (single) known batch size, if any."""
        if input_shape[0] is None:
            output_shape = None
        else:
            output_shape = input_shape[0][1:]
        for i in range(1, len(input_shape)):
            if input_shape[i] is None:
                shape = None
            else:
                shape = input_shape[i][1:]
            output_shape = self._compute_elemwise_op_output_shape(output_shape,
                                                                  shape)
        batch_sizes = [s[0] for s in input_shape if s is not None]
        batch_sizes = set(batch_sizes)
        batch_sizes -= set([None])
        if len(batch_sizes) == 1:
            output_shape = (list(batch_sizes)[0],) + output_shape
        else:
            output_shape = (None,) + output_shape
        return output_shape
    def compute_mask(self, inputs, mask=None):
        """Merge the per-input masks with a logical AND (None if no input
        carries a mask)."""
        if mask is None:
            return None
        if not isinstance(mask, list):
            raise ValueError('`mask` should be a list.')
        if not isinstance(inputs, list):
            raise ValueError('`inputs` should be a list.')
        if len(mask) != len(inputs):
            raise ValueError('The lists `inputs` and `mask` '
                             'should have the same length.')
        if all([m is None for m in mask]):
            return None
        masks = [K.expand_dims(m, 0) for m in mask if m is not None]
        return K.all(K.concatenate(masks, axis=0), axis=0, keepdims=False)
class MatMul_layer(_MergeCustom):
    """Layer that does matrix multiplication like operation on tensors.

    Takes a list of exactly two input tensors shaped (n, m) and (m, p)
    (batched) and returns their matrix product, shaped (n, p) as in
    ordinary matrix algebra.
    """
    def build(self, input_shape):
        """Validate that exactly two tensors are being merged."""
        super(MatMul_layer, self).build(input_shape)
        if len(input_shape) != 2:
            raise ValueError('matMul_layer does not currently implement multiplication of more than 2 layers')
    def _merge_function(self, inputs):
        """Multiply the two (rank-aligned) input tensors with tf.matmul."""
        # ToDo implement multiplication of more than 2 tensors
        if len(inputs) != 2:
            raise ValueError('matMul_layer does not currently implement multiplication of more than 2 layers')
        return tf.matmul(inputs[0], inputs[1])
    def get_config(self):
        """Return the layer configuration (no state beyond the base Layer)."""
        config = {}
        # Bug fix: the original used `super(_MergeCustom, self)`, skipping
        # this class's own place in the MRO. Both resolve to Layer.get_config
        # today (since _MergeCustom defines none), but the conventional form
        # stays correct if _MergeCustom ever overrides get_config.
        base_config = super(MatMul_layer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
def matMul_layer(inputs, **kwargs):
    """Functional interface to the `MatMul_layer` layer.
    # Arguments
        inputs: A list of input tensors (at least 2).
        **kwargs: Standard layer keyword arguments.
    # Returns
        A tensor, the matrix like multiplication of the inputs.
    """
    layer = MatMul_layer(**kwargs)
    return layer(inputs)
43,654 | RovisLab/GFPNet | refs/heads/master | /visualization_utils.py | import numpy as np
from open3d.open3d import geometry, utility
def draw_bbox_open3d(cloud_points_array):
    """Build an axis-aligned bounding-box LineSet for a set of 3D points.

    :param cloud_points_array: array-like of (x, y, z) points
    :return: open3d LineSet with the 12 green box edges (two extra vertices —
        the origin and the box-extent point — are stored but not connected,
        matching the original drawing)
    """
    cloud = geometry.PointCloud()
    cloud.points = utility.Vector3dVector(np.asarray(cloud_points_array))
    lo = cloud.get_min_bound()
    hi = cloud.get_max_bound()
    # NOTE: this is the box *extent* (max - min), not its centre; the value is
    # kept for parity with the original, which labelled it "box_center".
    extent = np.array([hi[0] - lo[0], hi[1] - lo[1], hi[2] - lo[2]])
    corners = [[lo[0], hi[1], lo[2]], [hi[0], hi[1], lo[2]],
               [hi[0], lo[1], lo[2]], [lo[0], lo[1], lo[2]],
               [lo[0], lo[1], hi[2]], [hi[0], lo[1], hi[2]],
               [hi[0], hi[1], hi[2]], [lo[0], hi[1], hi[2]],
               [0, 0, 0], [extent[0], extent[1], extent[2]]]
    edges = [[0, 1], [1, 2], [2, 3], [0, 3],
             [0, 7],
             [1, 6],
             [2, 5],
             [3, 4],
             [4, 5], [4, 7],
             [5, 6], [6, 7]]
    bbox = geometry.LineSet()
    bbox.points = utility.Vector3dVector(corners)
    bbox.lines = utility.Vector2iVector(edges)
    bbox.colors = utility.Vector3dVector([[0, 255, 0] for _ in range(0, len(edges))])
    return bbox
43,655 | RovisLab/GFPNet | refs/heads/master | /train_GFP.py | import os
from pathlib import Path
import numpy as np
import tensorflow as tf
import keras.backend as K
from keras import optimizers
from io_observation import IoObservation
from io_primitive import IoPrimitive
from io_utils import load_h5, read_off_file, createH5_from
from model import GFPNet
from parameters import ModellingParameters
from path_utils import PATHS
from transformations_utils import make_samples, upsample_point_set
def euclidean_distance_loss(y_true, y_pred):
    """Per-sample Euclidean (L2) distance between targets and predictions."""
    squared_diff = K.square(y_pred - y_true)
    return K.sqrt(K.sum(squared_diff, axis=-1))
class GFPNetParams:
    """Bundle of GFPNet training hyper-parameters plus dataset locations."""
    NUM_POINTS = 1024
    MAX_EPOCH = 1000
    BATCH_SIZE = 512
    OPTIMIZER = optimizers.Adam(lr=0.001, decay=0.9)
    GPU_INDEX = 0
    PATH_TO_WEIGHTS = ''
    FILE_EXTENSION = ''
    TRAIN_PATH = ''
    TRAIN_FILENAME = ''
    TEST_PATH = ''
    TEST_FILENAME = ''

    def __init__(self, num_points, max_epoch, batch_size):
        """Override the class-level defaults for this training run."""
        self.NUM_POINTS = num_points
        self.MAX_EPOCH = max_epoch
        self.BATCH_SIZE = batch_size

    def set_paths_to_data(self, path_to_trainh5, path_to_testh5):
        """Record train/test .h5 paths and cache the derived basenames and
        file extension (paths are split on Windows-style backslashes)."""
        train_path = str(path_to_trainh5)
        test_path = str(path_to_testh5)
        # Basename = text after the last backslash.
        train_basename = train_path.rsplit("\\")[-1:][0]
        test_basename = test_path.rsplit("\\")[-1:][0]
        self.FILE_EXTENSION = '.' + train_basename.split(".")[1]
        self.TRAIN_PATH = train_path
        self.TEST_PATH = test_path
        self.TRAIN_FILENAME = train_basename.split(".")[0]
        self.TEST_FILENAME = test_basename.split(".")[0]

    def load_data(self, loadfor='train'):
        """Load (points, labels) from the configured train or test H5 file."""
        source = self.TRAIN_PATH if loadfor.lower() == 'train' else self.TEST_PATH
        points, labels = load_h5(source)
        return points, labels
# TODO: replace with generated files
# NOTE(review): both filenames below are empty placeholders — the paths point
# at the bare NETWORK root; fill in the generated .h5 names before running.
train_h5_path = PATHS.NETWORK.root + ''
test_h5_path = PATHS.NETWORK.root + ''
# Module-level training configuration shared by train_gfp() / test_GFP().
net_params = GFPNetParams(num_points=50, max_epoch=100, batch_size=256)
net_params.set_paths_to_data(train_h5_path, test_h5_path)
print(net_params.PATH_TO_WEIGHTS)
def test_GFP():
    """Qualitative evaluation of a trained GFPNet.

    For every ``.off`` observation under ``PATHS.NETWORK.root``: loads the car
    primitive, aligns and scales it to the LIDAR observation, builds one input
    sample per affected primitive point, predicts a displaced 3-D point per
    sample, and visualises the primitive before/after deformation alongside
    the raw observation and the ground-truth modelled cloud.

    Requires ``net_params.PATH_TO_WEIGHTS`` to point at trained weights
    (currently '' — NOTE(review): fill in before running).
    """
    # open3d imported lazily so importing this module does not require it.
    from open3d import visualization, geometry, utility
    test_points, test_labels = load_h5(test_h5_path)
    nr_points = ModellingParameters.NUM_POINTS_UPSAMPLE
    model = GFPNet(nr_points)
    model.compile(optimizer='adam',
                  loss='mean_squared_error',
                  metrics=['mse', 'accuracy'])
    # print the model summary
    model.load_weights(net_params.PATH_TO_WEIGHTS)
    print(model.summary())
    # Generic car primitive fitted onto each observation in turn.
    primitive_path = PATHS.PATH_TO_PRIMITIVES.CAR.root + "CarPrimitive_15_500.off"
    io_primitive = IoPrimitive(primitive_path)
    io_primitive.load_primitive(normalize=False)
    pathlist = Path(PATHS.NETWORK.root).glob('*.{0}'.format('off'))
    for path in pathlist:
        lidar_cloud_path = str(path)
        # NOTE(review): splitting on '\\' only works with Windows separators;
        # Path(path).name would be portable.
        file_name = lidar_cloud_path.split("\\")[-1]
        label_path = PATHS.NETWORK.TEST_MODELLED + file_name
        cloud_path = PATHS.NETWORK.TEST_CLOUD + file_name
        # Raw LIDAR observation and its ground-truth modelled primitive.
        observation_points = read_off_file(cloud_path)
        io_observation_cloud = IoObservation(observation_points)
        io_primitive.scale_relative_to(io_observation_cloud)
        io_primitive.align_primitive_on_z_axis(io_observation_cloud)
        io_primitive.compute_normals()
        modelled_primitive_points = read_off_file(label_path)
        io_modelled_primitive = IoObservation(modelled_primitive_points)
        # One network input sample per primitive point near the observation;
        # boolIndexes marks which primitive points produced a sample.
        eval_X, boolIndexes = make_samples(io_observation_cloud, io_primitive, io_modelled_primitive, usePrimitivePoints=False, generate_for='test')
        eval_X = upsample_point_set(sample_collection=eval_X, points_count=ModellingParameters.NUM_POINTS_UPSAMPLE)
        # Red: primitive before modelling.
        cloud_bef = geometry.PointCloud()
        cloud_bef.points = utility.Vector3dVector(np.asarray(io_primitive.point_cloud.points))
        cloud_bef.normals = utility.Vector3dVector(np.asarray(io_primitive.point_cloud.normals))
        cloud_bef.paint_uniform_color([255, 0, 0])
        # Black: raw LIDAR observation.
        cloud_lidar = geometry.PointCloud()
        cloud_lidar.points = utility.Vector3dVector(np.asarray(io_observation_cloud.point_cloud))
        cloud_lidar.paint_uniform_color([0, 0, 0])
        # Yellow: ground-truth modelled primitive.
        cloud_modelled = geometry.PointCloud()
        cloud_modelled.points = utility.Vector3dVector(np.asarray(io_modelled_primitive.point_cloud.points))
        cloud_modelled.paint_uniform_color([255, 255, 0])
        visualization.draw_geometries([cloud_bef, cloud_lidar, cloud_modelled])
        # Run the network once per sample; each predict returns one 3-D point.
        final_pts = []
        idx = 0
        for i in range(0, len(eval_X)):
            pred = eval_X[i].reshape(-1, nr_points, 3)
            points = model.predict(pred)
            final_pts.append(points)
            idx = i  # NOTE(review): idx is written but never read afterwards
        final_pts = np.reshape(final_pts, newshape=(len(final_pts), 3))
        print('Final pts len : ', len(final_pts))
        # Undo the normalisation scale applied when the samples were built.
        final_pts = [point * ModellingParameters.CAR.SCALE for point in final_pts]
        true_indexes = [i for i, val in enumerate(boolIndexes) if val]
        # Green: primitive points that contributed a sample.
        for i in true_indexes:
            cloud_bef.colors[i] = [0, 255, 0]
        import pclpy.pcl.point_types as ptype
        aux = ptype.PointXYZ()
        new_points = []
        for i in range(0, len(final_pts)):
            # Displace each affected primitive point by its predicted offset
            # (predictions are centred on NORMALIZATION_CENTER).
            val = io_primitive.point_cloud.points[true_indexes[i]] + (final_pts[i] - ModellingParameters.NORMALIZATION_CENTER)
            aux.x = val[0]
            aux.y = val[1]
            aux.z = val[2]
            new_points.append(val)
            # transform_neighbor_points_for(i, aux, srcPrimitive, None)
        # Blue: primitive after applying the network's deformation.
        cloud_aft = geometry.PointCloud()
        cloud_aft.points = utility.Vector3dVector(new_points)
        cloud_aft.paint_uniform_color([0, 0, 255])
        # cloud.normals = utility.Vector3dVector(cloud_points[:,3:6])
        visualization.draw_geometries([cloud_bef, cloud_aft, cloud_lidar, cloud_modelled])
def train_gfp():
    """Train GFPNet on the configured H5 dataset and save the weights.

    Loads train/test splits via ``net_params``, fits the model, and writes the
    final weights to ``net_params.PATH_TO_WEIGHTS``.
    """
    train_points, train_labels = net_params.load_data('train')
    test_points, test_labels = net_params.load_data('test')
    model = GFPNet(net_params.NUM_POINTS)
    model.compile(optimizer='adam',
                  loss='mean_squared_error',
                  metrics=['accuracy'])
    print(model.summary())
    print('Shape Train', np.shape(train_points))
    # Fit model on training data
    # NOTE(review): range(1, 2) executes exactly one iteration, so the
    # periodic evaluation below (i % 10 == 0) never runs — confirm whether a
    # larger outer range was intended.
    for i in range(1, 2):
        model.fit(train_points, train_labels, batch_size=net_params.BATCH_SIZE, epochs=net_params.MAX_EPOCH,
                  shuffle=True, verbose=1)
        s = "Current epoch is:" + str(i)
        print(s)
        if i % 10 == 0:
            # Periodic held-out evaluation every 10th outer iteration.
            score = model.evaluate(test_points, test_labels, verbose=1)
            print('Test loss: ', score[0])
            print('Test accuracy: ', score[1])
    model.save_weights(net_params.PATH_TO_WEIGHTS)
if __name__ == '__main__':
    # tf version
    print(tf.__version__)
    # Pin CUDA device enumeration to PCI bus order and expose both GPUs.
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    os.environ['CUDA_VISIBLE_DEVICES'] = '0, 1'
    if tf.test.gpu_device_name():
        print('GPU found')
    else:
        print("No GPU found")
    # Build the training H5 archive from the raw .off sample/label folders.
    createH5_from(PATHS.NETWORK.TRAIN_X, PATHS.NETWORK.TRAIN_Y, 'off', 'training_set')
    # train_gfp()
    # test_GFP()
43,656 | RovisLab/GFPNet | refs/heads/master | /io_observation.py | from open3d.open3d import *
from transformations_utils import translate_point_cloud, upsample_cloud_kd_tree, downsample_cloud_random
from transformations_utils import compute_centroid
from open3d.open3d import geometry, utility
from transformations_utils import normalize_cloud
from io_utils import read_off_file
class IoObservation:
    """Wrapper around an observed (e.g. LIDAR-segmented) point cloud.

    Bundles the raw ``open3d`` point cloud with its downsampled variant, FPFH
    registration result and centroid, plus helpers for translation, normal
    estimation, file loading and resampling.
    """

    def __init__(self, points, path=None):
        # Optional source file path; only load_point_cloud() needs it.
        # (Fix: load_point_cloud() previously read self.path although no code
        # path ever set it, which raised AttributeError.)
        self.path = path
        self.point_cloud = geometry.PointCloud()
        self.point_cloud.points = utility.Vector3dVector(points)
        self.cloud_down = geometry.PointCloud()
        self.cloud_fpfh = registration.RegistrationResult()
        self.centroid = []

    def translate_to_origin(self, point=None):
        """
        Translates object into origin, based on its centroid
        :return: [out] Sets the point cloud translated into origin.
        """
        assert len(self.point_cloud.points) > 0
        if point is None:
            centroid = compute_centroid(self.point_cloud.points)
            translated_points = translate_point_cloud(self.point_cloud.points, centroid, action="translate")
        else:
            # Fix: was ``self.point_cloud.xyz`` — open3d PointCloud has no
            # ``xyz`` attribute.  NOTE(review): the supplied ``point`` is
            # still ignored and [0, 0, 0] is used, matching the original
            # intent as written — confirm whether ``point`` should be the
            # translation target instead.
            translated_points = translate_point_cloud(self.point_cloud.points, [0, 0, 0], action="translate")
        self.point_cloud.points = utility.Vector3dVector(translated_points)

    def compute_normals(self, radius, max_nn):
        """Estimate per-point normals using a hybrid KD-tree search."""
        geometry.estimate_normals(self.point_cloud, geometry.KDTreeSearchParamHybrid(radius=radius, max_nn=max_nn))

    def load_point_cloud(self, normalize=False):
        """Read the cloud file at ``self.path`` (.off/.pcd/.ply) into this object.

        :param normalize: for .off files, normalize the points before storing.
        """
        try:
            try:
                # Extension of the file name (Windows-style path expected).
                file_extension = (str(self.path).rsplit("\\", 1)[1]).rsplit(".", 1)[1]
            except Exception as e:
                print('Exception at path split.', e)
            if file_extension == "off" or file_extension == "OFF":
                if normalize:
                    points_array, normals_array = read_off_file(self.path)
                    normalized_points = normalize_cloud(points_array)
                    self.point_cloud.points = utility.Vector3dVector(normalized_points)
                else:
                    points_array, normals_array = read_off_file(self.path)
                    self.point_cloud.points = utility.Vector3dVector(points_array)
            elif file_extension == 'pcd':
                io.read_point_cloud(self.path, self.point_cloud)
            elif file_extension == 'ply':
                self.mesh = io.read_triangle_mesh(self.path)
                self.point_cloud.points = utility.Vector3dVector(self.mesh.vertices)
        except Exception as e:
            print('Exception at reading off file.', e)

    def upsample_cloud_to(self, number_of_points):
        """
        Method used for upsampling the cloud to a certain number of points.
        :param number_of_points: [in] Desired number of points
        :return: [out] Sets the cloud inside this object
        """
        if len(self.point_cloud.points) < number_of_points:
            upsample_cloud_kd_tree(self, number_of_points)
        else:
            self.point_cloud = downsample_cloud_random(self, number_of_points)
43,657 | RovisLab/GFPNet | refs/heads/master | /test.py | import numpy as np
from open3d.open3d import geometry, utility, visualization
from io_observation import IoObservation
from io_primitive import IoPrimitive
from transformations_utils import delaunay_triangulation, icp_align_clouds
from kitti_utils import load_cloud_and_labels, align_img_and_pc, get_segmented_cloud_points_after_fitting_box, translate_points
from path_utils import PATHS
from visualization_utils import draw_bbox_open3d
from modelling_utils import active_contour_modelling
def custom_visualizer_with_key_callback(primitive, observation):
    """Open an interactive window showing *primitive* and *observation*.

    Pressing 'M' runs one active-contour modelling pass.
    NOTE(review): the 'M' callback operates on the module-level globals
    ``srcPrimitive`` and ``io_obs`` rather than on the ``primitive`` /
    ``observation`` arguments — confirm this coupling is intended.
    """
    def rotate_vis(vis):
        # Idle-rotation animation callback (currently not registered below).
        ctr = vis.get_view_control()
        ctr.rotate(1.0, 0.0)
        return False

    def model_clouds(vis):
        # Deform the primitive towards the observation via active contours.
        srcPrimitive.point_cloud, io_obs.point_cloud = active_contour_modelling(srcPrimitive, io_obs, search_radius=srcPrimitive.RADIUS_SEARCH,
                                                                               steps=srcPrimitive.STEP_SIZE * srcPrimitive.STEPS,
                                                                               step_dist=srcPrimitive.STEP_SIZE, visualizer=vis)
        return False

    vis = visualization.VisualizerWithKeyCallback()
    vis.create_window("--Presenting Window--", 1280, 1024)
    vis.add_geometry(primitive)
    vis.add_geometry(observation)
    #vis.register_animation_callback(rotate_vis)
    vis.register_key_callback(ord("M"), model_clouds)
    vis.run()
    vis.destroy_window()
if __name__ == '__main__':
    # Demo pipeline: for one KITTI frame, segment each labelled car, fit the
    # generic car primitive to it (interactive modelling), mesh the result and
    # place everything back into the full scene.
    print('Cropping original velodyne clouds')
    print('Processing: ')
    full_cloud = geometry.PointCloud()
    seg_cloud = geometry.PointCloud()
    clouds = []
    # DECLARING OBJECTS
    # Generic car primitive used as the deformable template for every object.
    source_primitive_cloud_path = PATHS.PATH_TO_PRIMITIVES.CAR.root + "CarPrimitive_15_500.off"
    srcPrimitive = IoPrimitive(source_primitive_cloud_path)
    srcPrimitive.load_primitive()
    srcPrimitive.compute_normals(30)
    srcPrimitive.point_cloud.normalize_normals()
    srcPrimitive.point_cloud.paint_uniform_color([1, 0, 0])
    frame = 8 # sample id from kitti dataset
    final_clouds = []
    # Velodyne scan + label file for the chosen KITTI frame.
    lidar, labels = load_cloud_and_labels(frame)
    img_dir = str(PATHS.KITTI.IMG_ROOT + '%06d.png' % frame)
    pc_dir = str(PATHS.KITTI.PC_ROOT + '%06d.bin' % frame)
    calib_dir = str(PATHS.KITTI.CALIB_ROOT + '%06d.txt' % frame)
    # Project the point cloud through the calibration into the image frame.
    full_cloud_points = align_img_and_pc(img_dir, pc_dir, calib_dir)
    full_cloud.points = utility.Vector3dVector(np.array(full_cloud_points[:, 0:3]))
    full_cloud.colors = utility.Vector3dVector([[0, 0, 0] for i in range(0, len(full_cloud.points))])
    # keep original labels, and work on original labels
    lab_cnt = 0
    for line in labels:
        # Clear cloud to be repopulated
        values = line.split(' ')
        lab_cnt += 1
        # KITTI label fields: class, truncation, occlusion, observation angle,
        # 2-D bbox (l, t, r, b), 3-D dimensions (h, w, l), location and yaw.
        [cls, obj_lv_image, occl_state, obs_angl, bb_l, bb_t, bb_r, bb_d, h, w, l, y, z, x, rot] = values
        x = float(x)
        y = float(y)
        z = float(z)
        h = float(h)
        w = float(w)
        l = float(l)
        rot = float(rot)
        # select only cars
        if cls == PATHS.KITTI.CAR_CLASS:
            box = [[y, z, x, h, w, l,rot]]
            # Points inside this label's 3-D box plus the box centre used to
            # re-centre them.
            extracted_points, box_center = get_segmented_cloud_points_after_fitting_box(box, lidar)
            clouds.append([extracted_points, box_center])
    print('Labels Count : {0}'.format(lab_cnt))
    line_set = []
    box_count = 0
    all_pts = []
    concatenated_cloud = []
    primitive_obs_pairs_list = []
    mesh = geometry.TriangleMesh()
    for point_set, translation_point in clouds:
        # all extracted clouds colored in red
        # Work in a box-centred frame, model the primitive, translate back.
        point_set = translate_points(point_set, translation_point, 'neg')
        io_obs = IoObservation(point_set)
        srcPrimitive.scale_relative_to(io_obs.point_cloud)
        srcPrimitive.align_primitive_on_z_axis(io_obs.point_cloud)
        io_obs.point_cloud.paint_uniform_color([0, 0, 1])
        # Interactive window: press 'M' to run active-contour modelling.
        custom_visualizer_with_key_callback(srcPrimitive.point_cloud, io_obs.point_cloud)
        #icp_align_clouds(srcPrimitive, io_obs, threshold=0.5, show_on_visualizer=True)
        concatenated_cloud.extend(srcPrimitive.point_cloud.points)
        # Surface mesh over the (accumulated) deformed primitive points.
        mesh = delaunay_triangulation(concatenated_cloud)
        translated_pts = translate_points(concatenated_cloud, translation_point, "pos")
        all_pts.extend(translated_pts)
        mesh.vertices = utility.Vector3dVector(translated_pts)
        mesh.compute_triangle_normals(True)
        mesh.compute_vertex_normals(True)
        mesh.paint_uniform_color([1, 0, 0])
        seg_cloud.points = utility.Vector3dVector(all_pts)
        line_set = draw_bbox_open3d(mesh.vertices)
        final_clouds.append(line_set)
        final_clouds.append(mesh)
        # Restore the template before fitting it to the next object.
        srcPrimitive.reset_points()
    seg_cloud.colors = utility.Vector3dVector([[0, 255, 0] for i in range(0, len(seg_cloud.points))])
    final_clouds.append(seg_cloud)
    final_clouds.append(full_cloud)
| {"/modelling_utils.py": ["/io_primitive.py", "/parameters.py"], "/io_utils.py": ["/path_utils.py"], "/transformations_utils.py": ["/io_observation.py", "/parameters.py"], "/io_primitive.py": ["/transformations_utils.py", "/io_utils.py", "/parameters.py", "/path_utils.py"], "/preprocess_kitti.py": ["/kitti_dataset.py"], "/train_GFP.py": ["/io_observation.py", "/io_primitive.py", "/io_utils.py", "/model.py", "/parameters.py", "/path_utils.py", "/transformations_utils.py"], "/io_observation.py": ["/transformations_utils.py", "/io_utils.py"], "/test.py": ["/io_observation.py", "/io_primitive.py", "/transformations_utils.py", "/path_utils.py", "/visualization_utils.py", "/modelling_utils.py"], "/model.py": ["/custom_layers.py"]} |
43,658 | RovisLab/GFPNet | refs/heads/master | /kitti_dataset.py | from os import path
class KITTIPaths(object):
    """Locations and label names for the locally downloaded KITTI dataset.

    All paths are derived from this module's directory:
      DIRECTORY_PATH - root of the KITTI download (dataset/kitti)
      IMG_ROOT       - camera images (.png) from the image_2 folder
      PC_ROOT        - velodyne point clouds
      CALIB_ROOT     - per-frame calibration files
    The *_CLASS constants are the object-class strings used in KITTI labels.
    """
    basepath = path.dirname(__file__)
    #TODO: set a generic path for the users to download all the data in
    DIRECTORY_PATH = path.abspath(path.join(basepath, "dataset", "kitti"))
    IMG_ROOT = "{0}/image_2/".format(DIRECTORY_PATH)
    PC_ROOT = "{0}/velodyne/".format(DIRECTORY_PATH)
    CALIB_ROOT = "{0}/calib/".format(DIRECTORY_PATH)
    CAR_CLASS = 'Car'
    PEDESTRIAN_CLASS = 'Pedestrian'
    CYCLIST_CLASS = 'Cyclist'
    TRUCK_CLASS = 'Truck'
43,659 | RovisLab/GFPNet | refs/heads/master | /parameters.py | import numpy as np
class ModellingParameters(object):
    """
    Contains object class parameters used at the modelling process through active contours
    """
    # Fixed number of points to be provided to the first NN layer.
    NUM_POINTS_UPSAMPLE = 50
    # Centre used to (de)normalise the network's predicted offsets.
    NORMALIZATION_CENTER = np.array([0.5, 0.5, 0.5])

    # NOTE: the redundant `import numpy as np` statements that previously sat
    # inside the nested class bodies were removed — `np` resolves from the
    # module-level import.

    class CAR(object):
        # How far around a contact point the primitive may be deformed.
        MODELLING_AFFECTED_AREA_FACTOR = 5
        RADIUS_SEARCH = 0.2   # neighbour-search radius
        STEP_SIZE = 0.1       # contour step length
        STEPS = 5             # number of contour steps
        # Maps the maximum possible deformation distance onto [0, 0.5].
        SCALE = 0.5/((STEPS * STEP_SIZE) + ((RADIUS_SEARCH - STEP_SIZE) if RADIUS_SEARCH > STEP_SIZE else 0))
        # Prior (anchor) car dimensions and their bounding-box diagonal.
        ANCHOR_L = 4
        ANCHOR_W = 1.6
        ANCHOR_H = 1.6
        DIAGONAL = np.sqrt(ANCHOR_L**2 + ANCHOR_H**2 + ANCHOR_W**2)

    class PEDESTRIAN(object):
        RADIUS_SEARCH = 0.05
        STEP_SIZE = 0.05
        STEPS = 5

    class TRUCK(object):
        RADIUS_SEARCH = 0.5
        STEP_SIZE = 0.1
        STEPS = 5

    class MUG(object):
        MODELLING_AFFECTED_AREA_FACTOR = 5
        RADIUS_SEARCH = 0.005
        STEP_SIZE = 0.005
        STEPS = 5
        SCALE = 0.5 / ((STEPS * STEP_SIZE) + (
            (RADIUS_SEARCH - STEP_SIZE) if RADIUS_SEARCH > STEP_SIZE else 0))  # TODO: modify
        # Prior (anchor) mug dimensions and their bounding-box diagonal.
        ANCHOR_L = 0.05
        ANCHOR_W = 0.05
        ANCHOR_H = 0.10
        DIAGONAL = np.sqrt(ANCHOR_L ** 2 + ANCHOR_H ** 2 + ANCHOR_W ** 2)
43,660 | RovisLab/GFPNet | refs/heads/master | /model.py | from keras.layers import Input
from keras.models import Model
from keras.layers import Dense, Flatten, Reshape, Dropout
from keras.layers import Convolution1D, MaxPooling1D, BatchNormalization, Convolution3D
from keras.layers import Lambda, concatenate, multiply
import numpy as np
import tensorflow as tf
from custom_layers import matMul_layer
def classification_model(num_points, k_clases):
    """Build a PointNet-style classifier over (num_points, 3) point sets.

    Pipeline: input transform net (3x3), shared per-point MLPs, feature
    transform net (64x64), global max-pool feature, then a dense
    classification head with ``k_clases`` softmax outputs.
    """
    input_points = Input(shape=(num_points, 3))
    # --- input transform net: regresses a 3x3 alignment matrix ---
    x = Convolution1D(64, 1, activation='relu',
                      input_shape=(num_points, 3))(input_points)
    x = BatchNormalization()(x)
    x = Convolution1D(128, 1, activation='relu')(x)
    x = BatchNormalization()(x)
    x = Convolution1D(1024, 1, activation='relu')(x)
    x = BatchNormalization()(x)
    x = MaxPooling1D(pool_size=num_points)(x)
    x = Dense(512, activation='relu')(x)
    x = BatchNormalization()(x)
    x = Dense(256, activation='relu')(x)
    x = BatchNormalization()(x)
    # Zero weights + identity bias so the initial transform is the identity.
    x = Dense(9, weights=[np.zeros([256, 9]), np.array([1, 0, 0, 0, 1, 0, 0, 0, 1]).astype(np.float32)])(x)
    input_T = Reshape((3, 3))(x)
    # Apply the learned 3x3 transform to the raw points.
    g = matMul_layer([input_points, input_T])
    g = Convolution1D(64, 1, input_shape=(num_points, 3), activation='relu')(g)
    g = BatchNormalization()(g)
    g = Convolution1D(64, 1, input_shape=(num_points, 3), activation='relu')(g)
    g = BatchNormalization()(g)
    # feature transform net
    f = Convolution1D(64, 1, activation='relu')(g)
    f = BatchNormalization()(f)
    f = Convolution1D(128, 1, activation='relu')(f)
    f = BatchNormalization()(f)
    f = Convolution1D(1024, 1, activation='relu')(f)
    f = BatchNormalization()(f)
    f = MaxPooling1D(pool_size=num_points)(f)
    f = Dense(512, activation='relu')(f)
    f = BatchNormalization()(f)
    f = Dense(256, activation='relu')(f)
    f = BatchNormalization()(f)
    # 64x64 feature transform, also initialised to the identity.
    f = Dense(64 * 64, weights=[np.zeros([256, 64 * 64]), np.eye(64).flatten().astype(np.float32)])(f)
    feature_T = Reshape((64, 64))(f)
    g = matMul_layer([g, feature_T])
    g = Convolution1D(64, 1, activation='relu')(g)
    g = BatchNormalization()(g)
    g = Convolution1D(128, 1, activation='relu')(g)
    g = BatchNormalization()(g)
    g = Convolution1D(1024, 1, activation='relu')(g)
    g = BatchNormalization()(g)
    # global_feature
    global_feature = MaxPooling1D(pool_size=num_points)(g)
    # point_net_cls
    c = Dense(512, activation='relu')(global_feature)
    c = BatchNormalization()(c)
    c = Dropout(rate=0.7)(c)
    c = Dense(256, activation='relu')(c)
    c = BatchNormalization()(c)
    c = Dropout(rate=0.7)(c)
    c = Dense(k_clases, activation='softmax')(c)
    prediction = Flatten()(c)
    model = Model(inputs=input_points, outputs=prediction)
    return model
def GFPNet(num_points):
    """Build the GFPNet regression model.

    Takes a (num_points, 3) point set and regresses a single 3-D point:
    shared per-point conv/BN stack, global max-pool, dense funnel, linear
    3-unit output.
    """
    inputs = Input(shape=(num_points, 3))
    # Shared per-point feature extraction: 1x conv + batch-norm per width.
    net = inputs
    for width in (64, 128, 256):
        net = Convolution1D(width, 1, activation='relu')(net)
        net = BatchNormalization()(net)
    # Collapse the point dimension into a single global feature.
    pooled = MaxPooling1D(pool_size=num_points)(net)
    # Dense funnel down to the regression head.
    head = pooled
    for units in (256, 128, 64):
        head = Dense(units, activation='relu')(head)
    flat = Flatten()(head)
    # Linear output: the predicted 3-D point.
    outputs = Dense(3, activation=None)(flat)
    return Model(inputs=inputs, outputs=outputs)
43,661 | shaeed/ArtStudio | refs/heads/master | /artstdweb/infinityroom/migrations/0003_auto_20180808_0025.py | # Generated by Django 2.0.7 on 2018-08-07 18:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('infinityroom', '0002_auto_20180807_2251'),
]
operations = [
migrations.CreateModel(
name='GlobalVars',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('selectedTheme', models.IntegerField()),
('tcp', models.BinaryField()),
('gvId', models.IntegerField()),
],
),
migrations.AddField(
model_name='ledthemes',
name='enable',
field=models.BooleanField(default=True),
preserve_default=False,
),
]
| {"/artstdweb/infinityroom/urls.py": ["/artstdweb/infinityroom/__init__.py"]} |
43,662 | shaeed/ArtStudio | refs/heads/master | /artstdweb/infinityroom/install.py | # -*- coding: utf-8 -*-
from infinityroom.models import LEDThemes, Device, Settings
from artstdweb.constants import const
# Catalogue of the default LED themes.  Columns:
#   name, id (the serial number mirrors the id in the default set), type,
#   brightness, speed, speed max, speed min, colour (hex string or 'NA'),
#   option flags ({feature: user-adjustable?}).
_LED_THEMES = [
    ('Off',                    1, 0,  0,   0, 100,  0, 'NA',     {'brt': 0, 'spd': 0, 'clr': 0}),
    ('Rainbow',                3, 0, 50,   7,  30,  3, 'NA',     {'brt': 1, 'spd': 1, 'clr': 0}),
    ('Strobe',                 4, 0, 80,  50, 100,  0, '3515FF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('Strobe 2',              13, 0, 80,  10, 100,  0, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('All White',              5, 0, 80,  10, 100,  0, 'FFFFFF', {'brt': 1, 'spd': 0, 'clr': 0}),
    ('Colorful',               6, 0, 80,  10, 100,  0, '3515FF', {'brt': 1, 'spd': 0, 'clr': 1}),
    ('Fade In-Out',           12, 0, 80,  10, 100,  0, 'FFFFFF', {'brt': 1, 'spd': 0, 'clr': 1}),
    ('Fade In-Out 2',          7, 0, 80,  10, 100,  0, '3515FF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('Night Sky',             10, 1, 80, 150, 300, 75, 'FFFFFF', {'brt': 0, 'spd': 1, 'clr': 0}),
    ('Colorful Night',        16, 0, 80, 150, 300, 75, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 0}),
    # ('RGB Loop',            11, 0, 80,  10, 100,  0, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 0}),
    ('Cylon Bounce',          14, 0, 80,  10, 100,  0, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('Chase Showdown',        15, 0, 80,  10, 100,  0, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('Sparkle',               17, 0, 80,  10, 300,  0, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 1}),
    # ('Snow Sparkle',        18, 0, 80,  10, 100,  0, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('Running',               19, 0, 80,  10, 150,  4, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('Color Wipe',            20, 0, 80,  10,  50,  2, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('Theater Chase Rainbow', 21, 0, 80,  20,  80, 10, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 0}),
    ('Fire',                  22, 0, 80,  10,  20,  2, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 0}),
    ('Meteor Rain',           23, 0, 80,  10,  20,  0, 'FFFFFF', {'brt': 1, 'spd': 1, 'clr': 1}),
    ('Hue Effect',            24, 0, 80,  10, 100,  0, 'FFFFFF', {'brt': 1, 'spd': 0, 'clr': 0}),
]

# Default devices.  Columns:
#   name, device id, serial number, device type, groups, target, initial
#   on/off state, option flags, extra per-device data.
_DEVICES = [
    ('Infinity Room', 1, 1, const.DEV_IR, [const.GRP_HOME], const.TAR_IR, True,
     {'brt': 1, 'spd': 0, 'clr': 0}, {'selectedTheme': 1}),
    ('Andromeda', 5, 2, const.DEV_LED, [const.GRP_IR], const.TAR_IRL, True,
     {'brt': 0, 'spd': 0, 'clr': 0}, {}),
    ('Whirlpool', 2, 3, const.DEV_LED, [const.GRP_IR], const.TAR_IRL, False,
     {'brt': 0, 'spd': 0, 'clr': 0}, {}),
    ('Cartwheel', 16, 4, const.DEV_LED, [const.GRP_IR], const.TAR_IRL, False,
     {'brt': 0, 'spd': 0, 'clr': 0}, {}),
]


def _delete_all(model):
    """Remove every existing row of *model* so the install starts clean."""
    for row in model.objects.all():
        row.delete()


def createThemes():
    """(Re)install the default LED themes, devices and settings.

    Wipes the LEDThemes, Device and Settings tables and recreates the theme
    and device rows from the module-level catalogues above.  Idempotent: safe
    to run repeatedly.
    """
    # Themes: delete all, then recreate from the catalogue.
    _delete_all(LEDThemes)
    for name, tid, ttype, brt, spd, spd_max, spd_min, clr, options in _LED_THEMES:
        LEDThemes(
            themeName=name,
            themeId=tid,
            themeSNo=tid,   # serial number mirrors the id in the default set
            themeType=ttype,
            themeBrt=brt,
            themeSpd=spd,
            themeSpdMax=spd_max,
            themeSpdMin=spd_min,
            themeClr=clr,
            themeOption=options,
            enable=True).save()

    # Devices: delete all, then recreate.  All defaults share brt/spd/clr = 0
    # with a 0-100 speed range and enable=True.
    _delete_all(Device)
    for name, dev_id, sno, dev_type, grp, target, state, options, other in _DEVICES:
        Device(
            name=name,
            devId=dev_id,
            sno=sno,
            devType=dev_type,
            devGrp=grp,
            target=target,
            brt=0,
            spd=0,
            spdMax=100,
            spdMin=0,
            clr=0,
            option=options,
            enable=True,
            state=state,
            other=other).save()

    # Start with no persisted settings.
    _delete_all(Settings)
| {"/artstdweb/infinityroom/urls.py": ["/artstdweb/infinityroom/__init__.py"]} |
43,663 | shaeed/ArtStudio | refs/heads/master | /artstdweb/infinityroom/send_to_controllers.py | ###############################################################################
# Communication Standrad & Protocol
#
# 1) Receive '>>' for every data sent as acknowladge
# 2) Send '\r' in the end of every chunk of data
#
# Command information
# A - Apply theme
# B 123 - Brightness
# C 1234567 - Color
# L 123 - LED
# S 12 - Speed
# T 12 - Theme
#
# Example-
# T 1;B 123;S 12;C 123456;A 1; \r
from infinityroom.connect_to_controllers import GetIP
from artstdweb.constants import const
import requests
class SendToNodeMCU:
    """Transport that pushes commands to a NodeMCU controller via HTTP GET."""

    @staticmethod
    def send(target, data):
        """Send *data* as query parameters to the controller handling *target*.

        Retries up to 3 times, refreshing the cached controller IP after each
        failure.  Returns the controller's response body on success, or an
        error message string after the retries are exhausted.
        """
        attempts = 3
        while attempts > 0:
            attempts -= 1
            try:
                adr = GetIP.getAdr(const.hMap[target])
                res = requests.get(adr + target, params=data)
                return res.text
            except Exception:
                # Controller unreachable (or the cached address is stale):
                # refresh the IP for this host and retry.  Narrowed from a
                # bare ``except:`` which also swallowed KeyboardInterrupt /
                # SystemExit.
                GetIP.updateIp(const.hMap[target])
        return 'Not able to connect to ' + const.hMap[target]
def sendToControllers(target, data):
    """Dispatch *data* to the controller backend responsible for *target*.

    Single routing point: currently every target is served by the NodeMCU
    transport; add other device transports here when they exist.
    """
    return SendToNodeMCU.send(target, data)
| {"/artstdweb/infinityroom/urls.py": ["/artstdweb/infinityroom/__init__.py"]} |
43,664 | shaeed/ArtStudio | refs/heads/master | /artstdweb/infinityroom/migrations/0001_initial.py | # Generated by Django 2.0.7 on 2018-08-07 05:17
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='LEDThemes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('themeName', models.CharField(max_length=100)),
('themeId', models.IntegerField()),
('themeSNo', models.IntegerField()),
('themeType', models.IntegerField()),
('themeBrt', models.IntegerField()),
('themeSpd', models.IntegerField()),
('themeClr', models.CharField(max_length=8)),
('themeOption', models.CharField(max_length=100)),
],
),
]
| {"/artstdweb/infinityroom/urls.py": ["/artstdweb/infinityroom/__init__.py"]} |
43,665 | shaeed/ArtStudio | refs/heads/master | /artstdweb/infinityroom/common_functions.py | # -*- coding: utf-8 -*-
def insertInDict(source, key, data):
dct = {} | {"/artstdweb/infinityroom/urls.py": ["/artstdweb/infinityroom/__init__.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.