index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
39,473
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/flask_main.py
|
from flask import render_template, request, Flask, send_file
app = Flask(__name__)
app.config.from_object('config')
from forms import UnitForm
from unit_class import Unit
from wfb_simulation import wfb_simulation
from read_csv import win_percent
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
def index():
    """Render the simulation form; on a valid submit, run the simulation.

    Builds the two units from the submitted form fields, runs
    ``wfb_simulation`` for the requested number of runs, and displays the
    win percentage read back from the results CSV.
    """
    form = UnitForm()
    output = ""
    if form.validate_on_submit():
        # The eight per-unit stats are read by a shared helper instead of
        # the original's two near-identical 4-line Unit(...) calls.
        unit_one = _unit_from_form('1')
        unit_two = _unit_from_form('2')
        wfb_simulation(unit_one, unit_two, int(request.form['runs']))
        output = win_percent()
    return render_template('index.html',
                           form=form, output=output)


def _unit_from_form(suffix):
    """Build a Unit from the request form fields ending in *suffix* ('1' or '2')."""
    name = request.form['name' + suffix]
    stats = [int(request.form[field + suffix])
             for field in ('models', 'WS', 'S', 'T', 'I', 'Sv', 'Ld')]
    return Unit(name, *stats)
@app.route('/results.csv')
def results():
    """Serve the raw results CSV produced by the most recent simulation run."""
    return send_file('results.csv')
@app.route('/log.txt')
def log():
    """Offer the simulation log as a file download (attachment)."""
    return send_file('log.txt', as_attachment=True)
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,474
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/sandbox.py
|
from unit_class import Unit
from wfb_simulation import *
# Scratch script: pit a sample Orc unit against a sample Dwarf unit
# over 10,000 simulated combats.
#cont = cc_round(orc, dwarf)
#print(cont)
attacker = Unit(name="Orc", models=20, WS=3, S=4, T=2, I=2, Sv=6, Ld=7)
defender = Unit(name="Dwarf", models=20, WS=3, S=3, T=2, I=2, Sv=4, Ld=8)
wfb_simulation(attacker, defender, 10000)
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,475
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, IntegerField, BooleanField
from wtforms.validators import DataRequired
class UnitForm(FlaskForm):
    """Form collecting the stat line for two units plus the run count.

    Fields suffixed '1' belong to the first unit and '2' to the second:
    name, number of models, WS, S, T, I, Sv and Ld.  All fields are
    required.
    """

    name1 = StringField('name1', validators=[DataRequired()])
    models1 = IntegerField('models1', validators=[DataRequired()])
    WS1 = IntegerField('WS1', validators=[DataRequired()])
    S1 = IntegerField('S1', validators=[DataRequired()])
    T1 = IntegerField('T1', validators=[DataRequired()])
    I1 = IntegerField('I1', validators=[DataRequired()])
    Sv1 = IntegerField('Sv1', validators=[DataRequired()])
    Ld1 = IntegerField('Ld1', validators=[DataRequired()])

    name2 = StringField('name2', validators=[DataRequired()])
    models2 = IntegerField('models2', validators=[DataRequired()])
    WS2 = IntegerField('WS2', validators=[DataRequired()])
    S2 = IntegerField('S2', validators=[DataRequired()])
    T2 = IntegerField('T2', validators=[DataRequired()])
    I2 = IntegerField('I2', validators=[DataRequired()])
    Sv2 = IntegerField('Sv2', validators=[DataRequired()])
    Ld2 = IntegerField('Ld2', validators=[DataRequired()])

    # Number of simulated combats to run.
    runs = IntegerField('runs', validators=[DataRequired()])
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,476
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/roll_dice.py
|
# Library import
import random
# Given the number of rolls and the value to roll equal to or above (i.e. 4+), returns number of successes
def roll_dice(n_rolls, success_plus):
    """Roll *n_rolls* six-sided dice and count the successes.

    Args:
        n_rolls: number of d6 to roll.
        success_plus: a die showing this value or higher counts as a
            success (e.g. 4 means "4+").

    Returns:
        The number of dice that rolled >= success_plus.
    """
    # sum() over a generator replaces the original build-a-list-then-filter
    # (and drops its stray trailing semicolon); one randint call per die,
    # same as before.
    return sum(1 for _ in range(n_rolls)
               if random.randint(1, 6) >= success_plus)
class Return_ld_test(object):
    """Record pairing a 2d6 leadership roll with its "Pass"/"Fail" outcome."""

    def __init__(self, result, roll):
        # result: "Pass" or "Fail"; roll: the 2d6 total (2-12).
        self.result = result
        self.roll = roll


def ld_test(ld_value):
    """Take a leadership test: roll 2d6, pass on <= ld_value or a natural 2.

    Args:
        ld_value: the (possibly modified) Ld value to roll equal to or below.

    Returns:
        Return_ld_test with .roll (the 2d6 total) and .result ("Pass"/"Fail").
    """
    # Roll the two dice directly; the original built a one-element list
    # only to read it back out.
    roll = random.randint(1, 6) + random.randint(1, 6)
    # A natural 2 (snake eyes) always passes regardless of Ld.
    result = "Pass" if roll <= ld_value or roll == 2 else "Fail"
    return Return_ld_test(roll=roll, result=result)
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,495
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/build_idc_dataset.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 19 20:29:51 2021
@author: femiogundare
"""
# Import the required libraries
import os
from os import listdir
import json
import cv2
import pickle
import progressbar
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from utilities.io.hdf5datasetwriter import HDF5DatasetWriter
from config import idc_config as config
from utilities.build.build_dataset import extract_coords
from utilities.preprocessing.aspectawarepreprocessor import AspectAwarePreprocessor
# Initialize the Config class in the config script
configs = config.Config()
# Put all the arguments of the argparse in a dictionary by calling the get_config method of the Config class
configs_dict = configs.get_config()
# Load the supplied arguments from the config file
DATA_PATH = configs_dict['base_path']
IMAGES_PATH = configs_dict['images_dir']
TRAIN_HDF5_PATH = configs_dict['training_hdf5_path']
VAL_HDF5_PATH = configs_dict['validation_hdf5_path']
TEST_HDF5_PATH = configs_dict['test_hdf5_path']
SEED = configs_dict['random_seed']
IMAGE_HEIGHT = configs_dict['image_height']
IMAGE_WIDTH = configs_dict['image_width']
N_CHANNELS = configs_dict['n_channels']
OUTPUT_DIR = configs_dict['output_dir']
# Artifact paths written at the end of this script (channel means,
# fitted label encoder, and the per-split image-name lists).
DATASET_MEAN_PATH = OUTPUT_DIR + '/idc_dataset_mean.json'
LABEL_ENCODER_PATH = OUTPUT_DIR + '/label_encoder.cpickle'
NAMES_OF_IMAGES_IN_DATASET = OUTPUT_DIR + '/names_of_images.json'
# One sub-directory per patient under IMAGES_PATH.
FOLDER = listdir(IMAGES_PATH)
# Total patch count used to pre-size the dataframe index below.
TOTAL_IMAGES = 277524
# Create a dataframe containing the IDs of the patients, the path to each
# image, the target value, and the image name.
data = pd.DataFrame(index=np.arange(0, TOTAL_IMAGES),
                    columns=["path", "target", "patient_id", "image_name"])
k = 0
for patient_id in FOLDER:
    patient_path = IMAGES_PATH + '/' + patient_id
    # Each patient folder holds a "0" (non-IDC) and a "1" (IDC) sub-folder.
    for c in [0, 1]:
        class_path = patient_path + "/" + str(c) + "/"
        for image_name in listdir(class_path):
            # .loc row/column assignment replaces the original chained
            # data.iloc[k]["col"] = ..., which assigns into a temporary
            # row object and can silently leave the frame unchanged.
            data.loc[k, "path"] = class_path + image_name
            data.loc[k, "target"] = c
            data.loc[k, "patient_id"] = patient_id
            data.loc[k, "image_name"] = image_name
            k += 1
print(data.shape)
print(f'There are {data.shape[0]} images in the dataset.')
# Ensure the target variable is in integer format.  The original used
# np.int, an alias removed in NumPy 1.24; the builtin int is the
# documented replacement.
data.target = data.target.astype(int)
# Encode the target variable
print('Encoding the target variable...')
le = LabelEncoder()
data.target = le.fit_transform(data.target)
# Get the unique patient ids in the dataset, and split them into training,
# validation, and test ids.  Splitting by patient (not by patch) keeps all
# patches from one patient inside a single split.
patient_ids = data.patient_id.unique()
split_size = round(len(patient_ids)/10)  # split ratio is 10%
train_ids, test_ids = train_test_split(patient_ids, test_size=split_size, random_state=SEED)
train_ids, val_ids = train_test_split(train_ids, test_size=split_size, random_state=SEED)
# Get the training, validation, and test dataframes based on the patient ids
training_df = data.loc[data.patient_id.isin(train_ids), :].copy()
validation_df = data.loc[data.patient_id.isin(val_ids), :].copy()
test_df = data.loc[data.patient_id.isin(test_ids), :].copy()
# Class frequencies in the training split, used to keep the same IDC /
# non-IDC ratio when subsampling each split below.
idc_class_freq = training_df['target'].sum()/training_df.shape[0]
non_idc_class_freq = 1 - idc_class_freq
# Fixed subsample sizes for each split.
num_train = 18000
num_val = 3000
num_test = 3000
# Sample each class proportionally, then shuffle the concatenation by
# sampling the full target size (.sample(n) without a seed shuffles).
training_df_idc = training_df[training_df['target']==1].sample(int(round(idc_class_freq*num_train)))
training_df_non_idc = training_df[training_df['target']==0].sample(int(round(non_idc_class_freq*num_train)))
training_df = pd.concat([training_df_idc, training_df_non_idc], axis=0).sample(num_train)
validation_df_idc = validation_df[validation_df['target']==1].sample(int(round(idc_class_freq*num_val)))
validation_df_non_idc = validation_df[validation_df['target']==0].sample(int(round(non_idc_class_freq*num_val)))
validation_df = pd.concat([validation_df_idc, validation_df_non_idc], axis=0).sample(num_val)
test_df_idc = test_df[test_df['target']==1].sample(int(round(idc_class_freq*num_test)))
test_df_non_idc = test_df[test_df['target']==0].sample(int(round(non_idc_class_freq*num_test)))
test_df = pd.concat([test_df_idc, test_df_non_idc], axis=0).sample(num_test)
# NOTE(review): these .sample() calls are unseeded, so the subsets differ
# between runs even though the patient split above is seeded -- confirm
# this is intentional.
print(f'There are {training_df.shape[0]} images in the training set.')
print(f'There are {validation_df.shape[0]} images in the validation set.')
print(f'There are {test_df.shape[0]} images in the test set.')
# Sanity check: no missing values in any split.
print(training_df.isnull().sum())
print(validation_df.isnull().sum())
print(test_df.isnull().sum())
"""
# Add the coordinates (x, y) where each patch is found in the whole mount sample to the dataframe
#training_df = extract_coords(training_df)
#validation_df = extract_coords(validation_df)
#test_df = extract_coords(test_df)
"""
# Construct a list pairing the images paths, images labels and the output hdf5 files of the training,
# validation and test sets
print('Pairing the images paths, images labels and the output hdf5 files of the training, validation and test sets...')
datasets = [
("train", training_df['path'], training_df['target'], TRAIN_HDF5_PATH),
("val", validation_df['path'], validation_df['target'], VAL_HDF5_PATH),
("test", test_df['path'], test_df['target'], TEST_HDF5_PATH)
]
# Initialize the image preprocessor and the RGB channels mean
# NOTE(review): width is given IMAGE_HEIGHT and height IMAGE_WIDTH -- this
# looks swapped relative to the (height, width) HDF5 dims below; harmless
# only if the two are equal.  Confirm against the config.
aap = AspectAwarePreprocessor(width=IMAGE_HEIGHT, height=IMAGE_WIDTH, inter=cv2.INTER_AREA)
R, G, B = [], [], []
# Loop over the (split name, image paths, labels, output path) tuples and
# write each split into its HDF5 file.
for dType, images, labels, outputPath in datasets:
    # Create the HDF5 writer sized for this split.
    print("Building {}...".format(outputPath))
    writer = HDF5DatasetWriter(
        dims=(len(images), IMAGE_HEIGHT, IMAGE_WIDTH, N_CHANNELS),
        outputPath=outputPath, dataKey="images", buffSize=1000
    )
    # Keep the widget list in its own variable instead of reusing the
    # `pbar` name for both the widgets and the progress bar object.
    widgets = ["Building Dataset: ", progressbar.Percentage(), " ",
               progressbar.Bar(), " ", progressbar.ETA()]
    pbar = progressbar.ProgressBar(maxval=len(images), widgets=widgets).start()
    # Loop over the image paths.
    for i, (image, label) in enumerate(zip(images, labels)):
        # Load, convert BGR->RGB, and resize preserving aspect ratio.
        image = cv2.imread(image)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        image = aap.preprocess(image)
        # Accumulate per-channel means over the training set only.
        if dType == "train":
            r, g, b = cv2.mean(image)[:3]
            R.append(r)
            G.append(g)
            B.append(b)
        # Add the processed image to the HDF5 writer.
        writer.add(rows=[image], labels=[label])
        pbar.update(i)
    pbar.finish()
    writer.close()
# Serialize the per-channel training-set means to a json file.
print('Serializing the means...')
dic = {'R': np.mean(R), 'G': np.mean(G), 'B': np.mean(B)}
# `with` guarantees the handles close; the original leaked them.
with open(DATASET_MEAN_PATH, 'w') as f:
    f.write(json.dumps(dic))
# Serialize the label encoder.  This is a pickle (fitted sklearn object),
# not json, despite the .cpickle path making that clear.
print('Serializing the label encoder...')
with open(LABEL_ENCODER_PATH, 'wb') as f:
    f.write(pickle.dumps(le))
# Serialize the names of images in the training, validation, and test sets to json
print('Serializing the names of the images...')
train_names = training_df['image_name']
val_names = validation_df['image_name']
test_names = test_df['image_name']
dic = {'train_names': list(train_names), 'val_names': list(val_names), 'test_names': list(test_names)}
with open(NAMES_OF_IMAGES_IN_DATASET, 'w') as f:
    f.write(json.dumps(dic))
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,496
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/preprocessing/normal_aug.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 25 23:30:50 2021
@author: femiogundare
"""
import albumentations as A
# Baseline geometric augmentation: a random 90-degree rotation (70% of the
# time) followed by exactly one of a horizontal or a vertical flip.
# NOTE(review): OneOf has its own application probability (defaults to 0.5
# in albumentations) -- confirm the intended overall flip probability.
normal_aug = A.Compose([
A.RandomRotate90(p=0.7),
A.OneOf([
A.HorizontalFlip(p=1),
A.VerticalFlip(p=1)]
)
])
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,497
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/build/build_dataset.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 19 21:13:16 2021
@author: femiogundare
"""
def extract_coords(df):
    """Attach the patch coordinates (x, y) to *df* as integer columns.

    IDC patch paths end in "..._xNNN_yNNN_classC.png"; this splits the
    last four underscore-separated tokens of ``df.path`` and parses the
    x/y tokens.

    Args:
        df: dataframe with a ``path`` column of patch file paths.

    Returns:
        The same dataframe with "x" and "y" integer columns added.
    """
    coord = df.path.str.rsplit("_", n=4, expand=True)
    coord = coord.drop([0, 1, 4], axis=1)
    coord = coord.rename({2: "x", 3: "y"}, axis=1)
    # Strip the leading "x"/"y" marker case-insensitively and convert to
    # int.  Fixes two defects in the original: np.int (removed in NumPy
    # 1.24) was used although this module never imports numpy, and `case`
    # requires regex replacement in modern pandas, so regex=True is passed
    # explicitly.
    coord.loc[:, "x"] = coord.loc[:, "x"].str.replace("x", "", case=False, regex=True).astype(int)
    coord.loc[:, "y"] = coord.loc[:, "y"].str.replace("y", "", case=False, regex=True).astype(int)
    df.loc[:, "x"] = coord.x.values
    df.loc[:, "y"] = coord.y.values
    return df
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,498
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/predict.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 17 22:17:08 2021
@author: femiogundare
"""
# Import the required packages
import matplotlib
#matplotlib.use('Agg')
from matplotlib import pyplot as plt
import os
import h5py
import progressbar
import json
import numpy as np
import pandas as pd
from imutils import paths
import efficientnet.tfkeras as efn
from sklearn.metrics import roc_auc_score, confusion_matrix, roc_curve, precision_score, recall_score, f1_score
from tensorflow.keras.models import load_model
from utilities.preprocessing.stain_normalization import StainNormalization
from utilities.preprocessing.meanpreprocessor import MeanPreprocessor
from utilities.preprocessing.hematoxylin_eosin_aug import hematoxylin_eosin_aug
from utilities.preprocessing.zoom_aug import zoom_aug
from utilities.preprocessing.normal_aug import normal_aug
from utilities.io.hdf5datasetgenerator import HDF5DatasetGenerator
from utilities.metrics.metrics_for_scoring import *
from utilities.others import plot_confusion_matrix
from config import idc_config as config
# Initialize the Config class in the config script
configs = config.Config()
# Put all the arguments of the argparse in a dictionary by calling the get_config method of the Config class
configs_dict = configs.get_config()
# Paths to the HDF5 datasets produced by build_idc_dataset.py.
TRAIN_HDF5_PATH = configs_dict['training_hdf5_path']
TEST_HDF5_PATH = configs_dict['test_hdf5_path']
NUM_CLASSES = configs_dict['num_classes']
IMAGE_HEIGHT = configs_dict['image_height']
IMAGE_WIDTH = configs_dict['image_width']
#CROP_IMAGE_HEIGHT = configs_dict['crop_image_height']
#CROP_IMAGE_WIDTH = configs_dict['crop_image_width']
N_CHANNELS = configs_dict['n_channels']
# Number of test-time-augmentation passes over the test set.
TTA_STEPS = configs_dict['tta_steps']
BATCH_SIZE = configs_dict['batch_size']
NETWORK_NAME = configs_dict['network_name']
AUGMENTATION_TYPE = configs_dict['augmentation_type']
OUTPUT_DIR = configs_dict['output_dir']
# Artifacts produced by training / dataset building.
DATASET_MEAN_PATH = OUTPUT_DIR + '/idc_dataset_mean.json'
WEIGHTS_PATH = OUTPUT_DIR + '/weights/' + NETWORK_NAME + '.hdf5'
RESULT_DIR = configs_dict['result_dir']
PREDICTIONS_DIR = configs_dict['predictions_dir']
# Display names for the two classes, index-aligned with the labels (0, 1).
classes_names = ['Non-IDC', 'IDC']
# Get the test labels
print('Obtaining the test labels...')
# Copy into a numpy array so the HDF5 file is no longer needed afterwards.
testLabels = h5py.File(TEST_HDF5_PATH, mode='r')['labels']
testLabels = np.array(testLabels)
# Initialize the preprocessors
print('Initializing the preprocessors...')
#sn = StainNormalization()
# NOTE(review): this open() handle (and the h5py.File above) is never
# closed explicitly -- acceptable for a one-shot script, but worth a `with`.
means = json.loads(open(DATASET_MEAN_PATH, 'r').read())
mp = MeanPreprocessor(rMean=means['R'], gMean=means['G'], bMean=means['B'])
# Load the pretrained network.  compile=False skips restoring the training
# configuration; only inference is needed here.
print('Loading the model...')
model = load_model(WEIGHTS_PATH, compile=False)
print('Name of model: {}'.format(NETWORK_NAME))
# Select augmentation type to be performed during test-time augmentation.
# An unknown type now fails immediately with a clear error instead of
# leaving `aug` undefined and raising a NameError at first use.
if AUGMENTATION_TYPE == 'hematoxylin_eosin':
    aug = hematoxylin_eosin_aug
elif AUGMENTATION_TYPE == 'zoom_aug':
    aug = zoom_aug
elif AUGMENTATION_TYPE == 'normal_aug':
    aug = normal_aug
else:
    raise ValueError('Unknown augmentation_type: {}'.format(AUGMENTATION_TYPE))
# Initialize the test generator (and allow for test time augmentation to be performed)
print('Initializing the test generator...')
testGen = HDF5DatasetGenerator(
    TEST_HDF5_PATH, BATCH_SIZE, preprocessors=[mp], aug=aug, n_classes=NUM_CLASSES,
)
"""
# Predict on the test data
print('Predicting on the test data...')
predictions = model.predict_generator(
testGen.generator(), steps=(testGen.numImages//BATCH_SIZE),
max_queue_size=BATCH_SIZE*2
)
"""
# Perform predictions on the test data using test-time augmentation: run
# the generator TTA_STEPS times (each pass applies random augmentation)
# and average the per-step class probabilities.
print(f'Predicting on the test data with TTA of {TTA_STEPS} steps...')
predictions_with_tta = []
for step in range(TTA_STEPS):
    print('TTA step {}'.format(step + 1))
    step_preds = model.predict_generator(
        testGen.generator(), steps=(testGen.numImages // BATCH_SIZE),
        max_queue_size=BATCH_SIZE * 2
    )
    predictions_with_tta.append(step_preds)
# Element-wise mean over the TTA passes (same as sum()/TTA_STEPS).
predictions = np.mean(predictions_with_tta, axis=0)
# Check the model performance
print('Checking the model performance on the test data...')
conf_matrix = confusion_matrix(testLabels, predictions.argmax(axis=1))
# sklearn's matrix layout is [[tn, fp], [fn, tp]], matching this unpacking.
tn, fn, tp, fp = conf_matrix[0][0], conf_matrix[1][0], conf_matrix[1][1], conf_matrix[0][1]
# AUC uses the positive-class probability column, not the hard labels.
auc = roc_auc_score(testLabels, predictions[:, 1])
sensitivity = tp/(tp+fn)
specificity = tn/(tn+fp)
ppv = tp/(tp+fp)
npv = tn/(tn+fn)
# Youden's J statistic summarizes sensitivity and specificity together.
J = (sensitivity + specificity - 1)
print('AUC: {:.4f}'.format(auc))
print('Sensitivity: {:.4f}'.format(sensitivity))
print('Specificity: {:.4f}'.format(specificity))
print('Positive Predictive Value: {:.4f}'.format(ppv))
print('Negative Predictive Value: {:.4f}'.format(npv))
print("Youden's J Statistic: {:.4f}".format(J))
print('Confusion Matrix: \n{}'.format(conf_matrix))
# Store the predictions to a csv file, keyed by the image names saved at
# dataset-build time so rows can be traced back to source patches.
print('Storing the predictions to csv...')
names_of_images_in_dataset = OUTPUT_DIR + '/names_of_images.json'
# `with` closes the handle; the original leaked it via open(...).read().
with open(names_of_images_in_dataset) as f:
    names = json.loads(f.read())
names_of_test_images = names['test_names']
df = pd.DataFrame(
    dict(
        name=names_of_test_images,
        label=testLabels,
        prediction=predictions[:, 1]
    )
)
df.to_csv(PREDICTIONS_DIR + '/' + NETWORK_NAME + '_predictions.csv', index=False)
# Plot the confusion matrix
plt.figure()
plot_confusion_matrix(conf_matrix, classes=classes_names,
title='Confusion matrix')
#plt.show()
plt.savefig(RESULT_DIR + '/'+NETWORK_NAME + '/confusion_matrix.png')
# Plot the ROC Curve and save to png
plt.figure()
fpr, tpr, _ = roc_curve(testLabels, predictions[:, 1], pos_label=1)
# NOTE(review): the bare 'seaborn' style name was removed in matplotlib
# >= 3.6 (replaced by 'seaborn-v0_8') -- confirm the pinned matplotlib.
plt.style.use('seaborn')
plt.plot(fpr, tpr, color='orange', label='ROC curve (area = %0.4f)' % auc)
# Diagonal reference line for a no-skill classifier.
plt.plot([0, 1], [0, 1], color='blue', linestyle='--')
plt.title('Receiving Operating Characteristic Curve')
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.legend(loc="lower right")
#plt.show()
plt.savefig(RESULT_DIR + '/'+NETWORK_NAME+ '/roc_curve.png')
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,499
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/ensemble.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 20 00:27:33 2021
@author: femiogundare
"""
import os
import json
import itertools
import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
from sklearn.metrics import roc_auc_score, roc_curve, confusion_matrix
from utilities.others import plot_confusion_matrix
from config import idc_config as config
# Initialize the Config class in the config script
configs = config.Config()
# Put all the arguments of the argparse in a dictionary by calling the
# get_config method of the Config class
configs_dict = configs.get_config()
OUTPUT_DIR = configs_dict['output_dir']
RESULT_DIR = configs_dict['result_dir']
PREDICTIONS_DIR = configs_dict['predictions_dir']
classes_names = ['Non-IDC', 'IDC']
# Check the files in the predictions directory
print(os.listdir(PREDICTIONS_DIR))
# NOTE(review): network_names is assumed to line up with os.listdir()
# order, which is not guaranteed -- verify the prediction files match.
network_names = ['EfficientNetB3', 'EfficientNetB4', 'EfficientNetB5', 'ResNet50']
# Read the csv files of predictions made by the networks.  The directory
# listing is taken once instead of three separate times.
prediction_files = os.listdir(PREDICTIONS_DIR)
combined_predictions = [
    pd.read_csv(PREDICTIONS_DIR + '/' + pred_file) for pred_file in prediction_files
]
# One column of positive-class probabilities per network.
x = np.zeros((len(combined_predictions[0]), len(prediction_files)))
for k in range(len(prediction_files)):
    x[:, k] = combined_predictions[k].prediction.values
# Ground-truth labels are the same in every file; take the first.
target = combined_predictions[0].label.values
### ENSEMBLE==========
# Compute the average of the predictions of the networks
avg_preds = (x[:, 0] + x[:, 1] + x[:, 2] + x[:, 3])/4
# Compute the AUC of the ensemble
ensemble_auc_score = roc_auc_score(target, avg_preds)
print('AUC: {:.4f}'.format(ensemble_auc_score))
# Compute the Sensitivity, Specificity, PPV, NPV, and J statistic of the ensemble
# Binarize the averaged probabilities at the 0.5 threshold.
pred_labels = [1 if pred>=0.5 else 0 for pred in avg_preds]
cnf_matrix = confusion_matrix(target, pred_labels)
# sklearn's matrix layout is [[tn, fp], [fn, tp]], matching this unpacking.
tn, fn, tp, fp = cnf_matrix[0][0], cnf_matrix[1][0], cnf_matrix[1][1], cnf_matrix[0][1]
ensemble_sensitivity_score = tp/(tp+fn)
ensemble_specificity_score = tn/(tn+fp)
ensemble_ppv_score = tp/(tp+fp)
ensemble_npv_score = tn/(tn+fn)
ensemble_J_score = (ensemble_sensitivity_score + ensemble_specificity_score - 1)
print('Sensitivity: {:.4f}'.format(ensemble_sensitivity_score))
print('Specificity: {:.4f}'.format(ensemble_specificity_score))
print('Positive Predictive Value: {:.4f}'.format(ensemble_ppv_score))
print('Negative Predictive Value: {:.4f}'.format(ensemble_npv_score))
print("Youden's J statistic: {:.4f}".format(ensemble_J_score))
print('Confusion Matrix: \n{}'.format(cnf_matrix))
# Store the ensemble predictions to a csv file, keyed by the image names
# saved at dataset-build time.
print('Storing the predictions to csv...')
names_of_images_in_dataset = OUTPUT_DIR + '/names_of_images.json'
# `with` closes the handle; the original leaked it via open(...).read().
with open(names_of_images_in_dataset) as f:
    names = json.loads(f.read())
names_of_test_images = names['test_names']
df = pd.DataFrame(
    dict(
        name=names_of_test_images,
        label=target,
        prediction=avg_preds
    )
)
df.to_csv(PREDICTIONS_DIR + '/ensemble_avg_predictions.csv', index=False)
### COMPOSE A DATAFRAME FOR THE SCORES OF THE NEURAL NETWORKS AND ENSEMBLE
auc_scores = []
sensitivity_scores = []
specificity_scores = []
ppv_scores = []
npv_scores = []
youden_indices = []
for k in range(x.shape[1]):
    print('Computing scores for {}...'.format(network_names[k]))
    predictions = x[:, k]
    # Binarize at the 0.5 probability threshold.  The original built this
    # list twice under two names; the duplicate assignment is removed.
    prediction_labels = [1 if pred >= 0.5 else 0 for pred in predictions]
    # Loop-local names so the ensemble's cnf_matrix / tn..fp computed above
    # are not clobbered (the original overwrote them, so the later
    # "ensemble" confusion-matrix plot actually showed the last network's).
    net_cnf_matrix = confusion_matrix(target, prediction_labels)
    net_tn, net_fn = net_cnf_matrix[0][0], net_cnf_matrix[1][0]
    net_tp, net_fp = net_cnf_matrix[1][1], net_cnf_matrix[0][1]
    sensitivity, specificity = net_tp / (net_tp + net_fn), net_tn / (net_tn + net_fp)
    sensitivity_scores.append(round(sensitivity, 4))
    specificity_scores.append(round(specificity, 4))
    auc_scores.append(round(roc_auc_score(target, predictions), 4))
    ppv_scores.append(round(net_tp / (net_tp + net_fp), 4))
    npv_scores.append(round(net_tn / (net_tn + net_fn), 4))
    youden_indices.append(round(sensitivity + specificity - 1, 4))
# Print one summary line per network.
for k in range(x.shape[1]):
    print('{}: Sensitivity = {}, Specificty = {}, AUC = {}, PPV={}, NPV={}, Youden J Index = {}'.format(
        network_names[k], round(sensitivity_scores[k], 4),
        round(specificity_scores[k], 4), round(auc_scores[k], 4),
        round(ppv_scores[k], 4), round(npv_scores[k], 4), round(youden_indices[k], 4)
    ))
# Store the results in a csv file: one row per network plus the ensemble.
# List concatenation replaces the original's hand-written [0]..[3] indexing
# of each score list (already rounded; the ensemble scores are rounded here).
results = pd.DataFrame({
    'Neural Network': network_names + ['Ensemble (Average)'],
    'AUC (%)': auc_scores + [round(ensemble_auc_score, 4)],
    'Sensitivity (%)': sensitivity_scores + [round(ensemble_sensitivity_score, 4)],
    'Specificity (%)': specificity_scores + [round(ensemble_specificity_score, 4)],
    'PPV (%)': ppv_scores + [round(ensemble_ppv_score, 4)],
    'NPV (%)': npv_scores + [round(ensemble_npv_score, 4)],
    'J Statistic (%)': youden_indices + [round(ensemble_J_score, 4)],
})
results.set_index('Neural Network', drop=True, inplace=True)
# Express every score as a percentage.
results = 100*results
results.to_csv(RESULT_DIR+'/scores.csv')
print(results)
# Plot the confusion matrix of the ensemble
# NOTE(review): confirm cnf_matrix here still holds the ensemble's matrix
# and has not been overwritten by the per-network scoring loop above.
plt.figure()
plot_confusion_matrix(cnf_matrix, classes=classes_names,
title='Confusion matrix')
#plt.show()
plt.savefig(RESULT_DIR + '/ensemble/confusion_matrix.png')
# Plot the ROC Curve of the ensemble and save to png
plt.figure()
fpr, tpr, _ = roc_curve(target, avg_preds, pos_label=1)
# NOTE(review): the bare 'seaborn' style name was removed in matplotlib
# >= 3.6 (replaced by 'seaborn-v0_8') -- confirm the pinned matplotlib.
plt.style.use('seaborn')
plt.plot(fpr, tpr, color='orange', label='ROC curve (area = %0.4f)' % ensemble_auc_score)
# Diagonal reference line for a no-skill classifier.
plt.plot([0, 1], [0, 1], color='blue', linestyle='--')
plt.title('Receiving Operating Characteristic Curve')
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.legend(loc="lower right")
#plt.show()
plt.savefig(RESULT_DIR + '/ensemble/roc_curve.png')
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,500
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/metrics/metrics_for_scoring.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 5 21:35:47 2021
@author: femiogundare
"""
# Performance metrics
import numpy as np
from sklearn.metrics import confusion_matrix, roc_curve, roc_auc_score
"""
def optimal_threshold(y_true, y_prob):
# Returns the optimal threshold based on the false positive rate, true positive rate, and thresholds.
fpr, tpr, thresholds = roc_curve(y_true, y_prob)
opt_i = np.argmax(tpr - fpr)
return thresholds[opt_i]
"""
def optimal_threshold(y_true, y_prob):
    """Return the ROC threshold that maximizes Youden's J statistic.

    Args:
        y_true: ground-truth binary labels.
        y_prob: predicted probabilities for the positive class.

    Returns:
        The threshold at which (tpr - fpr) is largest.
    """
    fpr, tpr, thresholds = roc_curve(y_true, y_prob)
    # Youden's J is maximal where the ROC curve lies farthest above chance.
    return thresholds[np.argmax(tpr - fpr)]
def optimal_conf_matrix(y_true, y_prob):
# Returns the optimal confusion matrix based on the optimal threshold.
c = confusion_matrix(y_true, (y_prob > optimal_threshold(y_true, y_prob))*1)
return c
def opt_sensitivity_score(y_true, y_prob):
# Returns the optimal sensitivity score based on the optimal threshold.
c = optimal_conf_matrix(y_true, y_prob)
return c[1][1]/(c[1][1] + c[1][0])
def opt_specificity_score(y_true, y_prob):
# Returns the optimal specificity score based on the optimal threshold.
c = optimal_conf_matrix(y_true, y_prob)
return c[0][0]/(c[0][0] + c[0][1])
def opt_ppv_score(y_true, y_prob):
# Returns the optimal ppv score based on the optimal threshold.
c = optimal_conf_matrix(y_true, y_prob)
return c[1][1]/(c[1][1] + c[0][1])
def opt_npv_score(y_true, y_prob):
# Returns the optimal npv score based on the optimal threshold.
c = optimal_conf_matrix(y_true, y_prob)
return c[0][0]/(c[0][0] + c[1][0])
def opt_J_score(y_true, y_prob):
# Returns the optimal specificity score based on the optimal threshold.
sensitivity = opt_sensitivity_score(y_true, y_prob)
specificity = opt_specificity_score(y_true, y_prob)
return (sensitivity + specificity - 1)
def opt_auc_score(y_true, y_prob):
    """AUC computed from the hard predictions at the optimal threshold (not the raw probabilities)."""
    hard_preds = (y_prob > optimal_threshold(y_true, y_prob)) * 1
    return roc_auc_score(y_true, hard_preds)
def opt_threshold_score(y_true, y_prob):
    """Alias for optimal_threshold, exposed as a named scoring function."""
    return optimal_threshold(y_true, y_prob)
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,501
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/nn/neural_network.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 5 21:03:04 2021
@author: femiogundare
"""
import efficientnet.tfkeras as efn
from tensorflow.keras import backend as K
from tensorflow.keras.applications import ResNet50, DenseNet121
from tensorflow.keras.models import Model
from tensorflow.keras.regularizers import l2
from tensorflow.keras.layers import Dense, BatchNormalization, Activation, Dropout, GlobalAveragePooling2D, GlobalMaxPooling2D, Flatten, Concatenate
class NeuralNetwork:
    """
    Convolutional Neural Network architectures to train the histopathology images on.
    """

    # Supported backbone names mapped to their constructors.  A dict dispatch
    # replaces the long if/elif chain and makes the supported set explicit.
    _BACKBONES = {
        'EfficientNetB0': efn.EfficientNetB0,
        'EfficientNetB1': efn.EfficientNetB1,
        'EfficientNetB2': efn.EfficientNetB2,
        'EfficientNetB3': efn.EfficientNetB3,
        'EfficientNetB4': efn.EfficientNetB4,
        'EfficientNetB5': efn.EfficientNetB5,
        'EfficientNetB6': efn.EfficientNetB6,
        'ResNet50': ResNet50,
        'DenseNet121': DenseNet121,
    }

    @staticmethod
    def build(name, width, height, depth, n_classes, reg=0.8):
        """
        Build a classifier on top of an ImageNet-pretrained backbone.

        Args:
            name: name of the backbone network (a key of _BACKBONES)
            width: width of the input images
            height: height of the input images
            depth: number of channels of the input images
            n_classes: number of output classes
            reg: L2 regularization strength for the output layer

        Returns:
            An uncompiled tf.keras Model.

        Raises:
            ValueError: if `name` is not one of the supported architectures.
        """
        # Channels-last (TensorFlow) layout by default.
        inputShape = (height, width, depth)
        chanDim = -1
        # Channels-first layout (e.g. Theano backend).
        if K.image_data_format() == "channels_first":
            inputShape = (depth, height, width)
            chanDim = 1
        # Previously an unrecognised name left `base_model` undefined and the
        # code failed later with a confusing NameError; fail fast instead.
        try:
            constructor = NeuralNetwork._BACKBONES[name]
        except KeyError:
            raise ValueError(
                "Unknown network name: {!r}. Valid options: {}".format(
                    name, sorted(NeuralNetwork._BACKBONES)))
        base_model = constructor(weights='imagenet', include_top=False,
                                 input_shape=inputShape)
        # Global-average-pooling head with dropout, then a softmax classifier.
        x = GlobalAveragePooling2D()(base_model.output)
        x = Dropout(0.5)(x)
        output = Dense(n_classes, kernel_regularizer=l2(reg),
                       activation='softmax')(x)
        # Tie the backbone inputs to the new classification head.
        model = Model(inputs=base_model.inputs, outputs=output)
        return model
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,502
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/preprocessing/stain_normalization.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 19 21:52:54 2021
@author: femiogundare
"""
import numpy as np
class StainNormalization:
    """
    Hematoxylin & eosin stain normalization preprocessor.

    Adopted and modified from "Classification of breast cancer histology images
    using Convolutional Neural Networks", Teresa Araújo, Guilherme Aresta,
    Eduardo Castro, José Rouco, Paulo Aguiar, Catarina Eloy, António Polónia,
    Aurélio Campilho. https://doi.org/10.1371/journal.pone.0177544
    Performs staining normalization.
    """

    def __init__(self, Io=240, beta=0.15, alpha=1):
        # Io: transmitted-light intensity used in the optical density transform.
        # beta: OD threshold below which a pixel is treated as transparent.
        # alpha: percentile used to pick robust extreme stain angles.
        self.Io = Io
        self.beta = beta
        self.alpha = alpha

    def preprocess(self, img):
        """
        Normalize the staining of an image.

        # Arguments
            img: Numpy image array of shape (h, w, c) with integer pixel values.

        # Returns
            Stain-normalized uint8 Numpy image array of the same shape.  If at
            most one pixel exceeds the transparency threshold, the original
            image is returned unchanged (as uint8).
        """
        # Reference H&E optical-density matrix and reference maximum stain
        # concentrations from the paper.
        HERef = np.array([[0.5626, 0.2159],
                          [0.7201, 0.8012],
                          [0.4062, 0.5581]])
        maxCRef = np.array([1.9705, 1.0308])
        h, w, c = img.shape
        image = img.reshape(h * w, c)
        # Optical density; the +1 avoids log(0) on pure-black pixels.
        OD = -np.log((image.astype("uint16") + 1) / self.Io)
        # Keep only pixels whose OD exceeds beta in every channel.
        ODhat = OD[(OD >= self.beta).all(axis=1)]
        if (OD >= self.beta).all(axis=1).sum() <= 1:
            # Not enough stained pixels to estimate the stain vectors.
            return img.astype('uint8')
        # Principal directions of the OD point cloud.  The eigenvalues are
        # unused, so discard them instead of binding an unused `W`.
        _, V = np.linalg.eig(np.cov(ODhat, rowvar=False))
        Vec = -V.T[:2][::-1].T
        # Project onto the plane of the two dominant directions.
        That = np.dot(ODhat, Vec)
        phi = np.arctan2(That[:, 1], That[:, 0])
        minPhi = np.percentile(phi, self.alpha)
        maxPhi = np.percentile(phi, 100 - self.alpha)
        vMin = np.dot(Vec, np.array([np.cos(minPhi), np.sin(minPhi)]))
        vMax = np.dot(Vec, np.array([np.cos(maxPhi), np.sin(maxPhi)]))
        # Order the stain vectors (hematoxylin first).
        if vMin[0] > vMax[0]:
            HE = np.array([vMin, vMax])
        else:
            HE = np.array([vMax, vMin])
        HE = HE.T
        Y = OD.reshape(h * w, c).T
        # rcond=None pins the documented default and silences the
        # FutureWarning that the bare lstsq call used to emit.
        C = np.linalg.lstsq(HE, Y, rcond=None)
        # Normalize concentrations by their 99th percentile, then rescale to
        # the reference maxima.
        maxC = np.percentile(C[0], 99, axis=1)
        C = C[0] / maxC[:, None]
        C = C * maxCRef[:, None]
        # Back-project to intensity space and clip to valid pixel range.
        Inorm = self.Io * np.exp(-np.dot(HERef, C))
        Inorm = Inorm.T.reshape(h, w, c).clip(0, 255).astype("uint8")
        return Inorm
"""
def normalize_staining(img):
#Adopted from "Classification of breast cancer histology images using Convolutional Neural Networks",
#Teresa Araújo , Guilherme Aresta, Eduardo Castro, José Rouco, Paulo Aguiar, Catarina Eloy, António Polónia,
#Aurélio Campilho. https://doi.org/10.1371/journal.pone.0177544
#Performs staining normalization.
# Arguments
# img: Numpy image array.
# Returns
# Normalized Numpy image array.
Io = 240
beta = 0.15
alpha = 1
HERef = np.array([[0.5626, 0.2159],
[0.7201, 0.8012],
[0.4062, 0.5581]])
maxCRef = np.array([1.9705, 1.0308])
h, w, c = img.shape
img = img.reshape(h * w, c)
OD = -np.log((img.astype("uint16") + 1) / Io)
ODhat = OD[(OD >= beta).all(axis=1)]
W, V = np.linalg.eig(np.cov(ODhat, rowvar=False))
Vec = -V.T[:2][::-1].T # desnecessario o sinal negativo
That = np.dot(ODhat, Vec)
phi = np.arctan2(That[:, 1], That[:, 0])
minPhi = np.percentile(phi, alpha)
maxPhi = np.percentile(phi, 100 - alpha)
vMin = np.dot(Vec, np.array([np.cos(minPhi), np.sin(minPhi)]))
vMax = np.dot(Vec, np.array([np.cos(maxPhi), np.sin(maxPhi)]))
if vMin[0] > vMax[0]:
HE = np.array([vMin, vMax])
else:
HE = np.array([vMax, vMin])
HE = HE.T
Y = OD.reshape(h * w, c).T
C = np.linalg.lstsq(HE, Y)
maxC = np.percentile(C[0], 99, axis=1)
C = C[0] / maxC[:, None]
C = C * maxCRef[:, None]
Inorm = Io * np.exp(-np.dot(HERef, C))
Inorm = Inorm.T.reshape(h, w, c).clip(0, 255).astype("uint8")
return Inorm
"""
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,503
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/preprocessing/hematoxylin_eosin_aug.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 20 01:51:20 2021
@author: femiogundare
"""
import numpy as np
def hematoxylin_eosin_aug(img, low=0.7, high=1.3, seed=None):
    """
    Random hematoxylin-eosin color augmentation.

    Based on "Quantification of histochemical staining by color deconvolution",
    Arnout C. Ruifrok, Ph.D. and Dennis A. Johnston, Ph.D.
    http://www.math-info.univ-paris5.fr/~lomn/Data/2017/Color/Quantification_of_histochemical_staining.pdf

    # Arguments
        img: Numpy image array.
        low: Low boundary for augmentation multiplier
        high: High boundary for augmentation multiplier
        seed: Optional seed for the random multipliers.

    # Returns
        Augmented Numpy image array.
    """
    # Color-deconvolution matrix D and its stain matrix M.
    D = np.array([[1.88, -0.07, -0.60],
                  [-1.02, 1.13, -0.48],
                  [-0.55, -0.13, 1.57]])
    M = np.array([[0.65, 0.70, 0.29],
                  [0.07, 0.99, 0.11],
                  [0.27, 0.57, 0.78]])
    Io = 240
    rows, cols, channels = img.shape
    # Optical density in log10 space; +1 avoids log(0).
    OD = -np.log10((img.astype("uint16") + 1) / Io)
    # Per-pixel stain concentrations via color deconvolution.
    C = np.dot(D, OD.reshape(rows * cols, channels).T).T
    # Random multipliers for the H and E channels; the third channel stays 1.
    multipliers = np.ones(3)
    multipliers[:2] = np.random.RandomState(seed).uniform(low=low, high=high, size=2)
    # Recompose the image from the perturbed concentrations.
    augmented = np.dot(C * multipliers, M)
    augmented = Io * np.exp(-augmented * np.log(10)) - 1
    return augmented.reshape(rows, cols, channels).clip(0, 255).astype("uint8")
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,504
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/train_model.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 17 18:34:39 2021
@author: femiogundare
"""
# Import the required libraries and packages
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import warnings
warnings.filterwarnings('ignore')
import os
import sys
import h5py
import json
import numpy as np
#import albumentations as A
#from albumentations import Compose, RandomRotate90, Transpose, Flip, OneOf, CLAHE, IAASharpen, IAAEmboss, RandomBrightnessContrast, JpegCompression, Blur, GaussNoise, HueSaturationValue, ShiftScaleRotate, Normalize
import tensorflow as tf
from tensorflow.keras.optimizers import Adam, SGD, RMSprop
from tensorflow.keras.metrics import AUC
from tensorflow.keras.losses import BinaryCrossentropy
from tensorflow.keras.callbacks import ModelCheckpoint, LearningRateScheduler, TerminateOnNaN, EarlyStopping, ReduceLROnPlateau
from utilities.preprocessing.simplepreprocessor import SimplePreprocessor
from utilities.preprocessing.imagetoarraypreprocessor import ImageToArrayPreprocessor
from utilities.preprocessing.meanpreprocessor import MeanPreprocessor
from utilities.preprocessing.stain_normalization import StainNormalization
from utilities.preprocessing.hematoxylin_eosin_aug import hematoxylin_eosin_aug
from utilities.preprocessing.zoom_aug import zoom_aug
from utilities.preprocessing.normal_aug import normal_aug
from utilities.io.hdf5datasetgenerator import HDF5DatasetGenerator
from utilities.nn.neural_network import NeuralNetwork
from utilities.callbacks.poly_decay import poly_decay
from utilities.callbacks.cyclical_learning_rate import CyclicLR
from utilities.callbacks.training_monitor import TrainingMonitor
from utilities.metrics.metrics_for_compiling import sensitivity, specificity
from config import idc_config as config
# Initialize the Config class in the config script
configs = config.Config()
# Put all the arguments of the argparse in a dictionary by calling the get_config method of the Config class
configs_dict = configs.get_config()
# Load the supplied arguments from the config file
print('Loading the supplied arguments from the config file...')
TRAIN_HDF5_PATH = configs_dict['training_hdf5_path']
VAL_HDF5_PATH = configs_dict['validation_hdf5_path']
NUM_CLASSES = configs_dict['num_classes']
IMAGE_HEIGHT = configs_dict['image_height']
IMAGE_WIDTH = configs_dict['image_width']
N_CHANNELS = configs_dict['n_channels']
MIN_LR = configs_dict['min_lr']
MAX_LR = configs_dict['max_lr']
BATCH_SIZE = configs_dict['batch_size']
STEP_SIZE = configs_dict['step_size']
CLR_METHOD = configs_dict['clr_method']  # default is 'triangular'
NUM_EPOCHS = configs_dict['n_epochs']
FACTOR = configs_dict['factor']
PATIENCE = configs_dict['patience']
NETWORK_NAME = configs_dict['network_name']
AUGMENTATION_TYPE = configs_dict['augmentation_type']
# Resolve the optimizer name into a configured optimizer instance; exit on
# an unsupported name.
OPTIMIZER = configs_dict['optimizer']
if OPTIMIZER=='Adam':
    OPTIMIZER = Adam(learning_rate=MAX_LR)
elif OPTIMIZER=='SGD':
    OPTIMIZER = SGD(learning_rate=MAX_LR, momentum=0.9, nesterov=True)
elif OPTIMIZER=='RMSprop':
    OPTIMIZER = RMSprop(learning_rate=MAX_LR)
else:
    print('Specified optimizer not allowed for this task')
    sys.exit(-1)
# Derived paths for every artifact this run produces (weights, plots, model,
# training-monitor output), all rooted at the configured output directory.
OUTPUT_DIR = configs_dict['output_dir']
DATASET_MEAN_PATH = OUTPUT_DIR + '/idc_dataset_mean.json'
CLASS_WEIGHTS_PATH = OUTPUT_DIR + '/idc_dataset_class_weight.json'
WEIGHTS_PATH = OUTPUT_DIR + '/weights/' + NETWORK_NAME + '.hdf5'
PLOT_PATH = OUTPUT_DIR + '/plots/' + NETWORK_NAME + '.png'
MODEL_PATH = OUTPUT_DIR + '/models/' + NETWORK_NAME + '.hdf5'
MONITOR_DIR = OUTPUT_DIR + '/monitor/' + NETWORK_NAME
MONITOR_PLOTS_PATH = MONITOR_DIR + '/fig_path'
MONITOR_JSON_PATH = MONITOR_DIR + '/json_path'
# Initialize the preprocessors
sp = SimplePreprocessor(width=IMAGE_WIDTH, height=IMAGE_HEIGHT)
itap = ImageToArrayPreprocessor()
sn = StainNormalization()
# Load the per-channel training-set means.  A context manager closes the file
# handle that the previous open(...).read() leaked.
with open(DATASET_MEAN_PATH, 'r') as mean_file:
    means = json.load(mean_file)
mp = MeanPreprocessor(rMean=means['R'], gMean=means['G'], bMean=means['B'])
# Calculate the frequencies of the idc and non idc classes in the training labels;
# the calculated frequencies will serve as the class weights in the generator function.
print('Computing the frequencies and weights of each class...')
# Materialize the labels into memory, then close the HDF5 file (the previous
# code kept it open for the rest of the run).
with h5py.File(TRAIN_HDF5_PATH, mode='r') as train_db:
    trainLabels = np.array(train_db['labels'])
train_idc_freq = trainLabels.sum(axis=0)/trainLabels.shape[0]
train_non_idc_freq = 1-train_idc_freq
# Weight each class by the other's frequency so the rarer class counts more.
train_idc_weight, train_non_idc_weight = train_non_idc_freq, train_idc_freq
print(f'The IDC class and the Non-IDC class have weights of {train_idc_weight} and {train_non_idc_weight} respectively in the training set')
# Serialize the weights to a json file
print('Serializing the weights to json...')
dic = {'train_idc_weight' : train_idc_weight, 'train_non_idc_weight' : train_non_idc_weight}
with open(CLASS_WEIGHTS_PATH, 'w') as weights_file:
    weights_file.write(json.dumps(dic))
# Select the augmentation function.  The config declares the choice 'zoom'
# (see idc_config) while this check only matched 'zoom_aug', so picking the
# zoom augmentation left `aug` undefined and crashed later with a NameError.
# Accept both spellings and fail fast on anything else, mirroring the
# optimizer-selection error handling above.
if AUGMENTATION_TYPE=='hematoxylin_eosin':
    aug = hematoxylin_eosin_aug
elif AUGMENTATION_TYPE in ('zoom', 'zoom_aug'):
    aug = zoom_aug
elif AUGMENTATION_TYPE=='normal_aug':
    aug = normal_aug
else:
    print('Specified augmentation type not allowed for this task')
    sys.exit(-1)
# Initialize the training and validation dataset generators.
print('Initializing the training and validation generators...')
trainGen = HDF5DatasetGenerator(
    dbPath=TRAIN_HDF5_PATH, batchSize=BATCH_SIZE,
    preprocessors=[sp, mp, itap], aug=aug, n_classes=NUM_CLASSES,
)
# The validation generator deliberately receives no augmentation.
valGen = HDF5DatasetGenerator(
    dbPath=VAL_HDF5_PATH, batchSize=BATCH_SIZE,
    preprocessors=[sp, mp, itap], n_classes=NUM_CLASSES,
)
print('Model: {}'.format(NETWORK_NAME))
# Initialize and compile the model
print('Compiling the model...')
# Track the custom sensitivity/specificity metrics plus Keras' built-in AUC.
metrics = [sensitivity, specificity, AUC()]
model = NeuralNetwork.build(name=NETWORK_NAME, width=IMAGE_WIDTH, height=IMAGE_HEIGHT, depth=N_CHANNELS,
                            n_classes=NUM_CLASSES
                            )
print(model.summary())
# Label smoothing (0.1) softens the one-hot targets to regularize training.
model.compile(loss=BinaryCrossentropy(label_smoothing=0.1),
              optimizer=OPTIMIZER,
              metrics=metrics
              )
# Initialize the list of callbacks
print('Initializing the list of callbacks...')
print('1. Learning Rate')
print(f'Learning rate to be reduced by a factor of {FACTOR} if loss does not decrease in {PATIENCE} epochs')
lr_schedule = ReduceLROnPlateau(monitor='val_loss',
factor=FACTOR,
patience=PATIENCE,
verbose=1,
mode='auto',
min_lr=MIN_LR,)
"""
if NETWORK_NAME in ['EfficientNetB3', 'EfficientNetB4', 'EfficientNetB5']:
# Set learning rate schudule to Cyclic Learning Rate
print(f'Using {CLR_METHOD} with a minimum learning rate of {MIN_LR}, maximum learning rate of {MAX_LR} and step size of {STEP_SIZE}')
lr_schedule = CyclicLR(
mode=CLR_METHOD,
base_lr=MIN_LR,
max_lr=MAX_LR,
step_size= STEP_SIZE * (trainGen.numImages // BATCH_SIZE)
)
else:
# Reduce learning rate on plateau
print(f'Learning rate to be reduced by a factor of {FACTOR} if loss does not decrease in {PATIENCE} epochs')
lr_schedule = ReduceLROnPlateau(monitor='val_loss',
factor=FACTOR,
patience=PATIENCE,
verbose=1,
mode='auto',
min_lr=MIN_LR,)
"""
print('2. Model Checkpoint')
model_checkpoint = ModelCheckpoint(WEIGHTS_PATH, monitor='val_loss', mode='min', save_best_only=True, verbose=1)
print('3. Training Monitor')
print("[INFO process ID: {}]".format(os.getpid()))
figPath = os.path.sep.join([MONITOR_PLOTS_PATH, "{}.png".format(os.getpid())])
jsonPath = os.path.sep.join([MONITOR_JSON_PATH, "{}.json".format(os.getpid())])
training_monitor = TrainingMonitor(fig_path=figPath, json_path=jsonPath)
print('4. Terminate on NaN')
terminate_on_nan = TerminateOnNaN()
print('5. Early Stopping')
early_stopping = EarlyStopping(monitor='val_loss', patience=4, mode='min', verbose=1, restore_best_weights=True)
callbacks = [lr_schedule, training_monitor, terminate_on_nan, early_stopping, model_checkpoint]
# Check to see if a GPU is available for training or not
print('GPU is', 'Available' if tf.test.is_gpu_available() else 'Not Available')
# Train the model
# NOTE(review): fit_generator is deprecated in TF2 -- Model.fit accepts
# generators directly; consider migrating.
print('Training the model...')
H = model.fit_generator(generator=trainGen.generator(),
                        steps_per_epoch=trainGen.numImages//BATCH_SIZE,
                        validation_data=valGen.generator(),
                        validation_steps=valGen.numImages//BATCH_SIZE,
                        epochs=NUM_EPOCHS, callbacks=callbacks, verbose=1,
                        class_weight={0:train_non_idc_weight, 1:train_idc_weight}
                        )
print('Serializing the model...')
model.save(MODEL_PATH, overwrite=True)
# Close the HDF5 datasets
trainGen.close()
valGen.close()
# Loss and AUC Curves for the trained model (AUC on the left axis, loss on a
# twinned right axis; best epochs annotated).
plt.figure()
plt.plot(np.arange(0, len(H.history['auc'])), H.history['auc'], '-o', label='Train AUC', color='#ff7f0e')
plt.plot(np.arange(0, len(H.history['val_auc'])), H.history['val_auc'], '-o', label='Val AUC', color='#1f77b4')
# Mark the epoch with the maximum validation AUC.
x = np.argmax( H.history['val_auc'] ); y = np.max( H.history['val_auc'] )
xdist = plt.xlim()[1] - plt.xlim()[0]; ydist = plt.ylim()[1] - plt.ylim()[0]
plt.scatter(x,y,s=200,color='#1f77b4'); plt.text(x-0.03*xdist,y-0.13*ydist,'max auc\n%.2f'%y,size=14)
plt.ylabel('AUC',size=14); plt.xlabel('Epoch',size=14)
plt.legend(loc=2)
plt2 = plt.gca().twinx()
plt2.plot(np.arange(0, len(H.history['loss'])), H.history['loss'], '-o', label='Train Loss', color='#2ca02c')
plt2.plot(np.arange(0, len(H.history['val_loss'])), H.history['val_loss'], '-o', label='Val Loss', color='#d62728')
# Mark the epoch with the minimum validation loss.
x = np.argmin( H.history['val_loss'] ); y = np.min( H.history['val_loss'] )
ydist = plt.ylim()[1] - plt.ylim()[0]
plt.scatter(x,y,s=200,color='#d62728'); plt.text(x-0.03*xdist,y+0.05*ydist,'min loss',size=14)
plt.ylabel('Loss',size=14)
plt.title(f'Training AUC and Loss Curves ({NETWORK_NAME})',size=18)
plt.legend(loc=3)
# NOTE(review): with the Agg backend selected at the top of the file,
# plt.show() is a no-op; savefig below still writes the figure.
plt.show()
plt.savefig(PLOT_PATH)
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,505
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/callbacks/poly_decay.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 17 20:03:24 2021
@author: femiogundare
"""
from config import idc_config as config
# Pull the schedule parameters from the shared project config once at import.
configs = config.Config()
configs_dict = configs.get_config()
NUM_EPOCHS = configs_dict['n_epochs']  # total epochs the decay spans
INIT_LR = configs_dict['max_lr']       # starting learning rate
def poly_decay(epoch):
    """Polynomial (power-2) learning rate decay.

    Args:
        epoch: Zero-based index of the current epoch.

    Returns:
        The decayed learning rate for this epoch.
    """
    # Fraction of training remaining, raised to the polynomial power (2).
    remaining = 1 - (epoch / float(NUM_EPOCHS))
    return INIT_LR * remaining ** 2.0
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,506
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/config/idc_config.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 19 20:06:04 2021
@author: femiogundare
"""
import sys
import argparse
class Config:
    """Command-line configuration for the IDC diagnosis pipeline.

    Attributes:
        parser: argparse parser that reads all config
        args: parsed arguments from the argument parser
        config: settings saved in pairs like key:value
    """
    def __init__(self):
        """Load common and customized settings."""
        super(Config, self).__init__()
        # NOTE(review): the description previously read 'Skin Cancer
        # Classification' -- a copy-paste leftover from another project.
        self.parser = argparse.ArgumentParser(description='Invasive Ductal Carcinoma Classification')
        self.config = {}
        # add settings via the parser
        self._add_common_setting()
        self._add_customized_setting()
        # parse the command line
        self.args = self.parser.parse_args()
        # load parsed values into the config dict
        self._load_common_setting()
        self._load_customized_setting()

    def _add_common_setting(self):
        """Register the directory, hyperparameter and image arguments."""
        # define the data directory --- BASEPATH
        self.parser.add_argument(
            '--base_path', default='C:\\Users\\Dell\\Desktop\\CV Projects\\Invasive Ductal Carcinoma Diagnosis/data', type=str, help='data directory'
        )
        # define the path to the images
        self.parser.add_argument(
            '--images_dir', default='C:\\Users\\Dell\\Desktop\\CV Projects\\Invasive Ductal Carcinoma Diagnosis\\data/IDC_regular_ps50_idx5', type=str, help='path to the images'
        )
        # define the path to the training hdf5 dataset
        self.parser.add_argument(
            '--training_hdf5_path', default='C:\\Users\\Dell\\Desktop\\CV Projects\\Invasive Ductal Carcinoma Diagnosis\\data\\hdf5/train.hdf5', type=str,
            help='path to the training hdf5 dataset'
        )
        # define the path to the validation hdf5 dataset
        self.parser.add_argument(
            '--validation_hdf5_path', default='C:\\Users\\Dell\\Desktop\\CV Projects\\Invasive Ductal Carcinoma Diagnosis\\data\\hdf5/val.hdf5', type=str,
            help='path to the validation hdf5 dataset'
        )
        # define the path to the test hdf5 dataset
        self.parser.add_argument(
            '--test_hdf5_path', default='C:\\Users\\Dell\\Desktop\\CV Projects\\Invasive Ductal Carcinoma Diagnosis\\data\\hdf5/test.hdf5', type=str,
            help='path to the test hdf5 dataset'
        )
        # define the path to the output directory
        self.parser.add_argument(
            '--output_dir', default='C:\\Users\\Dell\\Desktop\\CV Projects\\Invasive Ductal Carcinoma Diagnosis/output', type=str, help='path to the outputs'
        )
        # define the path to the result directory
        self.parser.add_argument(
            '--result_dir', default='C:\\Users\\Dell\\Desktop\\CV Projects\\Invasive Ductal Carcinoma Diagnosis/result', type=str, help='path to the results'
        )
        # define the path to the predictions directory
        self.parser.add_argument(
            '--predictions_dir', default='C:\\Users\\Dell\\Desktop\\CV Projects\\Invasive Ductal Carcinoma Diagnosis/predictions', type=str, help='path to the predictions'
        )
        # Hyper parameters
        self.parser.add_argument('--lr_type', default='Cyclical', type=str,
                                 help="learning rate schedule", choices=['Cyclical', 'Fixed', 'Decayed'])
        self.parser.add_argument('--min_lr', default=0.000001, type=float,
                                 help="minimum learning rate")
        self.parser.add_argument('--max_lr', default=0.0006, type=float,
                                 help="maximum learning rate")
        self.parser.add_argument('--clr_method', default='triangular', type=str,
                                 choices=['triangular', 'triangular2', 'exp_range'],
                                 help="cyclic learning rate method (triangular, triangular2, exp_range)"
                                 )
        self.parser.add_argument('--step_size', default=8, type=int,
                                 choices=[i for i in range(2, 9)],
                                 help="step size for cyclic learning rate (2-8)"
                                 )
        self.parser.add_argument("--batch_size", default=8, type=int,
                                 help="batch size per epoch")
        self.parser.add_argument("--n_epochs", default=25, type=int,
                                 help="#epochs to train the network on")
        self.parser.add_argument('--random_seed', default=47, type=int,
                                 help='desired random state for numpy and other packages')
        self.parser.add_argument('--optimizer', default='Adam', type=str,
                                 choices=['Adam', 'SGD', 'RMSprop'],
                                 help="optimizer to used (use 'Adam', 'SGD' or 'RMSprop')"
                                 )
        # All backbones supported by NeuralNetwork.build are accepted here.
        self.parser.add_argument('--network_name', default='ResNet50', type=str,
                                 choices=[
                                     'EfficientNetB0', 'EfficientNetB1', 'EfficientNetB2',
                                     'EfficientNetB3', 'EfficientNetB4', 'EfficientNetB5',
                                     'EfficientNetB6', 'ResNet50', 'DenseNet121'
                                 ],
                                 help="name of neural network to be used"
                                 )
        self.parser.add_argument("--factor", default=0.25, type=float,
                                 help='factor to reduce learning rate by')
        self.parser.add_argument("--patience", default=3, type=int,
                                 help='patience')
        # Input images related
        self.parser.add_argument("--image_height", default=50, type=int,
                                 help="image height")
        self.parser.add_argument("--image_width", default=50, type=int,
                                 help="image width")
        self.parser.add_argument("--n_channels", default=3, type=int,
                                 help="number of channels of the images")
        # 'zoom_aug' added so the choice set matches the name train_model.py
        # checks for; 'zoom' is kept for backward compatibility.
        self.parser.add_argument("--augmentation_type", default='normal_aug', type=str,
                                 choices=['hematoxylin_eosin', 'zoom', 'zoom_aug', 'normal_aug'],
                                 help="type of augmentation")
        self.parser.add_argument("--tta_steps", default=25, type=int,
                                 help="number of test time augmentation steps")

    def _add_customized_setting(self):
        """Add customized settings."""
        # define the number of classes to be trained on
        self.parser.add_argument(
            '--num_classes', default=2, type=int,
            help='#classes to train on'
        )

    def _load_common_setting(self):
        """Load default settings from the parser into the config dict."""
        # Directories and network types
        self.config['base_path'] = self.args.base_path
        self.config['images_dir'] = self.args.images_dir
        self.config['output_dir'] = self.args.output_dir
        self.config['result_dir'] = self.args.result_dir
        self.config['predictions_dir'] = self.args.predictions_dir
        self.config['training_hdf5_path'] = self.args.training_hdf5_path
        self.config['validation_hdf5_path'] = self.args.validation_hdf5_path
        self.config['test_hdf5_path'] = self.args.test_hdf5_path
        # Hyperparameters
        self.config['lr_type'] = self.args.lr_type
        self.config['min_lr'] = self.args.min_lr
        self.config['max_lr'] = self.args.max_lr
        self.config['clr_method'] = self.args.clr_method
        self.config['step_size'] = self.args.step_size
        self.config['batch_size'] = self.args.batch_size
        self.config['n_epochs'] = self.args.n_epochs
        self.config['random_seed'] = self.args.random_seed
        self.config['optimizer'] = self.args.optimizer
        self.config['network_name'] = self.args.network_name
        self.config['factor'] = self.args.factor
        self.config['patience'] = self.args.patience
        # Input images related
        self.config['image_height'] = self.args.image_height
        self.config['image_width'] = self.args.image_width
        self.config['n_channels'] = self.args.n_channels
        self.config['augmentation_type'] = self.args.augmentation_type
        self.config['tta_steps'] = self.args.tta_steps

    def _load_customized_setting(self):
        """Load special settings."""
        self.config['num_classes'] = self.args.num_classes

    def get_config(self):
        """Return the config dict."""
        return self.config
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,507
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/preprocessing/meanpreprocessor.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 30 22:30:36 2020
@author: femiogundare
"""
import cv2
class MeanPreprocessor:
    """Subtracts stored per-channel training-set means from an image."""

    def __init__(self, rMean, gMean, bMean):
        # Per-channel means computed across the training set.
        self.rMean = rMean
        self.gMean = gMean
        self.bMean = bMean

    def preprocess(self, image):
        """Return the float32 image with the R, G, B means subtracted."""
        # OpenCV splits in B, G, R channel order.
        blue, green, red = cv2.split(image.astype("float32"))
        blue -= self.bMean
        green -= self.gMean
        red -= self.rMean
        # Reassemble the mean-subtracted channels into one image.
        return cv2.merge([blue, green, red])
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,508
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/io/hdf5datasetgenerator.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 28 10:40:06 2020
@author: femiogundare
"""
import h5py
import numpy as np
from tensorflow.keras.utils import to_categorical
class HDF5DatasetGenerator:
    """Yield batches of (images, labels) from an HDF5 dataset file.

    The file must contain an ``images`` dataset and a ``labels`` dataset
    of equal length.
    """

    def __init__(self, dbPath, batchSize, preprocessors=None, aug=None, binarize=True, n_classes=2):
        self.batchSize = batchSize
        self.preprocessors = preprocessors  # objects exposing .preprocess(image)
        # aug is called as aug(image=...)['image'] — albumentations-style; TODO confirm
        self.aug = aug
        self.binarize = binarize            # one-hot encode labels when True
        self.n_classes = n_classes
        # Open the database read-only and record the total sample count.
        self.db = h5py.File(name=dbPath, mode='r')
        self.numImages = self.db['labels'].shape[0]

    def generator(self, passes=np.inf):
        """Loop over the dataset `passes` times, yielding one batch per step."""
        epoch_count = 0
        while epoch_count < passes:
            for start in np.arange(0, self.numImages, self.batchSize):
                batch_images = self.db['images'][start: start + self.batchSize]
                batch_labels = self.db['labels'][start: start + self.batchSize]
                if self.binarize:
                    batch_labels = to_categorical(batch_labels, self.n_classes)
                if self.preprocessors is not None:
                    # Apply each preprocessor to every image, in order.
                    processed = []
                    for img in batch_images:
                        for proc in self.preprocessors:
                            img = proc.preprocess(img)
                        processed.append(img)
                    batch_images = np.array(processed)
                if self.aug is not None:
                    batch_images = np.stack(
                        [self.aug(image=img)['image'] for img in batch_images], axis=0)
                yield batch_images, np.array(batch_labels)
            epoch_count += 1

    def close(self):
        """Close the underlying HDF5 file handle."""
        self.db.close()
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,509
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/preprocessing/zoom_aug.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 20 01:53:05 2021
@author: femiogundare
"""
import numpy as np
import cv2
def zoom_aug(img, zoom_var=1.5, seed=None):
    """Apply a random uniform spatial zoom to a Numpy image array.

    # Arguments
        img: Numpy image array.
        zoom_var: maximum zoom multiplier; the scale is drawn uniformly
            from [1/zoom_var, zoom_var].
        seed: optional random seed for reproducibility.

    # Returns
        The rescaled Numpy image array.
    """
    rng = np.random.RandomState(seed)
    factor = rng.uniform(low=1 / zoom_var, high=zoom_var)
    # Same factor on both axes; cubic interpolation for smoother resampling.
    return cv2.resize(img, None, fx=factor, fy=factor, interpolation=cv2.INTER_CUBIC)
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,510
|
femiogundare/invasive-ductal-carcinoma-diagnosis
|
refs/heads/main
|
/utilities/metrics/metrics_for_compiling.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 5 21:31:40 2021
@author: femiogundare
"""
from tensorflow.keras import backend as K
def sensitivity(y_true, y_pred):
    """Recall / true-positive rate: TP / (TP + FN), with epsilon for stability."""
    tp = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    actual_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    return tp / (actual_positives + K.epsilon())
def specificity(y_true, y_pred):
    """True-negative rate: TN / (TN + FP), with epsilon for stability."""
    tn = K.sum(K.round(K.clip((1 - y_true) * (1 - y_pred), 0, 1)))
    actual_negatives = K.sum(K.round(K.clip(1 - y_true, 0, 1)))
    return tn / (actual_negatives + K.epsilon())
|
{"/build_idc_dataset.py": ["/utilities/build/build_dataset.py"], "/predict.py": ["/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/metrics/metrics_for_scoring.py"], "/train_model.py": ["/utilities/preprocessing/meanpreprocessor.py", "/utilities/preprocessing/stain_normalization.py", "/utilities/preprocessing/hematoxylin_eosin_aug.py", "/utilities/preprocessing/zoom_aug.py", "/utilities/preprocessing/normal_aug.py", "/utilities/io/hdf5datasetgenerator.py", "/utilities/nn/neural_network.py", "/utilities/callbacks/poly_decay.py", "/utilities/metrics/metrics_for_compiling.py"]}
|
39,512
|
blavad/soccer
|
refs/heads/master
|
/soccer/discrete_soccer/discrete_soccer_env.py
|
"""
Discret soccer game.
"""
import soccer
import math
import os
import gym
from gym import spaces, logger
from gym.utils import seeding
import numpy as np
import cv2
from soccer.core import Team1, Team2
class DiscreteSoccerEnv(gym.Env):
    """
    Description:
        Soccer game played on a discrete grid.
    Observation:
        Type: Discrete(NbAgent*(Width x Height)**NbAgent)
    Actions:
        Type: Discrete(5)
        Num   Action
        0     Do nothing
        1     Front
        2     Back
        3     Left
        4     Right
    """
    metadata = {
        'render.modes': ['human', 'rgb_array'],
        'video.frames_per_second': 50
    }

    actions = [
        'none',
        'front',
        'back',
        'left',
        'right'
    ]

    obs_types = ['integer',
                 'matrix']

    # Running score [team1, team2]; class-level so it persists across resets.
    score = np.array([0, 0])
    # Pixel size of one grid cell when rendering.
    l_bound = 100

    def __init__(self, width_field=5, height_field=4, height_goal=None, nb_pl_team1=1, nb_pl_team2=1, obs_type='integer'):
        DiscreteSoccerEnv.score = np.array([0, 0])
        # Field parameters
        self.w_field = width_field
        self.h_field = height_field
        self.h_goal = self.h_field//2 if height_goal is None else height_goal
        # Goal spans rows [goal_pos[0], goal_pos[1]), centred vertically.
        self.goal_pos = (self.h_field//2 - self.h_goal//2, self.h_field//2 + (self.h_goal-self.h_goal//2))
        self.field = np.zeros((self.h_field, self.w_field))
        # Render dimensions in pixels
        self.width = width_field*DiscreteSoccerEnv.l_bound
        self.height = height_field*DiscreteSoccerEnv.l_bound
        # Players: one team per side; the ball starts with a random player.
        self.team = [Team1(nb_pl_team1).init_config(self.w_field, self.h_field), Team2(nb_pl_team2).init_config(self.w_field, self.h_field)]
        self.all_players[np.random.randint(self.n_players)].has_ball = True
        self.update_field()
        # Other state parameters
        assert obs_type in DiscreteSoccerEnv.obs_types
        self.obs_type = obs_type
        self.done_flag = False
        self.action_space = spaces.Discrete(len(DiscreteSoccerEnv.actions))
        # Fix: compare strings with ==, not 'is' (identity on str literals is fragile).
        if obs_type == 'integer':
            self.observation_space = spaces.Discrete(self.state_space)
        else:
            self.observation_space = spaces.Box(low=0, high=1, shape=(3, self.h_field, self.w_field),
                                                dtype=np.uint8)
        self.init_assets()
        self.viewer = None

    def init_assets(self):
        """Load and scale the player sprites used for rendering."""
        c = DiscreteSoccerEnv.l_bound
        u_j1 = os.path.join(os.path.dirname(soccer.__file__), 'discrete_soccer/assets/j1.png')
        u_j1b = os.path.join(os.path.dirname(soccer.__file__), 'discrete_soccer/assets/j1_ball.png')
        u_j2 = os.path.join(os.path.dirname(soccer.__file__), 'discrete_soccer/assets/j2.png')
        u_j2b = os.path.join(os.path.dirname(soccer.__file__), 'discrete_soccer/assets/j2_ball.png')
        self.j1 = cv2.cvtColor(cv2.resize(cv2.imread(u_j1), (c, c)), cv2.COLOR_BGR2RGB)
        self.j1_ball = cv2.cvtColor(cv2.resize(cv2.imread(u_j1b), (c, c)), cv2.COLOR_BGR2RGB)
        self.j2 = cv2.cvtColor(cv2.resize(cv2.imread(u_j2), (c, c)), cv2.COLOR_BGR2RGB)
        self.j2_ball = cv2.cvtColor(cv2.resize(cv2.imread(u_j2b), (c, c)), cv2.COLOR_BGR2RGB)

    @property
    def state(self):
        """Current observation: an integer code or a 3-channel occupancy map."""
        if self.obs_type == 'integer':
            return self.calculate_int_state()
        else:
            return self.map_state()

    @property
    def team1(self):
        return self.team[0]

    @property
    def team2(self):
        return self.team[1]

    @property
    def n_players(self):
        return len(self.team1) + len(self.team2)

    @property
    def all_players(self):
        """All players, team 1 first."""
        return self.team1.player + self.team2.player

    @property
    def state_space(self):
        # Size of the integer observation space: positions of every player
        # plus which player holds the ball.
        return (self.n_players)*(self.w_field*self.h_field)**(self.n_players)

    def pl_state(self, i):
        """Flatten player i's (row, col) position into a single cell index."""
        pl_pos = self.all_players[i].pos
        return pl_pos[0] + self.h_field * pl_pos[1]

    def calculate_int_state(self):
        """Encode all player positions plus the ball holder as one integer."""
        coef = (self.w_field*self.h_field)**np.arange(self.n_players)
        pos_pl = np.array([self.pl_state(i) for i in range(self.n_players)])
        tmp_state = sum(coef*pos_pl)
        for i, pl in enumerate(self.all_players):
            if pl.has_ball:
                tmp_state += i * (self.w_field*self.h_field)**(self.n_players)
                break
        return tmp_state

    def reset(self):
        """Reset positions and ball ownership; return one observation per player."""
        self.team[0] = self.team[0].init_config(self.w_field, self.h_field)
        self.team[1] = self.team[1].init_config(self.w_field, self.h_field)
        self.all_players[np.random.randint(self.n_players)].has_ball = True
        self.done_flag = False
        self.update_field()
        return [self.state]*self.n_players

    def step(self, actions):
        """Advance one tick.

        `actions` is one action index per player (or a single int); missing
        entries default to 'none'.
        """
        action = []
        try:
            actions = list(actions)
        except TypeError:
            actions = [actions]
        for act in actions:
            assert self.action_space.contains(act), "%r (%s) invalid" % (act, type(act))
            action += [DiscreteSoccerEnv.actions[act]]
        if len(action) < self.n_players:
            # Fix: pad with whole 'none' strings; the original
            # `action += 'none'*k` extended the list with single characters.
            action += ['none'] * (self.n_players - len(action))
        rew, done = self.reward(action)
        self.update_state(action)
        self.update_field()
        return [self.state]*self.n_players, rew, done, {}

    def new_pos(self, player, action):
        """Candidate position after `action`, clamped to the field; team 2's
        front/back and left/right are mirrored."""
        l_pos = list(player.pos)
        if isinstance(player.team, Team1):
            l_pos[1] += 1 if action == 'front' and l_pos[1]+1 < self.w_field else 0
            l_pos[1] -= 1 if action == 'back' and l_pos[1] > 0 else 0
            l_pos[0] += 1 if action == 'right' and l_pos[0]+1 < self.h_field else 0
            l_pos[0] -= 1 if action == 'left' and l_pos[0] > 0 else 0
        if isinstance(player.team, Team2):
            l_pos[1] += 1 if action == 'back' and l_pos[1]+1 < self.w_field else 0
            l_pos[1] -= 1 if action == 'front' and l_pos[1] > 0 else 0
            l_pos[0] += 1 if action == 'left' and l_pos[0]+1 < self.h_field else 0
            l_pos[0] -= 1 if action == 'right' and l_pos[0] > 0 else 0
        return tuple(l_pos)

    def reward(self, action):
        """Per-player rewards: +1 to the scoring team, -1 to the conceding one."""
        rew_team1 = 0
        rew_team2 = 0
        for pl, act in list(zip(self.all_players, action)):
            but = self.buuut(pl, act)
            if but != [0, 0]:
                self.done_flag = True
                DiscreteSoccerEnv.score += but
            rew_team1 = rew_team1 + (but[0] - but[1])*1
            rew_team2 = rew_team2 + (but[1] - but[0])*1
        # rew_team1 += int(self.team1.has_ball) - int(self.team2.has_ball)
        # rew_team2 += int(self.team2.has_ball) - int(self.team1.has_ball)
        rew = [rew_team1]*len(self.team1) + [rew_team2]*len(self.team2)
        done = [self.done_flag]*self.n_players
        return rew, done

    def buuut(self, pl, action):
        """Return [1,0] / [0,1] when `pl` scores by moving forward into the
        opposing goal line with the ball, else [0,0]."""
        if action == 'front':
            if isinstance(pl.team, Team1) and pl.has_ball and pl.pos[1]+1 >= self.w_field and pl.pos[0] >= self.goal_pos[0] and pl.pos[0] < self.goal_pos[1]:
                pl.has_ball = False
                return [1, 0]
            if isinstance(pl.team, Team2) and pl.has_ball and pl.pos[1] < 1 and pl.pos[0] >= self.goal_pos[0] and pl.pos[0] < self.goal_pos[1]:
                pl.has_ball = False
                return [0, 1]
        return [0, 0]

    def update_field(self):
        """Rebuild the occupancy grid (player index+1; x10 for the ball holder)."""
        self.field = np.zeros((self.h_field, self.w_field))
        for i, pl in enumerate(self.all_players):
            self.field[pl.pos] = 10*(i+1) if pl.has_ball else i+1
        return self.field

    def update_state(self, actions):
        """Move every player, then resolve players that ended on the same cell."""
        for i, (pl, act) in enumerate(list(zip(self.all_players, actions))):
            pl.pos = self.new_pos(pl, act)
            # A blocked move (e.g. into a wall) counts as doing nothing.
            if pl.pos == pl.old_pos:
                actions[i] = 'none'
        # Group players by target cell to detect collisions.
        conflit = {}
        for pl, act in list(zip(self.all_players, actions)):
            if pl.pos in conflit.keys():
                conflit[pl.pos] += [[pl, act]]
            else:
                conflit[pl.pos] = [[pl, act]]
        self.gere_conflits(conflit)
        for p in self.all_players:
            p.old_pos = p.pos

    def gere_conflits(self, conflit):
        """Resolve cells claimed by several players; possession may change."""
        # First pass: all colliding players moved — a random one keeps the
        # cell (and takes the ball from any loser who carried it).
        for conf_pos, conf_pl in conflit.items():
            if len(conf_pl) > 1:
                if not 'none' in list(zip(*conf_pl))[1]:
                    num_pl = int(len(conf_pl)*np.random.random())
                    for i, p in enumerate(conf_pl):
                        if i != num_pl:
                            p[0].pos = p[0].old_pos
                            if p[0].has_ball:
                                p[0].has_ball = False
                                conf_pl[num_pl][0].has_ball = True
        # Second pass: a stationary player defends its cell; ball possession
        # is decided randomly (mover keeps it with p=0.7, defender loses it
        # with p=0.7).
        for conf_pos, conf_pl in conflit.items():
            if len(conf_pl) > 1:
                if 'none' in list(zip(*conf_pl))[1]:
                    pl_stay = list(zip(*conf_pl))[1].index('none')
                    for i, p in enumerate(conf_pl):
                        if i != pl_stay:
                            p[0].pos = p[0].old_pos
                            if p[0].has_ball:
                                keep_ball = np.random.random() < 0.7
                                p[0].has_ball = keep_ball
                                conf_pl[pl_stay][0].has_ball = not keep_ball
                            elif conf_pl[pl_stay][0].has_ball:
                                keep_ball = np.random.random() < 0.3
                                conf_pl[pl_stay][0].has_ball = keep_ball
                                p[0].has_ball = not keep_ball

    ########## RENDER PART ##############
    def render(self, mode='human'):
        """Render to a window ('human'), an RGB array, or a text dump."""
        if mode == 'human':
            return self.render_human(mode)
        # Fix: also accept the documented 'rgb_array' mode, not only the
        # historical misspelling 'rbg_array' used by internal callers.
        elif mode in ('rgb_array', 'rbg_array'):
            return self.render_rgb_array()
        return self.render_array()

    def render_human(self, mode='human'):
        from gym.envs.classic_control import rendering
        if self.viewer is None:
            self.viewer = rendering.SimpleImageViewer()
        return self.viewer.imshow(self.render(mode='rbg_array'))

    def render_array(self):
        print(self.field)

    def render_rgb_array(self):
        """Game area stacked above the score banner, as one RGB image."""
        color = np.array([50, 150, 50])
        return np.concatenate((self.renderGame(color_background=color), self.renderInfos(score=DiscreteSoccerEnv.score, color_background=color-50)), axis=0)

    def renderGame(self, color_background=[50, 200, 50]):
        """Draw the field, every player sprite, and both goals."""
        img = np.full(
            (self.height, self.width, 3),
            255,
            dtype=np.uint8,
        )
        img[:, :, :3] = color_background
        for p in self.all_players:
            img = self.draw_player(img, p)
        img = self.draw_goal(img)
        return img

    def renderInfos(self, score=None, color_background=[50, 200, 200]):
        """Draw the score banner strip below the field."""
        height = self.width//6
        infosImg = np.full(
            (height, self.width, 3),
            255,
            dtype=np.uint8,
        )
        infosImg[:, :, :3] = color_background
        return self.displayInfos(infosImg, score)

    def close(self):
        if self.viewer:
            self.viewer.close()
            self.viewer = None

    def displayInfos(self, img, score):
        """Write the current score onto the banner image."""
        font = cv2.FONT_HERSHEY_SIMPLEX
        color = (0, 0, 0)
        cv2.putText(img, "Blue {} - {} Red".format(DiscreteSoccerEnv.score[0], DiscreteSoccerEnv.score[1]), (2*self.width//7, self.width//10), font, min(1., 0.2*self.w_field), color, 1, cv2.LINE_AA)
        return img

    def draw_goal(self, img):
        """Paint both goal strips (blue left, red right) onto the field image."""
        ep = max(4, DiscreteSoccerEnv.l_bound//10)
        y_deb = self.goal_pos[0]*DiscreteSoccerEnv.l_bound
        y_fin = self.goal_pos[1]*DiscreteSoccerEnv.l_bound
        # Fix: broadcast the colors directly; the original chained assignment
        # bound the color list to a variable while discarding the zeros array.
        img[y_deb:y_fin, 0:ep] = [50, 50, 150]
        img[y_deb:y_fin, self.width-ep:] = [150, 50, 50]
        return img

    def draw_player(self, img, p):
        """Blit the sprite for player `p` at its grid cell."""
        x_offset = p.pos[1]*DiscreteSoccerEnv.l_bound
        y_offset = p.pos[0]*DiscreteSoccerEnv.l_bound
        if isinstance(p.team, Team1):
            player_img = self.j1 if not p.has_ball else self.j1_ball
        if isinstance(p.team, Team2):
            player_img = self.j2 if not p.has_ball else self.j2_ball
        img[y_offset:y_offset+player_img.shape[0], x_offset:x_offset+player_img.shape[1]] = player_img
        return img

    def map_state(self):
        """3-channel map: [0]=ball position, [1]=team 1 players, [2]=team 2."""
        tmp_state = np.zeros((3, self.h_field, self.w_field))
        for pl in self.team1.player:
            tmp_state[1, pl.pos[0], pl.pos[1]] = 1
            if pl.has_ball:
                tmp_state[0, pl.pos[0], pl.pos[1]] = 1
        for pl in self.team2.player:
            tmp_state[2, pl.pos[0], pl.pos[1]] = 1
            if pl.has_ball:
                tmp_state[0, pl.pos[0], pl.pos[1]] = 1
        return tmp_state
|
{"/soccer/discrete_soccer/discrete_soccer_env.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/continuous_soccer/continuous_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/base_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/__init__.py": ["/soccer/base_soccer.py", "/soccer/discrete_soccer/__init__.py", "/soccer/continuous_soccer/__init__.py"], "/soccer/discrete_soccer/__init__.py": ["/soccer/discrete_soccer/discrete_soccer_env.py"], "/soccer/continuous_soccer/__init__.py": ["/soccer/continuous_soccer/continuous_soccer.py"]}
|
39,513
|
blavad/soccer
|
refs/heads/master
|
/soccer/core.py
|
class Team(object):
    """A soccer team: a thin container around a list of Player objects."""

    def __init__(self, nb_players=1):
        # One Player per slot, each back-referencing this team.
        self.player = [Player(self) for _ in range(nb_players)]

    def __len__(self):
        """Number of players on the team."""
        return len(self.player)

    def init_config(self, w, h, size_pl=20, type_config="discrete"):
        """Reset every player to its start position on a w x h field.

        Positions come from the subclass-provided `_config` table; returns
        self so calls can be chained.
        """
        for idx, member in enumerate(self.player):
            member.has_ball = False
            member.pos = self._config(w, h, size_pl)[type_config][idx]
        return self

    @property
    def has_ball(self):
        """True when any player on this team currently holds the ball."""
        return any(member.has_ball for member in self.player)
class Team1(Team):
    """Left-side team; attacks toward the right-hand goal."""

    def __init__(self, nb_players=1):
        super(Team1, self).__init__(nb_players)

    def _config(self, w, h, size_pl):
        # Start positions for up to three players, per field representation.
        return {
            "discrete": [(h//2, 0), (0, 0), (h-1, 0)],
            "continuous": [
                (h//2 - int(0.5*size_pl), int(2*size_pl)),
                (int(0.1*size_pl), int(0.5*size_pl)),
                (h-int(1.1*size_pl), int(0.5*size_pl)),
            ],
        }
class Team2(Team):
    """Right-side team; attacks toward the left-hand goal."""

    def __init__(self, nb_players=1):
        super(Team2, self).__init__(nb_players)

    def _config(self, w, h, size_pl):
        # Mirror image of Team1's start positions.
        return {
            "discrete": [(h//2, w-1), (0, w-1), (h-1, w-1)],
            "continuous": [
                (h//2 - int(0.5*size_pl), w-int(2.36*size_pl)),
                (int(0.1*size_pl), w-int(0.86*size_pl)),
                (h-int(1.1*size_pl), w-int(0.86*size_pl)),
            ],
        }
class Player(object):
    """One soccer player: position, previous position, ball flag, owning team."""

    def __init__(self, team, x=0, y=0):
        self.has_ball = False    # whether this player currently holds the ball
        self.pos = (x, y)        # current position
        self.old_pos = self.pos  # position before the latest move
        self.team = team         # owning Team instance

    @property
    def x(self):
        """First coordinate of the current position."""
        return self.pos[0]
|
{"/soccer/discrete_soccer/discrete_soccer_env.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/continuous_soccer/continuous_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/base_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/__init__.py": ["/soccer/base_soccer.py", "/soccer/discrete_soccer/__init__.py", "/soccer/continuous_soccer/__init__.py"], "/soccer/discrete_soccer/__init__.py": ["/soccer/discrete_soccer/discrete_soccer_env.py"], "/soccer/continuous_soccer/__init__.py": ["/soccer/continuous_soccer/continuous_soccer.py"]}
|
39,514
|
blavad/soccer
|
refs/heads/master
|
/soccer/continuous_soccer/continuous_soccer.py
|
"""
Discret soccer game.
"""
import os
import cv2
import math
import numpy as np
import gym
from gym import spaces, logger
from gym.utils import seeding
import soccer
from soccer import BaseSoccerEnv
from soccer.core import Team1, Team2
class ContinuousSoccerEnv(BaseSoccerEnv):
    """
    Description:
        Continuous soccer game.
    Observation:
        Type: Box((5+2*nb_player,))
    Actions:
        Type: Discrete(5)
        Num   Action
        0     Do nothing
        1     Front
        2     Back
        3     Left
        4     Right
    """
    metadata = {
        'render.modes': ['human', 'rgb_array'],
        'video.frames_per_second': 50
    }

    actions = [
        'none',
        'front',
        'back',
        'left',
        'right'
    ]

    act_types = ['discrete',
                 'continuous']

    obs_types = ['positions',
                 'image']

    def __init__(self, width_field=500, height_field=300, height_goal=None, nb_pl_team1=1, nb_pl_team2=1, act_type='discrete', obs_type='positions'):
        BaseSoccerEnv.__init__(self, width=width_field, height=height_field, height_goal=height_goal, nb_pl_team1=nb_pl_team1, nb_pl_team2=nb_pl_team2, type_config="continuous")
        # Ball starts at a random height on the centre line.
        self.ball_pos = [np.random.randint(self.size_ball, self.height-self.size_ball), self.width//2-self.size_ball//2]
        # Other state parameters
        assert act_type in ContinuousSoccerEnv.act_types
        self.act_type = act_type
        assert obs_type in ContinuousSoccerEnv.obs_types
        self.obs_type = obs_type
        self.speed_pl = 8                        # pixels moved per step
        self.ep_goal = max(4, self.width//50)    # goal strip thickness in pixels
        self.frein = 0.2                         # ball friction factor per step
        self.velocity_ball = [0, 0]
        self.action_space = spaces.Discrete(len(ContinuousSoccerEnv.actions))
        # Fix: compare strings with ==, not 'is' (identity on literals is fragile).
        if obs_type == 'positions':
            self.observation_space = spaces.Box(low=-1, high=1, shape=(1, 5+2*(self.n_players-1)))
        else:
            self.observation_space = spaces.Box(low=-1, high=1, shape=(3, 64, 64))
        if self.act_type == 'discrete':
            self.action_space = spaces.Discrete(len(ContinuousSoccerEnv.actions))
        else:
            # Fix: the continuous branch previously overwrote observation_space;
            # it must define the action space (and `shape` must be a tuple).
            self.action_space = spaces.Box(low=0, high=1, shape=(len(ContinuousSoccerEnv.actions),))

    def init_assets(self):
        """Load and scale the player and ball sprites (with alpha channel)."""
        u_j1 = os.path.join(os.path.dirname(soccer.__file__), 'assets/j1_t.png')
        u_j2 = os.path.join(os.path.dirname(soccer.__file__), 'assets/j2_t.png')
        u_ball = os.path.join(os.path.dirname(soccer.__file__), 'assets/ball.png')
        self.j1 = cv2.cvtColor(cv2.resize(cv2.imread(u_j1, cv2.IMREAD_UNCHANGED), (self.size_player_w, self.size_player)), cv2.COLOR_BGRA2RGBA)
        self.j2 = cv2.cvtColor(cv2.resize(cv2.imread(u_j2, cv2.IMREAD_UNCHANGED), (self.size_player_w, self.size_player)), cv2.COLOR_BGRA2RGBA)
        self.ball = cv2.cvtColor(cv2.resize(cv2.imread(u_ball, cv2.IMREAD_UNCHANGED), (self.size_ball, self.size_ball)), cv2.COLOR_BGRA2RGBA)

    def diff_pos(self, pos_ref, pos_comp):
        """Component-wise difference pos_comp - pos_ref, as a tuple."""
        diff = np.array(list(pos_comp)) - np.array(list(pos_ref))
        return tuple(diff)

    def reset(self):
        """Reset players, ball position and velocity; return the observation."""
        self.team[0] = self.team[0].init_config(self.w_field, self.h_field, size_pl=self.size_player, type_config=self.type_config)
        self.team[1] = self.team[1].init_config(self.w_field, self.h_field, size_pl=self.size_player, type_config=self.type_config)
        self.done_flag = False
        self.ball_pos = [np.random.randint(self.size_ball, self.height-self.size_ball), self.width//2-self.size_ball//2]
        self.velocity_ball = [0, 0]
        return self.state

    @property
    def state(self):
        """Per-player egocentric observations (or the rendered image).

        For 'positions', each player's vector holds normalized offsets to the
        ball, both goals, then teammates and finally opponents; team 2's
        offsets are mirrored so both teams see a symmetric encoding.
        """
        if self.obs_type == "positions":
            states = []
            for me in self.all_players:
                obs = []  # [me.pos[0]/self.height, me.pos[1]/self.width]
                b0 = self.diff_pos(me.pos, self.ball_pos)[0]/self.height if me.team is self.team1 else self.diff_pos(self.ball_pos, me.pos)[0]/self.height
                b1 = self.diff_pos(me.pos, self.ball_pos)[1]/self.width if me.team is self.team1 else self.diff_pos(self.ball_pos, me.pos)[1]/self.width
                g0 = self.diff_pos(me.pos, (self.goal_pos[0]+self.h_goal//2, self.width))[0]/self.height if me.team is self.team1 else self.diff_pos((self.goal_pos[0]+self.h_goal//2, 0), me.pos)[0]/self.height
                g1 = self.diff_pos(me.pos, (self.goal_pos[0]+self.h_goal//2, self.width))[1]/self.width if me.team is self.team1 else self.diff_pos((self.goal_pos[0]+self.h_goal//2, 0), me.pos)[1]/self.width
                # my_g0 = self.diff_pos(me.pos, (self.goal_pos[0]+self.h_goal//2, self.width))[0]/self.height if me.team is self.team1 else self.diff_pos((self.goal_pos[0]+self.h_goal//2, 0), me.pos)[0]/self.height
                my_g1 = self.diff_pos(me.pos, (self.goal_pos[0]+self.h_goal//2, 0))[1]/self.width if me.team is self.team1 else self.diff_pos((self.goal_pos[0]+self.h_goal//2, self.width), me.pos)[1]/self.width
                obs += [b0, b1, g0, g1, my_g1]
                for pl_w_me in me.team.player:
                    if pl_w_me is not me:
                        o0 = self.diff_pos(me.pos, pl_w_me.pos)[0]/self.height if me.team is self.team1 else self.diff_pos(pl_w_me.pos, me.pos)[0]/self.height
                        o1 = self.diff_pos(me.pos, pl_w_me.pos)[1]/self.width if me.team is self.team1 else self.diff_pos(pl_w_me.pos, me.pos)[1]/self.width
                        obs += [o0, o1]
                for _, pl in enumerate(self.all_players):
                    if pl.team is not me.team:
                        o0 = self.diff_pos(me.pos, pl.pos)[0]/self.height if me.team is self.team1 else self.diff_pos(pl.pos, me.pos)[0]/self.height
                        o1 = self.diff_pos(me.pos, pl.pos)[1]/self.width if me.team is self.team1 else self.diff_pos(pl.pos, me.pos)[1]/self.width
                        obs += [o0, o1]
                states += [obs]
            return states
        else:
            return self.renderGame()

    def new_pos(self, player, action):
        """Candidate pixel position after `action`, clamped to the field;
        team 2's directions are mirrored."""
        l_pos = list(player.pos)
        if isinstance(player.team, Team1):
            l_pos[1] += self.speed_pl if action == 'front' and l_pos[1] + self.size_player_w + self.speed_pl < self.width else 0
            l_pos[1] -= self.speed_pl if action == 'back' and l_pos[1] - self.speed_pl > 0 else 0
            l_pos[0] += self.speed_pl if action == 'right' and l_pos[0] + self.speed_pl + self.size_player < self.height else 0
            l_pos[0] -= self.speed_pl if action == 'left' and l_pos[0] - self.speed_pl > 0 else 0
        if isinstance(player.team, Team2):
            l_pos[1] += self.speed_pl if action == 'back' and l_pos[1] + self.size_player_w + self.speed_pl < self.width else 0
            l_pos[1] -= self.speed_pl if action == 'front' and l_pos[1] - self.speed_pl > 0 else 0
            l_pos[0] += self.speed_pl if action == 'left' and l_pos[0] + self.speed_pl + self.size_player < self.height else 0
            l_pos[0] -= self.speed_pl if action == 'right' and l_pos[0] - self.speed_pl > 0 else 0
        return tuple(l_pos)

    def reward(self, action=None):
        """Goal reward (+1/-1) plus a shaping penalty proportional to the
        ball's distance from each team's target goal."""
        rew_team1 = 0
        rew_team2 = 0
        but = self.buuut()
        if but != [0, 0]:
            self.done_flag = True
            self.score += but
        rew_team1 = rew_team1 + (but[0] - but[1])*1
        rew_team2 = rew_team2 + (but[1] - but[0])*1
        rew_team1 -= (self.width - self.ball_pos[1])/self.width
        rew_team2 -= (self.ball_pos[1])/self.width
        rew = [rew_team1]*len(self.team1) + [rew_team2]*len(self.team2)
        done = [self.done_flag]*self.n_players
        return rew, done

    def buuut(self):
        """[1,0] / [0,1] when the ball crosses a goal mouth, else [0,0]."""
        if self.ball_pos[1]+self.size_ball >= self.width-self.ep_goal and self.ball_pos[0] >= self.goal_pos[0] and self.ball_pos[0] < self.goal_pos[1]:
            return [1, 0]
        if self.ball_pos[1] <= self.ep_goal and self.ball_pos[0] >= self.goal_pos[0] and self.ball_pos[0] < self.goal_pos[1]:
            return [0, 1]
        return [0, 0]

    def update_field(self):
        # No grid representation in the continuous game.
        pass

    def collision_pl(self, p1, p2):
        """Axis-aligned bounding-box overlap test between two players."""
        return (p1.pos[1] < p2.pos[1] + self.size_player_w and
                p1.pos[1] + self.size_player_w > p2.pos[1] and
                p1.pos[0] < p2.pos[0] + self.size_player and
                p1.pos[0] + self.size_player > p2.pos[0])

    def collision_ball(self, pl):
        """Bounding-box overlap test between the ball and a player."""
        return (self.ball_pos[1] < pl.pos[1] + self.size_player_w and
                self.ball_pos[1] + self.size_ball > pl.pos[1] and
                self.ball_pos[0] < pl.pos[0] + self.size_player and
                self.ball_pos[0] + self.size_ball > pl.pos[0])

    def gere_conflits(self, p1, p2):
        """Separate two overlapping players based on their movement directions."""
        # p1 moving right
        if p1.pos[1] - p1.old_pos[1] > 0:
            # p2 moving left: head-on, both revert
            if p2.pos[1] - p2.old_pos[1] < 0:
                p1.pos = p1.old_pos
                p2.pos = p2.old_pos
            # p2 moving up
            elif p2.pos[0] - p2.old_pos[0] < 0:
                if p2.old_pos[0] < p1.pos[0]+self.size_player:
                    p2.pos = (p2.pos[0], p1.pos[1]+self.size_player_w)
                else:
                    p1.pos = (p2.pos[0]-self.size_player, p1.pos[1])
            # p2 moving down
            elif p2.pos[0] - p2.old_pos[0] > 0:
                if p2.old_pos[0]+self.size_player > p1.pos[0]:
                    p2.pos = (p2.pos[0], p1.pos[1]+self.size_player_w)
                else:
                    p1.pos = (p2.pos[0]+self.size_player, p1.pos[1])
            # p2 not moving: pushed to p1's right edge
            elif p2.pos[0] - p2.old_pos[0] == 0 and p2.pos[1] - p2.old_pos[1] == 0:
                p2.pos = (p2.pos[0], p1.pos[1]+self.size_player_w)
        # p1 moving up
        elif p1.pos[0] - p1.old_pos[0] < 0:
            # p2 moving down: head-on, both revert
            if p2.pos[0] - p2.old_pos[0] > 0:
                p1.pos = p1.old_pos
                p2.pos = p2.old_pos
            # p2 moving sideways or idle
            if p1.old_pos[0] > p2.pos[0]+self.size_player:
                p2.pos = (p1.pos[0]-self.size_player, p2.pos[1])
        # p1 moving down
        # Fix: this branch previously repeated the `< 0` test and was
        # unreachable; the downward move is `> 0`.
        elif p1.pos[0] - p1.old_pos[0] > 0:
            # p2 moving up: head-on, both revert
            if p2.pos[0] - p2.old_pos[0] < 0:
                p1.pos = p1.old_pos
                p2.pos = p2.old_pos
            # p2 moving sideways or idle
            if p1.old_pos[0] < p2.pos[0]-self.size_player:
                p2.pos = (p1.pos[0]+self.size_player, p2.pos[1])
        # p1 moving left
        elif p1.pos[1] - p1.old_pos[1] < 0:
            # p2 moving up
            if p2.pos[0] - p2.old_pos[0] < 0:
                p2.pos = (p2.pos[0], p1.pos[1]-self.size_player_w)
            # p2 moving down
            if p2.pos[0] - p2.old_pos[0] > 0:
                p2.pos = (p2.pos[0], p1.pos[1]-self.size_player_w)
            # p2 not moving
            elif p2.pos[0] - p2.old_pos[0] == 0 and p2.pos[1] - p2.old_pos[1] == 0:
                p2.pos = (p2.pos[0], p1.pos[1] - self.size_player_w)

    def gere_conflits_ball(self, pl):
        """Kick the ball in the direction the colliding player is moving."""
        vel = [0, 0]
        # player moving right
        if pl.pos[1] - pl.old_pos[1] > 0:
            vel = [0, self.speed_pl*2]
        # player moving left
        if pl.pos[1] - pl.old_pos[1] < 0:
            vel = [0, -self.speed_pl*2]
        # player moving down
        if pl.pos[0] - pl.old_pos[0] > 0:
            vel = [self.speed_pl*2, 0]
        # player moving up
        if pl.pos[0] - pl.old_pos[0] < 0:
            vel = [-self.speed_pl*2, 0]
        self.velocity_ball[0] += vel[0]
        self.velocity_ball[1] += vel[1]

    def is_valide_pos(self, pos0, pos1, w, h):
        """True when a w x h box at (pos0, pos1) lies fully inside the field."""
        return pos0 > 0 and pos0+h < self.height and pos1 > 0 and pos1+w < self.width

    def update_state(self, actions):
        """Move players, resolve collisions, then advance the ball."""
        for i, (pl, act) in enumerate(list(zip(self.all_players, actions))):
            pl.pos = self.new_pos(pl, act)
            if pl.pos == pl.old_pos:
                actions[i] = 'none'
        for pl1, act1 in list(zip(self.all_players, actions)):
            for pl2, act2 in list(zip(self.all_players, actions)):
                if pl1 is not pl2:
                    if self.collision_pl(pl1, pl2):
                        self.gere_conflits(pl1, pl2)
        # Apply friction, then kicks from any player touching the ball.
        self.velocity_ball[0] = int(self.frein * self.velocity_ball[0])
        self.velocity_ball[1] = int(self.frein * self.velocity_ball[1])
        for pl in self.all_players:
            if self.collision_ball(pl):
                self.velocity_ball = [0, 0]
                self.gere_conflits_ball(pl)
        self.ball_pos[0] += self.velocity_ball[0] if self.is_valide_pos(self.ball_pos[0]+self.velocity_ball[0], self.ball_pos[1], self.size_ball, self.size_ball) else 0
        if self.is_valide_pos(self.ball_pos[0], self.ball_pos[1]+self.velocity_ball[1], self.size_ball, self.size_ball):
            self.ball_pos[1] += self.velocity_ball[1]
        else:
            # Ball leaving the field horizontally is parked on the goal line.
            if self.ball_pos[1]+self.velocity_ball[1] < 0:
                self.ball_pos[1] = self.ep_goal//2
            else:
                self.ball_pos[1] = self.width - self.size_ball - self.ep_goal//2
        for p in self.all_players:
            if self.is_valide_pos(p.pos[0], p.pos[1], self.size_player_w, self.size_player):
                p.old_pos = p.pos
            else:
                p.pos = p.old_pos

    def draw_goal(self, img):
        """Paint both goal strips (blue left, red right) onto the RGBA image."""
        ep = self.ep_goal
        y_deb = self.goal_pos[0]
        y_fin = self.goal_pos[1]
        # Fix: broadcast the colors directly; the original chained assignment
        # bound the color list to a variable while discarding the zeros array.
        img[y_deb:y_fin, 0:ep] = [50, 50, 150, 255]
        img[y_deb:y_fin, self.width-ep:] = [150, 50, 50, 255]
        return img

    def draw_ball(self, img):
        """Blit the ball sprite, copying only fully-opaque pixels."""
        x_offset = self.ball_pos[1]
        y_offset = self.ball_pos[0]
        ball_img = self.ball
        ind = np.where(ball_img[:, :, 3] > 250)
        img[ind[0]+y_offset, ind[1]+x_offset] = ball_img[ind]
        return img

    def draw_player(self, img, p):
        """Blit a player sprite, copying only fully-opaque pixels."""
        x_offset = p.pos[1]
        y_offset = p.pos[0]
        player_img = self.j1 if isinstance(p.team, Team1) else self.j2
        ind = np.where(player_img[:, :, 3] > 250)
        img[ind[0]+y_offset, ind[1]+x_offset] = player_img[ind]
        return img
|
{"/soccer/discrete_soccer/discrete_soccer_env.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/continuous_soccer/continuous_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/base_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/__init__.py": ["/soccer/base_soccer.py", "/soccer/discrete_soccer/__init__.py", "/soccer/continuous_soccer/__init__.py"], "/soccer/discrete_soccer/__init__.py": ["/soccer/discrete_soccer/discrete_soccer_env.py"], "/soccer/continuous_soccer/__init__.py": ["/soccer/continuous_soccer/continuous_soccer.py"]}
|
39,515
|
blavad/soccer
|
refs/heads/master
|
/soccer/base_soccer.py
|
"""
Base class of soccer games.
"""
import os
import cv2
import math
import numpy as np
import gym
from gym import spaces, logger
from gym.utils import seeding
import soccer
from soccer.core import Team1, Team2
class BaseSoccerEnv(gym.Env):
    """Common base for the discrete and continuous soccer Gym environments.

    Holds field geometry, the two teams, the score, and the rendering
    helpers.  Subclasses are expected to provide `update_state`, `reward`,
    `update_field`, `init_assets`, the class-level `actions` table, the
    sprite attributes (`self.j1`, `self.j2`, `self.ball`) and the
    positional attributes used by rendering — none are defined here
    (TODO confirm against the subclass implementations).
    """

    def __init__(self, width=300, height=200, height_goal=None, nb_pl_team1=1, nb_pl_team2=1, type_config="discrete"):
        # Running score as [team1_goals, team2_goals].
        self.score = np.array([0, 0])
        # Field parameters
        self.width = width
        self.height = height
        self.w_field = width
        self.h_field = height
        # Goal mouth is half the field height unless given explicitly.
        self.h_goal = self.h_field // 2 if height_goal is None else height_goal
        # (top, bottom) rows of the goal mouth, centred vertically.
        self.goal_pos = (self.h_field // 2 - self.h_goal // 2,
                         self.h_field // 2 + (self.h_goal - self.h_goal // 2))
        self.type_config = type_config
        # Players parameters
        self.size_player = min(width // 5, height // 5)
        self.size_player_w = int(self.size_player * 0.36)
        self.team = [Team1(nb_pl_team1).init_config(self.w_field, self.h_field, size_pl=self.size_player, type_config=self.type_config),
                     Team2(nb_pl_team2).init_config(self.w_field, self.h_field, size_pl=self.size_player, type_config=self.type_config)]
        # Hand the ball to one player chosen uniformly at random.
        self.all_players[np.random.randint(self.n_players)].has_ball = True
        self.update_field()  # provided by the subclass
        self.size_ball = self.size_player // 3
        self.done_flag = False
        self.init_assets()  # provided by the subclass; loads sprites
        self.viewer = None

    @property
    def team1(self):
        # First entry of self.team.
        return self.team[0]

    @property
    def team2(self):
        # Second entry of self.team.
        return self.team[1]

    @property
    def n_players(self):
        # Total player count across both teams.
        return len(self.team1) + len(self.team2)

    @property
    def all_players(self):
        # Flat list: team1's players followed by team2's.
        return self.team1.player + self.team2.player

    def step(self, actions):
        """Apply one action per player and advance the simulation a tick.

        `actions` may be a single action or an iterable of per-player
        actions; each must belong to `self.action_space`.  Returns the
        usual Gym tuple (state, reward, done, info).
        """
        action = []
        try:
            actions = list(actions)
        except TypeError:
            # A single scalar action was passed; wrap it.
            actions = [actions]
        for act in actions:
            assert self.action_space.contains(act), "%r (%s) invalid" % (act, type(act))
            # Translate the integer action into the subclass's action table.
            action += [self.__class__.actions[act]]
        self.update_state(action)
        rew, done = self.reward(action)
        self.update_field()
        return self.state, rew, done, {}

    ########## RENDER PART ##############
    def render(self, mode='human'):
        # NOTE(review): the rgb mode string is spelled 'rbg_array'
        # throughout this class; kept as-is because render_human below
        # relies on that exact spelling.
        if mode == 'human':
            return self.render_human(mode)
        elif mode == 'rbg_array':
            return self.render_rgb_array()
        return self.render_array()

    def render_human(self, mode='human'):
        """Show the rendered frame in a pyglet window (lazy-created)."""
        from gym.envs.classic_control import rendering
        if self.viewer is None:
            self.viewer = rendering.SimpleImageViewer()
        # Drop the alpha channel before displaying.
        return self.viewer.imshow(self.render(mode='rbg_array')[:, :, :3])

    def render_array(self):
        # Text-mode render: dump the raw field array.
        print(self.field)

    def render_rgb_array(self):
        """Return the full frame: game area stacked above the info bar."""
        color = np.array([50, 150, 50])
        return np.concatenate((self.renderGame(color_background=color),
                               self.renderInfos(score=self.score, color_background=color - 50)), axis=0)

    def renderGame(self, color_background=[50, 200, 50]):
        """Draw the pitch, players, ball and goals into an RGBA image.

        NOTE(review): mutable default argument; harmless while callers
        never mutate it, but worth confirming.
        """
        img = np.full(
            (self.height, self.width, 4),
            255,
            dtype=np.uint8,
        )
        img[:, :, :3] = color_background
        for p in self.all_players:
            img = self.draw_player(img, p)
        img = self.draw_ball(img)
        img = self.draw_goal(img)
        return img

    def renderInfos(self, score=None, color_background=[50, 200, 200]):
        """Draw the score bar rendered beneath the game area."""
        height = self.width // 6
        infosImg = np.full(
            (height, self.width, 4),
            255,
            dtype=np.uint8,
        )
        infosImg[:, :, :3] = color_background
        return self.displayInfos(infosImg, score)

    def close(self):
        """Release the viewer window, if one was opened."""
        if self.viewer:
            self.viewer.close()
            self.viewer = None

    def displayInfos(self, img, score):
        """Overlay the 'Blue X - Y Red' score text onto `img`."""
        font = cv2.FONT_HERSHEY_SIMPLEX
        color = (0, 0, 0)
        # Note: draws self.score, not the `score` parameter.
        cv2.putText(img, "Blue {} - {} Red".format(self.score[0], self.score[1]), (2 * self.width // 7, self.width // 10), font, min(1., 0.2 * self.w_field), color, 1, cv2.LINE_AA)
        return img
|
{"/soccer/discrete_soccer/discrete_soccer_env.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/continuous_soccer/continuous_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/base_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/__init__.py": ["/soccer/base_soccer.py", "/soccer/discrete_soccer/__init__.py", "/soccer/continuous_soccer/__init__.py"], "/soccer/discrete_soccer/__init__.py": ["/soccer/discrete_soccer/discrete_soccer_env.py"], "/soccer/continuous_soccer/__init__.py": ["/soccer/continuous_soccer/continuous_soccer.py"]}
|
39,516
|
blavad/soccer
|
refs/heads/master
|
/setup.py
|
#!/usr/bin/env python
# Packaging script for the `soccer` Gym environments.
import imp  # NOTE(review): unused (and deprecated stdlib module); confirm before removing
from setuptools import setup, find_packages

setup(
    name='soccer',
    version='0.1.0',
    packages=find_packages(),
    install_requires=['pyglet', 'gym'],
)
|
{"/soccer/discrete_soccer/discrete_soccer_env.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/continuous_soccer/continuous_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/base_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/__init__.py": ["/soccer/base_soccer.py", "/soccer/discrete_soccer/__init__.py", "/soccer/continuous_soccer/__init__.py"], "/soccer/discrete_soccer/__init__.py": ["/soccer/discrete_soccer/discrete_soccer_env.py"], "/soccer/continuous_soccer/__init__.py": ["/soccer/continuous_soccer/continuous_soccer.py"]}
|
39,517
|
blavad/soccer
|
refs/heads/master
|
/soccer/__init__.py
|
from soccer.base_soccer import BaseSoccerEnv
from soccer.discrete_soccer import DiscreteSoccerEnv
from soccer.continuous_soccer import ContinuousSoccerEnv
|
{"/soccer/discrete_soccer/discrete_soccer_env.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/continuous_soccer/continuous_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/base_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/__init__.py": ["/soccer/base_soccer.py", "/soccer/discrete_soccer/__init__.py", "/soccer/continuous_soccer/__init__.py"], "/soccer/discrete_soccer/__init__.py": ["/soccer/discrete_soccer/discrete_soccer_env.py"], "/soccer/continuous_soccer/__init__.py": ["/soccer/continuous_soccer/continuous_soccer.py"]}
|
39,518
|
blavad/soccer
|
refs/heads/master
|
/soccer/discrete_soccer/__init__.py
|
from soccer.discrete_soccer.discrete_soccer_env import DiscreteSoccerEnv
|
{"/soccer/discrete_soccer/discrete_soccer_env.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/continuous_soccer/continuous_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/base_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/__init__.py": ["/soccer/base_soccer.py", "/soccer/discrete_soccer/__init__.py", "/soccer/continuous_soccer/__init__.py"], "/soccer/discrete_soccer/__init__.py": ["/soccer/discrete_soccer/discrete_soccer_env.py"], "/soccer/continuous_soccer/__init__.py": ["/soccer/continuous_soccer/continuous_soccer.py"]}
|
39,519
|
blavad/soccer
|
refs/heads/master
|
/soccer/continuous_soccer/__init__.py
|
from soccer.continuous_soccer.continuous_soccer import ContinuousSoccerEnv
|
{"/soccer/discrete_soccer/discrete_soccer_env.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/continuous_soccer/continuous_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/base_soccer.py": ["/soccer/__init__.py", "/soccer/core.py"], "/soccer/__init__.py": ["/soccer/base_soccer.py", "/soccer/discrete_soccer/__init__.py", "/soccer/continuous_soccer/__init__.py"], "/soccer/discrete_soccer/__init__.py": ["/soccer/discrete_soccer/discrete_soccer_env.py"], "/soccer/continuous_soccer/__init__.py": ["/soccer/continuous_soccer/continuous_soccer.py"]}
|
39,520
|
JHuenerberg/rpz-light-control
|
refs/heads/master
|
/app.py
|
from flask import Flask, render_template, request
import rf_send
app = Flask(__name__)
@app.route('/', methods=["GET", "POST"])
def index():
    """Render the control page; on POST, switch the light on or off.

    The form posts either a `lighton` or `lightoff` field, which is
    forwarded to the RF transmitter via `rf_send.control_light`.
    """
    if request.method == "POST":
        if request.form.get("lighton"):
            rf_send.control_light("on")
        elif request.form.get("lightoff"):
            rf_send.control_light("off")
    return render_template('index.html')

if __name__ == "__main__":
    # Bind to all interfaces so the page is reachable on the LAN.
    app.run(debug=True, host='0.0.0.0')
|
{"/app.py": ["/rf_send.py"]}
|
39,521
|
JHuenerberg/rpz-light-control
|
refs/heads/master
|
/rf_send.py
|
import logging
from rpi_rf import RFDevice
# Timestamped INFO logging so each transmission is auditable.
logging.basicConfig(level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S',
                    format='%(asctime)-15s - [%(levelname)s] %(module)s: %(message)s',)

# RF codes for the controlled socket — presumably device-specific;
# confirm against the hardware remote.
codes = {"on": 1052693, "off": 1052692}

def control_light(code):
    """Transmit the RF code named by `code` ("on" or "off") on GPIO 17.

    Raises KeyError for any other name.  The RFDevice is created and
    cleaned up on every call so the GPIO pin is always released.
    """
    rfdevice = RFDevice(17)
    rfdevice.enable_tx()
    protocol = 1
    pulselength = 380
    logging.info("code: {0}".format(code) +
                 " [protocol: " + str(protocol) +
                 ", pulselength: " + str(pulselength) + "]")
    rfdevice.tx_code(codes[code], protocol, pulselength)
    rfdevice.cleanup()
|
{"/app.py": ["/rf_send.py"]}
|
39,524
|
jyotiyadav99111/clickbait_identification
|
refs/heads/main
|
/preprocessing.py
|
import csv
import pickle
import random
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
from nltk.corpus import stopwords
random.seed(0)
#STOPWORDS = set(stopwords.words('english'))
VOCAB_SIZE = 5000
EMBEDDING_DIM = 64
MAX_LENGTH = 25
TRUNC_TYPE = 'post'
PADDING_TYPE = 'post'
OOV_TOK = '<OOV>'
TRIAINING_PORTION = .8
heading= []
labels = []
def data_loading(heading, labels, path, label_value):
"""
heading: list where data is to be appended
labels: list where labels can be appended
path: path to the file
label_value: if clickbait then 1, else 0
"""
for lines in open(path, encoding="utf8").readlines():
if(lines != "\n"):
heading.append(lines.split("\n")[0])
labels.append(label_value)
return heading, labels
def train_val_split(headings, labels, train_fraction = 0.8):
"""
headings: full set of heading data
labels: full set of labels data
train_fraction: fraction for training set
"""
# to shuffle the lists before split
temp = list(zip(headings, labels))
random.shuffle(temp)
list1, list2 = zip(*temp)
len_train = int(len(list1) * train_fraction)
train_headings = list1[0:len_train]
train_labels = list2[0:len_train]
val_headings = list1[len_train:]
val_labels = list2[len_train:]
return train_headings, train_labels, val_headings, val_labels
def tokenizer(sequence_list):
    """Fit a Keras Tokenizer (VOCAB_SIZE words, OOV_TOK fallback) on the texts.

    NOTE(review): the script below rebinds the module-level name
    `tokenizer` to this function's result — confirm that shadowing is
    intentional.
    """
    tokenizer = Tokenizer(num_words = VOCAB_SIZE, oov_token = OOV_TOK)
    tokenizer.fit_on_texts(sequence_list)
    #word_index = tokenizer.word_index #list of all tokens created
    return tokenizer
def apply_tokenizer(tokenizer, sequence_list):
    """Convert texts to integer sequences, padded/truncated to MAX_LENGTH."""
    train_sequence = tokenizer.texts_to_sequences(sequence_list)
    train_padded = pad_sequences(train_sequence, maxlen = MAX_LENGTH, padding = PADDING_TYPE, truncating = TRUNC_TYPE)
    return train_padded
def LSTM_model(num_epochs, train_padded, train_label, val_padded, val_labels):
    """Build, compile and fit the bidirectional-LSTM classifier.

    Saves the weights with the lowest validation loss to 'best_model.h5'
    and returns (model summary, training history).
    """
    model = tf.keras.Sequential([
        tf.keras.layers.Embedding(VOCAB_SIZE, EMBEDDING_DIM),
        tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(EMBEDDING_DIM)),
        tf.keras.layers.GaussianNoise(0.5),
        tf.keras.layers.Dense(EMBEDDING_DIM, activation='relu'),
        tf.keras.layers.Dense(2, activation='sigmoid'),
    ])
    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    # Despite the variable name in the original, this is a checkpoint
    # callback, not early stopping: it only saves the best weights.
    checkpoint_cb = tf.keras.callbacks.ModelCheckpoint(
        'best_model.h5', monitor='val_loss', mode='min', save_best_only=True)
    history = model.fit(train_padded, train_label,
                        epochs=num_epochs,
                        validation_data=(val_padded, val_labels),
                        verbose=2,
                        callbacks=[checkpoint_cb])
    return model.summary(), history
def plot_graphs(history, string):
    """Plot the training and validation curves for metric `string`."""
    plt.plot(history.history[string])
    plt.plot(history.history['val_'+ string])
    plt.xlabel("Epochs")
    plt.legend([string, 'val_' + string])
    plt.show()
def text_for_pred(tokenizer, txt, model):
    """Tokenise and pad `txt` (a list of strings), return model predictions."""
    padded = apply_tokenizer(tokenizer, txt)
    pred = model.predict(padded)
    return pred
# ---- Script: load both corpora, train, and plot the loss curves ----
# Clickbait headlines get label 1...
data_X, label_Y = data_loading(heading=heading, labels = labels, path = "data/clickbait_data", label_value = 1)
print("*****************************", len(data_X))
# ...then non-clickbait headlines (label 0) are appended to the same lists.
data_X, label_Y = data_loading(heading=data_X, labels = label_Y, path = "data/non_clickbait_data", label_value = 0)
print("*****************************", len(data_X))
train_headings, train_labels, val_headings, val_labels = train_val_split(data_X, label_Y)
# Keras expects label arrays, not Python lists.
train_labels= np.array(train_labels)
val_labels= np.array(val_labels)
# NOTE(review): rebinds the module-level function name `tokenizer`.
tokenizer = tokenizer(train_headings)
# Save tokenizer to use in predict.py file
with open('tokenizer.pickle', 'wb') as handle:
    pickle.dump(tokenizer, handle, protocol=pickle.HIGHEST_PROTOCOL)
train_headings_padded = apply_tokenizer(tokenizer, train_headings)
val_headings_padded = apply_tokenizer(tokenizer, val_headings)
summary, history = LSTM_model(5, train_headings_padded, train_labels, val_headings_padded, val_labels)
plot_graphs(history, 'loss')
|
{"/predict.py": ["/preprocessing.py"]}
|
39,525
|
jyotiyadav99111/clickbait_identification
|
refs/heads/main
|
/predict.py
|
import pickle
import tensorflow as tf
from preprocessing import apply_tokenizer, text_for_pred
# Load the trained model saved by preprocessing.py.
model = tf.keras.models.load_model('best_model.h5')
# Load the tokenizer pickled by preprocessing.py so the same vocabulary
# is applied at prediction time.
with open('tokenizer.pickle', 'rb') as handle:
    tokenizer = pickle.load(handle)
# Collect a single headline from the user; the model expects a list.
txt = []
heading = input("Please enter the article heading here: ")
txt.append(heading)
#txt = ["A Fencer Strives to Crack a Saber Ceiling"]
prediction = text_for_pred(tokenizer, txt, model)
# Class 0 is "not clickbait", class 1 is "clickbait" (labels set in
# preprocessing.py's data_loading calls).
if prediction[0][0] > prediction[0][1]:
    print("Phew! You are safe! Go ahead...")
else:
    print("It's a clickbait!!!!!")
|
{"/predict.py": ["/preprocessing.py"]}
|
39,528
|
niEmerance/PythonWeek2IP
|
refs/heads/master
|
/instance/config.py
|
# NOTE(review): a live-looking API key and a trivial secret are committed
# to source control; rotate the key and load both values from the
# environment instead.
SOURCE_API_KEY='ca646ffdcd7c47028f4fd29cd28644da'
SECRET_KEY='12345'
|
{"/app/requests.py": ["/app/models.py"], "/app/main/views.py": ["/app/requests.py"]}
|
39,529
|
niEmerance/PythonWeek2IP
|
refs/heads/master
|
/app/models.py
|
class Source:
    """A single news source as returned by the News API.

    Carries the source's identifier, display name, description,
    homepage URL, category, language and country code.
    """

    def __init__(self, id, name, description, url, category, language, country):
        (self.id, self.name, self.description, self.url,
         self.category, self.language, self.country) = (
            id, name, description, url, category, language, country)
class Articles:
    """A single news article: metadata and content fields from the API."""

    def __init__(self, id, author, title, description, url, urlToImage, publishedAt, content):
        (self.id, self.author, self.title, self.description,
         self.url, self.urlToImage, self.publishedAt, self.content) = (
            id, author, title, description, url, urlToImage, publishedAt, content)
|
{"/app/requests.py": ["/app/models.py"], "/app/main/views.py": ["/app/requests.py"]}
|
39,530
|
niEmerance/PythonWeek2IP
|
refs/heads/master
|
/app/requests.py
|
import urllib.request,json
from .models import Source,Articles
# Source=source.Source
# Getting api key
api_key = None
base_url = None
articles_url=None
def configure_request(app):
    """Copy the API key and endpoint templates from the Flask app config
    into this module's globals, so the request helpers below can use them."""
    global api_key,base_url,articles_url
    api_key = app.config['SOURCE_API_KEY']
    base_url = app.config['SOURCE_API_BASE_URL']
    articles_url=app.config['ARTICLE_API_BASE_URL']
def get_sources(category):
    """Fetch the news sources for `category` and return them as Source
    objects, or None when the API response has no truthy 'sources' list.

    Requires configure_request() to have been called first.
    """
    get_sources_url=base_url.format(category,api_key)
    with urllib.request.urlopen(get_sources_url) as url:
        get_sources_data = url.read()
        get_sources_response = json.loads(get_sources_data)
        source_results = None
        if get_sources_response['sources']:
            source_results_list = get_sources_response['sources']
            source_results = process_results(source_results_list)
    return source_results
def process_results(source_list):
    """Map raw source dicts from the API onto Source model objects."""
    return [
        Source(
            item.get('id'),
            item.get('name'),
            item.get('description'),
            item.get('url'),
            item.get('category'),
            item.get('language'),
            item.get('country'),
        )
        for item in source_list
    ]
def get_articles(id):
    """Fetch the articles published by source `id` and return them as
    Articles objects, or None when the API response has no truthy
    'articles' list.

    Requires configure_request() to have been called first.
    (Removed two leftover debug prints that leaked to stdout on every call.)
    """
    get_articles_url=articles_url.format(id,api_key)
    with urllib.request.urlopen(get_articles_url) as url:
        get_articles_data=url.read()
        get_articles_response=json.loads(get_articles_data)
        articles_results=None
        if get_articles_response['articles']:
            articles_results_list=get_articles_response['articles']
            articles_results=process_articles(articles_results_list)
    return articles_results
def process_articles(article_list):
    """Map raw article dicts onto Articles objects.

    Articles without a truthy 'urlToImage' are skipped, matching the
    template's expectation that every article has an image.
    """
    results = []
    for item in article_list:
        image = item.get('urlToImage')
        if not image:
            continue
        results.append(Articles(
            item.get('id'),
            item.get('author'),
            item.get('title'),
            item.get('description'),
            item.get('url'),
            image,
            item.get('publishedAt'),
            item.get('content'),
        ))
    return results
|
{"/app/requests.py": ["/app/models.py"], "/app/main/views.py": ["/app/requests.py"]}
|
39,531
|
niEmerance/PythonWeek2IP
|
refs/heads/master
|
/app/main/views.py
|
from flask import render_template,request,redirect,url_for
from . import main
from ..requests import get_sources, get_articles
# Views
@main.route('/')
def index():
    """Landing page: list the 'general' news sources."""
    sources=get_sources('general')
    title='Welcome to our articles'
    return render_template('index.html', title=title, general=sources)

@main.route('/articles/<id>')
def source(id):
    """Articles page for one news source, looked up by its API id."""
    articles_source=get_articles(id)
    title='Welcome to our articles'
    return render_template('articles.html', title=title, articles=articles_source)
|
{"/app/requests.py": ["/app/models.py"], "/app/main/views.py": ["/app/requests.py"]}
|
39,532
|
niEmerance/PythonWeek2IP
|
refs/heads/master
|
/config.py
|
import os
class Config:
    """Base configuration shared by all environments.

    NOTE(review): SOURCE_API_BASE_URL embeds a hardcoded apiKey instead
    of a '{}' placeholder, so the key passed as the second argument to
    .format() in requests.get_sources is silently ignored for that
    endpoint; move the key out of the URL and rotate it.
    """
    SOURCE_API_BASE_URL='https://newsapi.org/v2/sources?category={}&apiKey=ca646ffdcd7c47028f4fd29cd28644da'
    ARTICLE_API_BASE_URL='https://newsapi.org/v2/everything?language=en&sources={}&apiKey={}'
    SOURCE_API_KEY=os.environ.get('SOURCE_API_KEY')
    SECRET_KEY = os.environ.get('SECRET_KEY')

class ProdConfig(Config):
    """Production settings (currently identical to the base config)."""
    pass

class DevConfig(Config):
    """Development settings: enables Flask debug mode."""
    DEBUG = True

# Environment name -> config class, used when creating the app.
config_options = {
    'development':DevConfig,
    'production':ProdConfig
}
|
{"/app/requests.py": ["/app/models.py"], "/app/main/views.py": ["/app/requests.py"]}
|
39,615
|
BrionGahl/Aeth-Bot
|
refs/heads/main
|
/config/example_config.py
|
#!/usr/bin/python3
# Template for config/config.py: copy this file, fill in the real bot
# token, and keep the real config out of version control.
TOKEN = "TOKEN GOES HERE"
PREFIX = "$"
|
{"/main.py": ["/loader.py"]}
|
39,616
|
BrionGahl/Aeth-Bot
|
refs/heads/main
|
/main.py
|
#!/usr/bin/python3
import logging
import discord
from discord.ext import commands
import loader
VERSION = '0.1'
DESCRIPTION = 'AETH-BOT\nAuthor: Brion Gahl\n'

# Command prefix and token come from loader, which reads config/config.py.
bot = commands.Bot(command_prefix=loader.PREFIX, description=DESCRIPTION)

@bot.event
async def on_ready():
    """Log the login and load the raider.io cog once connected."""
    print("LOGGED IN AS {0.user}".format(bot))
    bot.load_extension('cogs.raider')

if __name__ == '__main__':
    bot.run(loader.TOKEN)
|
{"/main.py": ["/loader.py"]}
|
39,617
|
BrionGahl/Aeth-Bot
|
refs/heads/main
|
/cogs/raider.py
|
#!/usr/bin/python3
import discord
from discord.ext import commands
import requests
import json
DEFAULT_REGION = 'us'
DEFAULT_LOCALE = 'en'
RAIDER_IMG = "https://cdnassets.raider.io/images/brand/Icon_FullColor_Square.png"
RAIDER_API = "https://raider.io/api/v1/"
class Raider(commands.Cog):
    """Discord cog exposing raider.io lookups: weekly affixes and M+ score."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(aliases=["affix"])
    @commands.cooldown(1, 60, commands.cooldowns.BucketType.default)
    async def affixes(self, ctx, *args):
        """Post this week's Mythic+ affixes for a region (default 'us')."""
        region = DEFAULT_REGION
        if len(args) == 1:
            region = args[0]
        elif len(args) > 1:
            await ctx.send("Error: too many arguments")
            # Bug fix: previously fell through and queried the API with
            # the default region even though the invocation was rejected.
            return
        parameters = {
            "region": region,
            "locale": DEFAULT_LOCALE
        }
        response = requests.get(RAIDER_API + "mythic-plus/affixes", params=parameters)
        if response.status_code != 200:
            await ctx.send("Error: did you input an incorrect region or locale?")
            return
        affix_data = response.json()['affix_details']
        embed = discord.Embed(title="This Week's Affixes")
        embed.set_thumbnail(url=RAIDER_IMG)
        for affix in affix_data:
            embed.add_field(name=affix['name'], value=affix['description'], inline=False)
        await ctx.send(embed=embed)
        return

    @affixes.error
    async def affixes_error(self, ctx, error):
        """Fallback for affixes: show usage on any command error."""
        if isinstance(error, commands.CommandError): #add more to catch more errors such as cooldown
            await ctx.send("Usage: $affix [REGION]")
        return

    @commands.command(aliases=["raiderscore", "raider"])
    @commands.cooldown(1, 60, commands.cooldowns.BucketType.default)
    async def score(self, ctx, region, realm, char_name):
        """Post a character's current-season raider.io Mythic+ score."""
        parameters = {
            "region": region,
            "realm": realm,
            "name": char_name,
            "fields": "mythic_plus_scores_by_season:current"
        }
        response = requests.get(RAIDER_API + "characters/profile", params=parameters)
        if response.status_code != 200:
            await ctx.send("Error: Something went wrong. Did you input the correct region, realm, or name?")
            return
        # Parse the body once instead of calling response.json() per field.
        data = response.json()
        embed = discord.Embed(title="Raider IO Score")
        embed.set_thumbnail(url=RAIDER_IMG)
        embed.add_field(name=data['name'], value=data['race'], inline=False)
        embed.add_field(name=data['class'], value=data['active_spec_name'], inline=False)
        embed.add_field(name="Score", value=data["mythic_plus_scores_by_season"][0]['scores']['all'], inline=False)
        await ctx.send(embed=embed)
        return

    @score.error
    async def score_error(self, ctx, error):
        """Fallback for score: show usage on any command error."""
        if isinstance(error, commands.CommandError): #add more to catch more errors such as cooldown
            await ctx.send("Usage: $score [REGION] [REALM] [CHARACTER]")
        return
def setup(bot):
    """discord.py extension entry point: register the Raider cog."""
    bot.add_cog(Raider(bot))
|
{"/main.py": ["/loader.py"]}
|
39,618
|
BrionGahl/Aeth-Bot
|
refs/heads/main
|
/loader.py
|
#!/usr/bin/python3
import os
# Locate the user-supplied config (copied from config/example_config.py).
config = os.path.join('.', 'config', 'config.py')

if os.path.isfile(config):
    try:
        from config.config import TOKEN
    except ImportError:
        # Narrowed from a bare `except`, which also hid unrelated errors.
        raise Exception('Cannot find TOKEN variable, is it set?')
    try:
        from config.config import PREFIX
    except ImportError:
        raise Exception('Cannot find PREFIX variable, is it set?')
else:
    # Bug fix: a missing config file used to be silently ignored, so
    # main.py later crashed with an opaque AttributeError on
    # loader.PREFIX / loader.TOKEN.  Fail fast with a clear message.
    raise Exception('Config file not found: copy config/example_config.py to config/config.py')
|
{"/main.py": ["/loader.py"]}
|
39,637
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/is-cantor-number-fast.py
|
def main(n):
    """Recursive base-3 digit check: below 3 the answer is `n < 2`;
    otherwise both the base-3 quotient and the remainder must qualify."""
    if n < 3:
        return n < 2
    return main(n // 3) and main(MOD(n, 3))


def MOD(m, n):
    """Remainder of m divided by n, written via floor division."""
    return m - m // n * n
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,638
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/palindrome.py
|
def MOD(m, n):
    """Remainder of m modulo n."""
    return m - n * (m // n)


def reverse(n):
    """Return n with its decimal digits reversed."""
    return reverseL(n, 0)


def reverseL(n, nR):
    """Tail-recursive helper: shift the digits of n onto accumulator nR."""
    return nR if n == 0 else reverseL(n // 10, 10 * nR + MOD(n, 10))


def isPalindrome(n):
    """True when n reads the same forwards and backwards."""
    return 0 == (n - reverse(n))


def main(number):
    """Print the number and its reversal, then report palindrome-ness."""
    print(number)
    print(reverse(number))
    return isPalindrome(number)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,639
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/k_token.py
|
from enum import Enum
class TokenType(Enum):
    """Kinds of lexical tokens produced by the KLEIN scanner."""
    NUMBER = 1
    KEYWORD = 2
    WORD = 3        # presumably identifiers — confirm with the scanner
    OPERATORS = 4
    DELIMETER = 5   # (sic) spelling kept to match the rest of the codebase
    BOOLEAN = 6
    PRIMITIVE = 7
    EOF = 8
class Token:
    """A single scanner token: a TokenType plus an optional value."""

    def __init__(self, token_type, token_value=None):
        self.token_type = token_type
        self.token_value = token_value

    # Enum members are singletons, so identity checks are equivalent
    # to the original equality checks.
    def is_number(self):
        return self.token_type is TokenType.NUMBER

    def is_keyword(self):
        return self.token_type is TokenType.KEYWORD

    def is_boolean(self):
        return self.token_type is TokenType.BOOLEAN

    def is_operator(self):
        return self.token_type is TokenType.OPERATORS

    def is_delimeter(self):
        return self.token_type is TokenType.DELIMETER

    def is_primitive(self):
        return self.token_type is TokenType.PRIMITIVE

    def is_word(self):
        return self.token_type is TokenType.WORD

    def is_eof(self):
        return self.token_type is TokenType.EOF

    def value(self):
        return self.token_value

    def __repr__(self):
        kind = self.token_type
        if kind is TokenType.EOF:
            return 'end_of_stream'
        if kind is TokenType.NUMBER:
            # Numbers carry an int value, so stringify explicitly.
            return 'number ' + str(self.token_value)
        prefix = {
            TokenType.KEYWORD: 'keyword ',
            TokenType.WORD: 'word ',
            TokenType.BOOLEAN: 'boolean ',
            TokenType.PRIMITIVE: 'primitive ',
            TokenType.OPERATORS: 'operator ',
            TokenType.DELIMETER: 'delimeter ',
        }[kind]
        return prefix + self.token_value
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,640
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/Euclid.py
|
def remainder(a, b):
    """a mod b by repeated subtraction (assumes a >= 0 and b > 0;
    otherwise the recursion does not terminate)."""
    return a if a < b else remainder(a - b, b)


def gcd(a, b):
    """Greatest common divisor via Euclid's algorithm."""
    return a if b == 0 else gcd(b, remainder(a, b))


def main(a, b):
    """Print gcd(a, b)."""
    print(gcd(a, b))
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,641
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/is-cantor-number-bool.py
|
def main(n):
    """Recursive base-3 digit check, boolean-expression form.

    Bug fix: the recursion previously used true division (`main(n / 3)`),
    which fed floats back into main(); for inputs such as 7, MOD of the
    resulting non-integral float reproduces the same value and the
    recursion never terminates (RecursionError).  Floor division keeps
    everything integral, matching the companion
    is-cantor-number-fast implementation.
    """
    return (n < 2) or ((2 < n) and main(n // 3) and main(MOD(n, 3)))

def MOD(m, n):
    """Remainder of m divided by n."""
    return m - m // n * n
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,642
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/sum-factors.py
|
def loopToN( n , current , sum ):
if n == current:
return n - sum
else:
return testAndLoop( n , current , sum )
def testAndLoop( n , current , sum ):
if divides( current , n ):
return printCurrentAndLoop( n , current , sum + current )
else:
return loopToN( n , current + 1 , sum )
def printCurrentAndLoop( n , current , sum ):
print(current)
return loopToN( n , current + 1 , sum )
def divides ( a , b ):
return remainder( b , a ) == 0
def remainder( num , den ):
if num < den:
return num
else:
return remainder( num - den , den )
def main ( n ):
return loopToN( n , 1 , 0 )
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,643
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/lib.py
|
def main(testArgument):
    """Print the integer square root of the argument; return its oddness."""
    print(SQRT(testArgument))
    return ODD(testArgument)

def MAXINT():
    return 2147483647

def MININT():
    return -2147483647 - 1

def LT(p, q):
    return p < q

def EQ(p, q):
    return p == q

def NE(p, q):
    return not EQ(p, q)

def LE(p, q):
    return LT(p, q) or EQ(p, q)

def GE(p, q):
    return not LT(p, q)

def GT(p, q):
    return not LE(p, q)

def OR(p, q):
    return p or q

def AND(p, q):
    # Bug fix: the False branch previously fell through without a
    # `return`, so AND(falsy, x) returned None instead of False.
    if p:
        return q
    else:
        return False

def PLUS(p, q):
    return p + q

def MINUS(p, q):
    return p - q

def TIMES(p, q):
    return p * q

def DIV(p, q):
    return p // q

def NEG(n):
    return -n

def ABS(n):
    if 0 < n:
        return n
    else:
        return NEG(n)

def MOD(m, n):
    # Bug fix: used true division (m / n), so m - m/n * n was always
    # (approximately) 0.0; floor division gives the integer remainder.
    return m - m // n * n

def EXP(m, n):
    """m raised to the non-negative integer power n."""
    if n == 0:
        return 1
    else:
        return m * EXP(m, n - 1)

def ODD(n):
    # Bug fix: the old formula GT(n, DIV(n,2) + DIV(NEG(n),2)) evaluates
    # to True for every positive n under Python's floor division, and
    # returned None for negative n (missing else branch).  Oddness via
    # MOD is correct for all integers.
    return NE(MOD(n, 2), 0)

def SQRT(n):
    """Integer square root of n (the nearer of the two bracketing roots)."""
    return SQRTSEARCH(n, 0, n)

def SQRTSEARCH(n, low, high):
    """Binary-search the square root of n inside [low, high]."""
    if LE(high, low + 1):
        # Interval collapsed: pick whichever endpoint's square is closer.
        if LE(n - TIMES(low, low), TIMES(high, high) - n):
            return low
        else:
            return high
    else:
        return SQRTSPLIT(n, low, high, PLUS(low, high) // 2)

def SQRTSPLIT(n, low, high, mid):
    """Narrow the search interval to the half that brackets sqrt(n)."""
    if LE(mid * mid, n):
        return SQRTSEARCH(n, mid, high)
    else:
        return SQRTSEARCH(n, low, mid)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,644
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/factors.py
|
def main( n ):
loopToN ( n , 1)
def loopToN( n , current ):
if n == current:
return n
else:
return testAndLoop( n , current )
def testAndLoop( n , current ):
if divides( current , n ):
return printAndLoop( n , current)
else:
return loopToN( n , current + 1 )
def printAndLoop( n , current ):
print(current)
return loopToN( n , current + 1 )
def divides( a , b ):
return remainder ( b , a ) == 0
def remainder( num , den ):
if num < den:
return num
else:
return remainder( num - den , den )
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,645
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/is-special.py
|
def MOD(m, n):
    """Remainder of m divided by n."""
    return m - m // n * n


def divides(x, n):
    """True when x divides n exactly."""
    return MOD(n, x) == 0


def count(x, n):
    """Count occurrences of digit x among the decimal digits of n."""
    if n < 10:
        return 1 if x == n else 0
    tail = 1 if x == MOD(n, 10) else 0
    return tail + count(x, n // 10)


def to_binary(n):
    """Binary digits of n spelled as a decimal number (e.g. 5 -> 101)."""
    return 0 if n == 0 else 10 * to_binary(n // 2) + MOD(n, 2)


def apply_definition(binary_n, n):
    """n is 'special' when the counts of 1s and of 0s in its binary
    representation both divide n."""
    return divides(count(1, binary_n), n) and divides(count(0, binary_n), n)


def main(n):
    """Report whether n satisfies the 'special number' definition."""
    return apply_definition(to_binary(n), n)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,646
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/AST_node.py
|
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from src.errors import SemanticError
from src.stack_operations import top, pop, push, push_rule
# Module-level state shared by every AST node during semantic analysis
# and code generation.

# Stack of function names currently being processed; an ErrorNode is
# pushed here to abort the traversal when a semantic error is found.
function_record = []
# Maps function name -> {"functionNode": FunctionNode,
#                        "stack_position": offset of the function in IMEM,
#                        "function_address": absolute IMEM address}.
function_table = {}
# Stack of next-free DMEM offsets for temporary variables; the top entry
# tracks the next open slot in the current function's stack frame.
temp_vars = [0]
# Temp-variable allocation for the frame currently being generated.
def get_open_place( ):
    """Reserve and return the next free temp-variable offset (relative to
    the stack pointer) and advance the counter for the current frame."""
    slot = temp_vars[-1]
    temp_vars[-1] = slot + 1
    return slot
def nodeBuilder(semantic_stack, nodeType):
    """Pop the children of a nodeType node off semantic_stack and build it.

    Called when a grammar rule completes: children were pushed in source
    order, so they are popped in reverse.  Variadic children (actuals,
    formals, body expressions, definitions) are collected while the top
    of the stack has the expected node type.

    Raises ValueError for a nodeType this builder does not handle.
    """
    if nodeType == ExpressionNode:
        # Single child: the expression just parsed.
        expression = top(semantic_stack)
        pop(semantic_stack)
        return nodeType(expression)
    elif issubclass(nodeType, ValueNode):
        value = top(semantic_stack)
        pop(semantic_stack)
        if issubclass(nodeType, BinaryOperator):
            # right hand side is popped first (children were pushed
            # left-to-right)
            rightHandSide = value
            leftHandSide = top(semantic_stack)
            pop(semantic_stack)
            return nodeType(leftHandSide, rightHandSide)
        else:
            return nodeType(value)
    elif nodeType == PrintStatementNode:
        # Collect every pending expression argument.
        expressions = []
        while isinstance(top(semantic_stack), ExpressionNode):
            push(top(semantic_stack), expressions)
            pop(semantic_stack)
        return nodeType(expressions)
    elif nodeType == IfNode:
        # Children were pushed condition, then, else -- popped in reverse.
        elseStatement = top(semantic_stack)
        pop(semantic_stack)
        thenStatement = top(semantic_stack)
        pop(semantic_stack)
        ifCondition = top(semantic_stack)
        pop(semantic_stack)
        return nodeType(ifCondition, thenStatement, elseStatement)
    elif nodeType == ActualsNode:
        actuals = []
        while isinstance(top(semantic_stack), ExpressionNode):
            push(top(semantic_stack), actuals)
            pop(semantic_stack)
        return nodeType(actuals)
    elif nodeType == FunctionCallNode:
        if isinstance(top(semantic_stack), ActualsNode):
            actualsNode = top(semantic_stack)
            pop(semantic_stack)
        else:
            # create empty actualsNode for a zero-argument call
            actualsNode = ActualsNode([])
        functionName = top(semantic_stack)
        pop(semantic_stack)
        return nodeType(functionName, actualsNode)
    elif nodeType == FormalsNode:
        # Each formal is an (identifier, type) pair; the TypeNode sits on
        # top of its identifier.
        parameters = []
        while True:
            if isinstance(top(semantic_stack), TypeNode):
                parameterType = top(semantic_stack)
                pop(semantic_stack)
                identifier = top(semantic_stack)
                pop(semantic_stack)
                push((identifier, parameterType), parameters)
            else:
                break
        return nodeType(parameters)
    elif nodeType == FunctionNode:
        body = top(semantic_stack)
        pop(semantic_stack)
        returnType = top(semantic_stack)
        pop(semantic_stack)
        if (isinstance(top(semantic_stack), FormalsNode)):
            parameters = top(semantic_stack)
            pop(semantic_stack)
        else:
            # create empty formalsNode for a parameterless function
            parameters = FormalsNode([])
        name = top(semantic_stack)
        pop(semantic_stack)
        return nodeType(name, parameters, returnType, body)
    elif nodeType == BodyNode:
        # A body is a run of print statements / expressions / nested bodies.
        expressions = []
        while isinstance(top(semantic_stack), ExpressionNode) or isinstance(top(semantic_stack), PrintStatementNode) or isinstance(top(semantic_stack), BodyNode):
            push(top(semantic_stack), expressions)
            pop(semantic_stack)
        return nodeType(expressions)
    elif nodeType == DefinitionsNode:
        functions = []
        while True:
            if len(semantic_stack) > 0 and isinstance(top(semantic_stack), FunctionNode):
                push(top(semantic_stack), functions)
                pop(semantic_stack)
            else:
                break
        return nodeType(functions)
    elif nodeType == ProgramNode:
        # hand the completed DefinitionsNode to the ProgramNode root
        functionDefinitions = top(semantic_stack)
        pop(semantic_stack)
        return nodeType(functionDefinitions)
    else:
        raise ValueError("Invalid node type in nodeBuilder")
class ErrorNode(object):
    """Wraps a semantic-error message so it can travel through the
    traversal stacks (function_record) back to the parser."""

    def __init__(self, msg):
        # Keep the raw message text; exposed both as .msg and get_message().
        self.msg = msg

    def get_message(self):
        """Return the stored error text."""
        return self.msg
class ASTnode(object):
    """Abstract base for all AST nodes.

    information -- the node's children, in the order process_node visits
    them (populated by each subclass constructor).
    outputType -- the Klein type this node evaluates to ("integer",
    "boolean", or "" before type checking).
    """
    def __init__(self):
        # this information list populates during each node construction
        self.information = []
        # outputType designates the type of values produced by this node
        self.outputType = str()
    def process_node(self, position=0):
        """Depth-first walk that type-checks every child.

        Recurses into each ASTnode child, then runs its typeCheck(); a
        non-None result is wrapped in an ErrorNode and pushed onto
        function_record, and function_record itself is returned as the
        error signal to the parser (the "ad-hoc" checks below).
        """
        # ad-hoc means to push the ErrorNode to the parser---
        if len(function_record) > 0 and isinstance(top(function_record), ErrorNode):
            return function_record
        # NOTE(review): original author was unsure this guard is necessary.
        if position < len(self.information):  # if position is in bounds of information
            evaluate = self.information[position]  # take a value to evaluate
            if isinstance(evaluate, ASTnode):  # if it is a node
                if isinstance(evaluate, FunctionNode):
                    function_record.append(
                        evaluate.get_name())  # tracks which function is currently being processed
                nextNode = evaluate.process_node(0)  # descend to the leaf of this branch
                # ad-hoc means to push the ErrorNode to the parser---
                if len(function_record) > 0 and isinstance(top(function_record), ErrorNode):
                    return function_record
                # this block is used; keep it until a more elegant solution is found
            # traverse each remaining sibling of this branch
            nextInfo = self.process_node(position + 1)
            if isinstance(evaluate, ASTnode):
                # the above recursive descent forces typeCheck to start at a leaf
                errorState = evaluate.typeCheck()
                if errorState != None:
                    push(ErrorNode(errorState), function_record)
                    # ad-hocery: surface the error immediately
                    if isinstance(top(function_record), ErrorNode):
                        return function_record
            # this block is used, similar to the previous block
            return self.information[position]
    def typeCheck(self):
        """Default check: a program must define 'main'. Subclasses override."""
        if 'main' not in function_table:
            return "A main function must be declared."
    def get_outputType(self):
        """Accessor for this node's evaluated Klein type."""
        return self.outputType
# end ASTnode superclass
class ProgramNode(ASTnode):
    """Root of the AST: wraps the DefinitionsNode holding every function."""
    # consideration (original author): put all class definitions WITHIN this
    # node and use it to store the function table / function record.
    def __init__(self, functionDefinitions):
        ASTnode.__init__(self)
        self.definitionsNode = functionDefinitions
        push(self.definitionsNode, self.information)
    def __str__(self):
        # DefinitionsNode.__str__() renders the function list.
        self.returnString = "Program: \n"
        self.returnString += self.definitionsNode.__str__()
        return self.returnString
    def typeCheck(self):
        pass
    def code_gen(self, line):
        """Assemble the full TM program: copy CLI arguments into main's
        frame, set up the stack pointer and return address, jump to main,
        then patch every FUNCTION-CALL placeholder with a real address."""
        program = []
        # This block rebuilds the argument list in DMEM to account for the
        # return address and return value slots of a regular function --
        # an ad-hoc way to pass TM command-line parameters to main.
        r6_address = len(function_table['main']['functionNode'].get_formals())
        main_arguments = []
        index = 1
        while index < r6_address + 1:
            main_arguments = main_arguments + ['LD 1,' + str(index) + '(0)', 'ST 1,' + str(index + 3) + '(6)']
            index += 1
        # perhaps replace index - 1 with r6_address (original TODO)
        program += self.definitionsNode.code_gen(7 + ((index - 1) * 2))
        front_matter = main_arguments + ['LDC 6,1(4)',  # set position of stack top
                                         'LDC 1,2(4)',
                                         'ADD 1,7,1',
                                         'ST 1,0(6)',  # store return address
                                         'LDA 7,' + str(function_table['main']['stack_position'] + 2) + '(7)',
                                         'OUT 0,0,0',
                                         'HALT 0,0,0']
        program = front_matter + program
        # Second pass to find function calls and insert the address of the
        # function (the address may be unknown at the point of the call).
        for index, instruction in enumerate(program):
            if 'FUNCTION-CALL' in instruction:
                # Remove the placeholder instruction from the program...
                program.pop(index)
                # ... and replace with a load-constant that puts the called
                # function's address into the PC (register 7).
                function_address = function_table[instruction.split()[1]]['function_address']
                program.insert(index, 'LDC 7,' + str(function_address) + '(4) : ' + instruction.split()[1] + ' FUNCTION-CALL')
        return program
# end ProgramNode
class DefinitionsNode(ASTnode):
    """Holds every FunctionNode and registers them in function_table."""
    def __init__(self, functionsList):
        ASTnode.__init__(self)
        self.functions = functionsList
        self.information = self.functions
        # Name of a duplicated function, if any (reported by typeCheck).
        self.functionSwitch = None
        # build the function table so function calls can be validated
        for function in self.functions:
            functionName = function.get_name()
            if functionName not in function_table:
                function_table[functionName] = {}
                function_table[functionName]["functionNode"] = function
            else:
                # NOTE(review): only the most recently seen duplicate name
                # is remembered; earlier duplicates are overwritten.
                self.functionSwitch = functionName
                continue
    def __str__(self):
        self.returnString = str()
        for function in reversed(self.functions):
            self.returnString += str(function) + "\n"
        return self.returnString
    def typeCheck(self):
        """Report a duplicate function declaration, if one was seen."""
        if self.functionSwitch != None:
            msg = 'Duplicate Function Declaration: {}'
            msg = msg.format(self.functionSwitch)
            return msg
    def code_gen(self, line):
        """Emit code for every function, recording each one's IMEM offset
        (stack_position) and absolute address (function_address)."""
        program = []
        for function in self.functions:
            function_table[function.get_name()]["stack_position"] = len(program)
            function_table[function.get_name()]['function_address'] = len(program) + line
            program += function.code_gen(line + len(program))
        # The caller (ProgramNode) prepends the jump to 'main'.
        return program
# end DefinitionsNode
class FunctionNode(ASTnode):
    """One Klein function: name, formals, declared return type, and body."""
    def __init__(self, name, parameters, returnType, body):
        ASTnode.__init__(self)
        self.bodyNode = body
        self.typeNode = returnType
        self.formals = parameters
        self.identifierNode = FunctionIdentifierNode(name)
        self.outputType = self.typeNode.get_outputType()
        # Children pushed in reverse visiting order for process_node.
        push(self.bodyNode, self.information)
        push(self.typeNode, self.information)
        push(self.formals, self.information)
        push(self.identifierNode, self.information)
    def get_name(self):
        """The function's name as a plain string."""
        return self.identifierNode.__str__()
    def get_formals(self):
        """The (identifierNode, typeNode) pairs of the parameter list."""
        return self.formals.get_formals()
    def __str__(self):
        return "function " + str(self.identifierNode) + " " + str(self.formals) + " " + str(self.typeNode) + " \n" + str(self.bodyNode) + " "
    def check_formals(self):
        """Return an error message if two formals share a name, else None."""
        current_function = function_record[-1]
        formal_list = []
        for formal in self.formals.get_formals():
            if formal[0].get_value() in formal_list:
                msg = "Duplicate parameter {} in function {}."
                msg = msg.format(formal[0].get_value(), current_function)
                return msg
            push(formal[0].get_value(), formal_list)
    def typeCheck(self):
        """Body type must match the declared return type; formals unique."""
        if self.outputType != self.bodyNode.get_outputType():
            msg = "Failed typecheck on FunctionNode: {}\n"
            msg += "Make sure function's body has the same output "
            msg += "as the function's declared type:\n"
            msg += "Function {}'s declared type: {}\n"
            msg += "Body output type: {}\n"
            bodyOutputType = self.bodyNode.outputType
            # An empty body type is rendered as "None" in the message.
            if self.bodyNode.outputType == "":
                self.bodyNode.outputType = "None"
            msg = msg.format(self.identifierNode.get_value(), self.identifierNode.get_value(), self.outputType, self.bodyNode.outputType)
            return msg
        msg = self.check_formals()
        return msg
    def code_gen(self, line):
        """Emit this function's TM code; the final instruction loads the
        caller's return address from 0(6) into the PC."""
        current_function = str(self.identifierNode)
        self.start_address = line
        program = []
        # frame_size is the stack-frame size, which varies per function
        # (1 slot for return address, 1 for stack pointer, one per argument)
        frame_size = 3 + len(self.formals.get_formals())
        # Start a fresh temp-variable counter above the frame's fixed slots.
        temp_vars.append(frame_size)
        program += self.bodyNode.code_gen(program, line)
        program.append('LD 7,0(6) : ' + current_function + ' FunctionNode line return')
        # Function generated; discard this frame's temp-var counter.
        temp_vars.pop()
        return program
# end FunctionNode
class FormalsNode(ASTnode):
    """The formal-parameter list: (identifierNode, typeNode) tuples."""
    def __init__(self, parameters):
        ASTnode.__init__(self)
        self.formals = []
        # inserting a set of tuples: (identifierNode, typeNode)
        # NOTE(review): this consumes (empties) the caller's list via
        # top/pop -- presumably intentional hand-off; confirm.
        while len(parameters) > 0:
            push(top(parameters), self.formals)
            pop(parameters)  # perhaps change this!! (original TODO)
        self.information = self.formals
    def __str__(self):
        self.returnString = " ("
        for pair in self.formals:
            self.returnString += str(pair[0]) + " : " + str(pair[1])
            # NOTE(review): identity/equality check against the last element
            # mis-renders when duplicate pairs exist (original's "!!!").
            if pair != self.formals[-1]:
                self.returnString += ", "
        self.returnString += ")"
        return self.returnString
    def get_formals(self):
        return self.formals
# end FormalsNode
class BodyNode(ASTnode):
    """A function body: print statements plus value expression(s)."""
    def __init__(self, expressions):
        ASTnode.__init__(self)
        self.expressions = expressions
        self.information = self.expressions
    def __str__(self):
        returnString = str()
        for expression in self.expressions:
            returnString += str(expression) + "\n"
        returnString += "\n"
        return returnString
    def typeCheck(self):
        """The body's type is its first value expression's type; every
        later value expression must agree."""
        self.outputType = str()
        expressionSwitch = 0
        for node in self.expressions:
            if isinstance(node, ExpressionNode) or isinstance(node, BodyNode):
                if expressionSwitch == 0:
                    self.outputType = node.get_outputType()
                    expressionSwitch = 1
                elif expressionSwitch == 1:
                    if node.get_outputType() != self.outputType:
                        msg = "Failed typecheck on BodyNode"
                        # NOTE(review): this format() call is a no-op (result
                        # discarded and the template has no placeholders).
                        msg.format()
                        return msg
    def code_gen(self, program, line):
        # NOTE(review): the incoming 'program' argument is ignored and
        # rebuilt locally; expressions are emitted in reverse list order.
        program = []
        for expression in reversed(self.expressions):
            program += expression.code_gen(program, line)
            line += 1
        return program
# end BodyNode
class ExpressionNode(ASTnode):
    """Wraps a single expression; mirrors its type and DMEM result slot."""
    def __init__(self, expression):
        ASTnode.__init__(self)
        self.expression = expression
        push(self.expression, self.information)
    def __str__(self):
        return " " + str(self.expression) + " "
    def typeCheck(self):
        # An expression's type is whatever it wraps.
        self.outputType = self.expression.get_outputType()
    def code_gen(self, program, line):
        program = self.expression.code_gen(program, line)
        # Expose the wrapped expression's result slot as our own.
        self.place = self.expression.place
        return program
# end ExpressionNode
class ActualsNode(ASTnode):
    """Actual arguments of a function call (a list of ExpressionNodes)."""
    def __init__(self, actuals_list):
        ASTnode.__init__(self)
        self.actuals = []  # list of expression nodes
        # NOTE(review): consumes (empties) the caller's list via top/pop.
        while (len(actuals_list) > 0):
            push(top(actuals_list), self.actuals)
            pop(actuals_list)
        self.information = self.actuals
    def __str__(self):
        self.returnString = str()
        for i in self.actuals:
            self.returnString += str(i)
            # NOTE(review): comparison against the last element mis-renders
            # when duplicate expressions exist (original's "!!!").
            if i != self.actuals[-1]:
                self.returnString += ", "
        return self.returnString
# end ActualsNode
class FunctionCallNode(ASTnode):
    """A call site: function identifier plus its ActualsNode."""
    def __init__(self, functionName, arguments):
        ASTnode.__init__(self)
        self.actualsNode = arguments
        self.identifierNode = FunctionIdentifierNode(functionName)
        push(self.actualsNode, self.information)
        push(self.identifierNode, self.information)
    def __str__(self):
        self.returnString = str(self.identifierNode)
        self.returnString += " ("
        self.returnString += str(self.actualsNode)
        self.returnString += ")"
        return self.returnString
    def typeCheck(self):
        """The callee must be declared; the call's type is the callee's."""
        current_function = function_record[-1]
        if self.identifierNode.get_value() in function_table:
            self.outputType = function_table[self.identifierNode.get_value()]["functionNode"].get_outputType()
        else:
            msg = "Function call {} is undefined.\n"
            msg += "This function call occurs in {}.\n"
            msg = msg.format(self.identifierNode.get_value(), current_function)
            return msg
    def code_gen(self, a, b):
        """Build the callee's activation record and emit the call.

        Frame layout (relative to self.place): +0 return address / later
        the returned value, +1 saved stack pointer, actuals from +3
        (slot +2 presumably reserved -- confirm against FunctionNode).
        """
        program = []
        # First generate code for the expressions in the actuals, saving
        # the values in temporary spots of the current stack frame.
        for actual in self.actualsNode.actuals:
            program += actual.code_gen(a, b)
        # Next open memory slot in the stack: the starting point for the
        # callee's activation record.
        self.place = get_open_place()
        # Copy each computed actual value into the callee's stack frame.
        for index, actual in enumerate(self.actualsNode.actuals):
            program.append('LD 5,' + str(actual.place) + '(6) ; load actual #' + str(index))
            program.append('ST 5,' + str(self.place + 3 + index) + '(6)')
        # Compute and copy the return address into the new stack frame.
        program.append('LDC 1,4(4)')
        program.append('ADD 1,7,1')
        program.append('ST 1,' + str(self.place) + '(6)')
        # Save the current stack pointer into the activation record.
        program.append('ST 6,' + str(self.place + 1) + '(6)')
        # Point the stack pointer at the start of the new activation record.
        program.append('LDA 6,' + str(self.place) + '(6)')
        # Placeholder; ProgramNode's second pass replaces it with a jump to
        # the callee's IMEM address.
        program.append('FUNCTION-CALL ' + self.identifierNode.get_value())
        # On return the callee has left its value in register 0 and set the
        # PC back here.  Restore the stack pointer to its old value...
        program.append('LD 6,1(6)')
        # ...and store the returned value into this frame.
        program.append('ST 0,' + str(self.place) + '(6)')
        return program
# NOTE(review, original): may accept formals of the same name in a declaration
# end FunctionCallNode
class PrintStatementNode(ASTnode):
    """print(expr...): evaluates each expression, then emits OUT of r0."""
    def __init__(self, expressions_list):
        ASTnode.__init__(self)
        self.expressions = expressions_list  # list of expression nodes
        self.information = self.expressions
    def __str__(self):
        self.returnString = "print("
        for expression in self.expressions:
            self.returnString += str(expression)
        self.returnString += ")"
        return self.returnString
    def code_gen(self, program, line):
        # NOTE(review): the incoming 'program' argument is ignored.
        program = []
        for expr in self.expressions:
            program += expr.code_gen(program, line)
        # OUT prints register 0 -- the value left by the expression code.
        program.append('OUT 0,0,0 : PrintStatementNode output')
        return program
# end PrintStatementNode
class IfNode(ASTnode):
    """if/then/else expression; both branches must share one type."""
    def __init__(self, ifExpression, thenExpression, elseExpression):
        ASTnode.__init__(self)
        self.expr2 = elseExpression
        self.expr1 = thenExpression
        self.condition = ifExpression
        push(self.expr2, self.information)
        push(self.expr1, self.information)
        push(self.condition, self.information)
    def __str__(self):
        self.returnString = "if " + str(self.condition) + "\n"
        self.returnString += "then " + str(self.expr1) + "\n"
        self.returnString += "else " + str(self.expr2) + "\n"
        return self.returnString
    def typeCheck(self):
        """Condition must be boolean; then/else types must agree, and that
        common type becomes this node's type."""
        current_function = function_record[-1]
        if self.condition.get_outputType() == "boolean":
            if self.expr1.get_outputType() == self.expr2.get_outputType():
                self.outputType = self.expr1.get_outputType()
            else:
                msg = "If statement in function {} has inconsistent output type:\n"
                msg += "Then clause output type: {}\n"
                msg += "Else clause output type: {}\n"
                msg = msg.format(current_function, self.expr1.get_outputType(), self.expr2.get_outputType())
                return msg
        else:
            msg = "If statement requires a boolean conditional."
            # NOTE(review): format() here is a no-op (no placeholders).
            msg = msg.format()
            return msg
    def code_gen(self, program, line):
        # An if statement comprises a condition, a then clause and an else
        # clause, generated separately and stitched with relative jumps.
        condition_code = self.condition.code_gen(program, line)
        then_code = self.expr1.code_gen(program, line)
        else_code = self.expr2.code_gen(program, line)
        self.place = get_open_place()
        # evaluate the condition first
        program = condition_code
        # JEQ skips the then clause (and its trailing jump) when the
        # condition register holds 0, i.e. false.
        else_start = str(len(then_code) + 1)
        program = program + ['LD 0,' + str(self.condition.place) + '(6) : result of an if-statement condition',
                             'JEQ 0,' + else_start + '(7)']
        # execute the then clause
        program = program + then_code
        # then jump over the else clause
        else_end = str(len(else_code))
        program = program + ['LDA 7,' + else_end + '(7) : jump to next evaluation']
        lineXPosition = len(program)
        # execute the else clause
        program = program + else_code
        # Tag the else clause's first instruction as the JEQ target.
        # NOTE(review): indexes past the end if else_code is empty -- confirm
        # the grammar guarantees a non-empty else clause.
        lineX = program[lineXPosition]
        newLineX = lineX + "; line x"
        program[lineXPosition] = newLineX
        # store the result of the if statement
        program = program + ['ST 0,' + str(self.place) + '(6)']
        return program
    # end IfNode.code_gen()
# end IfNode
# --- Expressions have values... --- #
class ValueNode(ASTnode):
    """Base for leaf/operator nodes that carry a literal or symbol value."""
    def __init__(self, value):
        ASTnode.__init__(self)
        self.value = value
        push(self.value, self.information)
    def __str__(self):
        self.returnString = str(self.value)
        return self.returnString
    def get_value(self):
        """Accessor for the carried value."""
        return self.value
# end ValueNode
class IdentifierNode(ValueNode):
    """A reference to a formal parameter inside a function body."""
    def __init__(self, name):
        ValueNode.__init__(self, name)
    def typeCheck(self):
        """Resolve this identifier against the current function's formals.

        On success records the formal's declared type (outputType) and its
        index in the parameter list (formal_position); otherwise returns
        an error-message string.  If the name appears more than once, the
        last match wins (duplicates are reported by FunctionNode).
        """
        found = False
        current_function = function_record[-1]
        formals = function_table[current_function]["functionNode"].get_formals()
        for index, formal in enumerate(formals):
            if self.value == formal[0].get_value():
                found = True
                self.outputType = formal[1].get_value()
                self.formal_position = index
        if not found:
            msg = "Identifier {} referred in {} has no declaration in function definition."
            msg = msg.format(self.value, current_function)
            return msg
    def code_gen(self, a, b):
        """Emit a load of this formal's value from the current stack frame.

        Formals start at frame offset 3 (after return address, saved stack
        pointer, and a third slot -- presumably the return value; confirm
        against FunctionCallNode's frame layout).
        """
        # Fix: the original had an unreachable second 'return []' after this
        # return statement; the dead code has been removed.
        self.place = 3 + self.formal_position
        return ['LD 0,' + str(self.place) + '(6) : identifier load']
# end IdentifierNode
class FunctionIdentifierNode(IdentifierNode):
    """Identifier naming a function (not a formal parameter)."""

    def __init__(self, node):
        # Unwrap the raw identifier's stored value directly.
        super().__init__(node.value)

    def typeCheck(self):
        # Function names are validated against function_table elsewhere
        # (FunctionCallNode.typeCheck), so nothing to do here.
        pass
# end FunctionIdentifierNode
class NumberLiteralNode(ValueNode):
    """An integer literal; always of type 'integer'."""

    def __init__(self, number):
        super().__init__(number)
        self.outputType = "integer"

    def code_gen(self, program, line):
        """Load the constant into register 0 and spill it to a freshly
        reserved temp slot in the current stack frame."""
        self.place = get_open_place()
        load_const = 'LDC 0,' + str(self.value) + '(4) : NumberLiteralNode constant'
        spill = 'ST 0,' + str(self.place) + '(6) : NumberLiteralNode storage'
        return [load_const, spill]
# end NumberLiteralNode
class BooleanLiteralNode(ValueNode):
    """A 'true'/'false' literal; always of type 'boolean'."""

    def __init__(self, boolValue):
        super().__init__(boolValue)
        self.outputType = "boolean"

    def code_gen(self, program, line):
        """Load 1 for 'true' / 0 for 'false' into register 0 and spill it
        to a freshly reserved temp slot."""
        self.place = get_open_place()
        bit = {"true": "1", "false": "0"}[self.value]
        return ['LDC 0,' + bit + '(4) : BooleanLiteralNode value',
                'ST 0,' + str(self.place) + '(6) : BooleanLiteralNode storage']
# end BooleanLiteralNode
class TypeNode(ValueNode):
    """A declared type name; its outputType is the type name itself."""

    def __init__(self, typeValue):
        super().__init__(typeValue)
        self.outputType = typeValue
# end TypeNode
# The remaining nodes are all subclasses of this Operator node.
class Operator(ValueNode):
    """Shared base for operator nodes; subclasses set operatorType."""

    def __init__(self, operand):
        super().__init__(operand)
        # Concrete subclasses overwrite this with their symbol/keyword.
        self.operatorType = str()
# end Operator superclass
class UnaryOperator(Operator):
    """Base for one-operand operators; also parents BinaryOperator."""

    def __init__(self, operand):
        super().__init__(operand)
        self.operatorType = "UnaryOperator"

    def __str__(self):
        self.returnString = " " + self.operatorType + " " + str(self.value) + " "
        return self.returnString

    def build_error(self):
        """Type-mismatch message for a one-operand expression."""
        template = "{} expression within function {} expecting {}, received {}({})."
        return template.format(self.operatorType, function_record[-1],
                               self.outputType, self.value, self.value.outputType)
# end UnaryOperator superclass
# -- # Unary Operators:
class NotNode(UnaryOperator):
    """Logical negation: boolean -> boolean."""
    def __init__(self, operand):
        UnaryOperator.__init__(self, operand)
        self.operatorType = "not"
        self.outputType = "boolean"
    def typeCheck(self):
        if self.value.outputType != "boolean":
            return self.build_error()
    def code_gen(self, program, line):
        """Evaluate the operand, then store 1 if it was zero, else 0."""
        program = self.value.code_gen(program, line)
        self.place = get_open_place()
        program = program + ['LD 0,' + str(self.value.place) + '(6)',
                             # if reg 0 is 0: store 1; else: store 0
                             'JEQ 0,3(7)',  # operand false -> jump to the store-1 path
                             'LDC 0,0(4)',  # operand true -> result is 0
                             'ST 0,' + str(self.place) + '(6)',
                             'LDA 7,1(7)',  # skip the LDC of the true path
                             'LDC 0,1(4)',
                             'ST 0,' + str(self.place) + '(6)'
                             ]
        return program
# end NotNode
class NegationNode(UnaryOperator):
    """Unary arithmetic negation: integer -> integer."""

    def __init__(self, operand):
        super().__init__(operand)
        self.operatorType = "negate"
        self.outputType = "integer"

    def typeCheck(self):
        """The operand must be an integer."""
        if self.value.outputType != "integer":
            return self.build_error()

    def code_gen(self, program, line):
        """Evaluate the operand, subtract it from register 4 (presumably
        holding zero -- confirm TM register conventions), spill result."""
        operand_code = self.value.code_gen(program, line)
        self.place = get_open_place()
        return operand_code + [
            'LD 0,' + str(self.value.place) + '(6)',
            'SUB 0,4,0',
            'ST 0,' + str(self.place) + '(6)',
        ]
# end NegationNode
class BinaryOperator(UnaryOperator):
    """Base for two-operand operators.

    value1 holds the LEFT operand; the inherited value holds the RIGHT.
    """
    def __init__(self, leftOperand, rightOperand):
        # NOTE(review): this assignment is immediately clobbered by
        # UnaryOperator.__init__ below (and again by concrete subclasses).
        self.operatorType = "BinaryOperator"
        UnaryOperator.__init__(self, rightOperand)
        self.value1 = leftOperand
        push(self.value1, self.information)
        # Alias so callers of get_value() receive both operands.
        self.get_value = self.get_values
    def __str__(self):
        self.returnString = str(self.value1) + " "
        self.returnString += self.operatorType + " "
        self.returnString += str(self.value) + " "
        return self.returnString
    def get_values(self):
        """Return (left operand, right operand)."""
        return (self.value1, self.value)
    def build_error(self):
        """Type-mismatch message naming both operands and their types."""
        current_function = function_record[-1]
        msg = "{} expression within function {} expecting {}s, received {}({}) and {}({})."
        msg = msg.format(self.operatorType, current_function, self.outputType, self.value1, self.value1.outputType, self.value, self.value.outputType)
        return msg
# end BinaryOperator superclass
class BooleanConnective(BinaryOperator):
    """Base for two-operand boolean operators ('and'/'or').

    NotNode is deliberately excluded (it is unary).
    """

    def __init__(self, leftOperand, rightOperand):
        super().__init__(leftOperand, rightOperand)
        self.outputType = "boolean"

    def typeCheck(self):
        """Both operands must be boolean."""
        both_boolean = (self.value.outputType == "boolean"
                        and self.value1.outputType == "boolean")
        if not both_boolean:
            return self.build_error()
# end BooleanConnective superclass
class BooleanComparison(BinaryOperator):
    """Base for comparisons ('<', '='): integer operands, boolean result."""

    def __init__(self, leftOperand, rightOperand):
        super().__init__(leftOperand, rightOperand)
        self.outputType = "boolean"

    def typeCheck(self):
        """Both operands must be integers."""
        both_integer = (self.value.outputType == "integer"
                        and self.value1.outputType == "integer")
        if not both_integer:
            return self.build_error()
# end BooleanComparison superclass
class ArithmeticOperation(BinaryOperator):
    """Base for +, -, *, /: integer operands, integer result.

    NegationNode is deliberately excluded (it is unary).
    """
    def __init__(self, leftOperand, rightOperand):
        BinaryOperator.__init__(self, leftOperand, rightOperand)
        self.outputType = "integer"
    def typeCheck(self):
        if self.value.outputType != "integer" or self.value1.outputType != "integer":
            return self.build_error()
    def code_gen(self, program, line):
        """Evaluate both operands, apply the TM opcode for this operator's
        symbol, and spill the result to a fresh temp slot."""
        opCode_dict = {'+': 'ADD', '-': 'SUB', '*': 'MUL', '/': 'DIV'}
        left, right = super().get_values()
        # Generate code for the left and right-hand sides (this also sets
        # the 'place' values for both operands).
        program = left.code_gen(program, line) + right.code_gen(program, line)
        # Reserve the slot for this operation's result.
        self.place = get_open_place()
        # Load both operand values, combine into register 0, and store.
        program = program + ['LD 0,' + str(left.place) + '(6) : ArithmeticOperation left operand',
                             'LD 1,' + str(right.place) + '(6) : ArithmeticOperation right operand',
                             opCode_dict[self.operatorType] + ' 0,0,1',
                             'ST 0,' + str(self.place) + '(6)']
        return program
# end ArithmeticOperation superclass
class LessThanNode(BooleanComparison):
    """left < right, yielding 1 (true) or 0 (false)."""
    def __init__(self, leftOperand, rightOperand):
        BooleanComparison.__init__(self, leftOperand, rightOperand)
        self.operatorType = "<"
        self.outputType = "boolean"
    def code_gen(self, program, line):
        # NOTE(review): get_values() returns (left, right), so this unpack
        # swaps the names -- 'right' is the source-level LEFT operand and
        # 'left' the source-level RIGHT.  The register math below (r2 =
        # r1 - r0 = source-left - source-right; JLT means true) compensates,
        # so the emitted test is still source-left < source-right.
        right, left = super().get_values()
        program = left.code_gen(program, line) + right.code_gen(program, line)
        self.place = get_open_place()
        program = program + ['LD 0,' + str(left.place) + '(6) : LessThanNode left operand',
                             'LD 1,' + str(right.place) + '(6) : LessThanNode right operand',
                             # difference negative -> comparison is true
                             'SUB 2,1,0',
                             'JLT 2,3(7) : jump to next line x',
                             'LDC 0,0(4) : LessThanNode evaluates to false',
                             'ST 0,' + str(self.place) + '(6)',
                             'LDA 7,2(7) : jump to next evaluation',
                             'LDC 0,1(4) : line x; LessThanNode evaluates to true',
                             'ST 0,' + str(self.place) + '(6)']
        return program
# end LessThanNode
class EqualToNode(BooleanComparison):
    """left = right, yielding 1 (true) or 0 (false)."""
    def __init__(self, leftOperand, rightOperand):
        BooleanComparison.__init__(self, leftOperand, rightOperand)
        self.operatorType = "="
        self.outputType = "boolean"
    def code_gen(self, program, line):
        # NOTE(review): this unpack swaps left/right names relative to
        # get_values(); harmless here because equality is symmetric.
        right, left = super().get_values()
        program = left.code_gen(program, line) + right.code_gen(program, line)
        self.place = get_open_place()
        program = program + ['LD 0,' + str(left.place) + '(6) : EqualNode left operand',
                             'LD 1,' + str(right.place) + '(6) : EqualNode right operand',
                             # non-zero difference -> operands are unequal
                             'SUB 2,0,1',
                             'JNE 2, 3(7) : jump to next line x',
                             'LDC 0,1(4) : EqualNode evaluates to true',
                             'ST 0,' + str(self.place) + '(6)',
                             'LDA 7,2(7) : jump to next evaluation',
                             'LDC 0,0(4) : line x; EqualNode evaluates to false',
                             'ST 0,' + str(self.place) + '(6)']
        return program
# end EqualToNode
class OrNode(BooleanConnective):
    """Logical 'or'; both operands are always evaluated (no short-circuit)."""
    def __init__(self, leftOperand, rightOperand):
        BooleanConnective.__init__(self, leftOperand, rightOperand)
        self.operatorType = "or"
        self.outputType = "boolean"
    def code_gen(self, program, line):
        # NOTE(review): this unpack swaps left/right names relative to
        # get_values(); 'or' is symmetric, so the result is unaffected.
        right, left = super().get_values()
        program = left.code_gen(program, line) + right.code_gen(program, line)
        self.place = get_open_place()
        program = program + ['LD 0,' + str(left.place) + '(6) : OrNode left operand',
                             'LD 1,' + str(right.place) + '(6) : OrNode right operand',
                             'JNE 0,4(7) : jump to next line x',  # first operand true -> result 1
                             'JNE 1,3(7) : jump to next line x',  # second operand true -> result 1
                             'LDC 0,0(4) : OrNode evaulates to false',
                             'ST 0,' + str(self.place) + '(6)',
                             'LDA 7,2(7) : jump to next evaluation',
                             'LDC 0,1(4) : line x; OrNode evaulates to true',
                             'ST 0,' + str(self.place) + '(6)']
        return program
# end OrNode
class PlusNode(ArithmeticOperation):
    """Integer addition; code generation lives in ArithmeticOperation."""

    def __init__(self, leftOperand, rightOperand):
        super().__init__(leftOperand, rightOperand)
        self.operatorType = "+"
        self.outputType = "integer"
# end PlusNode
class MinusNode(ArithmeticOperation):
    """Integer subtraction; code generation lives in ArithmeticOperation."""

    def __init__(self, leftOperand, rightOperand):
        super().__init__(leftOperand, rightOperand)
        self.operatorType = "-"
        self.outputType = "integer"
# end MinusNode
class AndNode(BooleanConnective):
    """Logical 'and'; both operands are always evaluated (no short-circuit)."""
    def __init__(self, leftOperand, rightOperand):
        BooleanConnective.__init__(self, leftOperand, rightOperand)
        self.operatorType = "and"
        self.outputType = "boolean"
    def code_gen(self, program, line):
        # NOTE(review): this unpack swaps left/right names relative to
        # get_values(); 'and' is symmetric, so the result is unaffected.
        right, left = super().get_values()
        program = left.code_gen(program, line) + right.code_gen(program, line)
        self.place = get_open_place()
        program = program + ['LD 0,' + str(left.place) + '(6) : AndNode left operand',
                             'LD 1,' + str(right.place) + '(6) : AndNode right operand',
                             'JEQ 0,4(7) : jump to next line x',  # first operand false -> result 0
                             'JEQ 1,3(7) : jump to next line x',  # second operand false -> result 0
                             'LDC 0,1(4) : AndNode evaluates to true',
                             'ST 0,' + str(self.place) + '(6)',
                             'LDA 7,2(7) : jump to next evaulation',
                             'LDC 0,0(4) : line x; AndNode evaulates to false',
                             'ST 0,' + str(self.place) + '(6)']
        return program
# end AndNode
class MultiplyNode(ArithmeticOperation):
    """Integer multiplication; code generation lives in ArithmeticOperation."""

    def __init__(self, leftOperand, rightOperand):
        super().__init__(leftOperand, rightOperand)
        self.operatorType = "*"
        self.outputType = "integer"
# end MultiplyNode
class DivisionNode(ArithmeticOperation):
    """Integer division; code generation lives in ArithmeticOperation."""

    def __init__(self, leftOperand, rightOperand):
        super().__init__(leftOperand, rightOperand)
        self.operatorType = "/"
        self.outputType = "integer"
# end DivisionNode
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,647
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/print-one.py
|
def main():
    """Print the constant 1 and return it (Klein 'print-one' equivalent)."""
    value = 1
    print(value)
    return value


print(main())
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,648
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/scanner.py
|
from src.k_token import Token, TokenType
from src.errors import LexicalError
# Reserved words of the Klein language; the scanner classifies any scanned
# word found in this list as a KEYWORD token.
keywords = ["function", "boolean", "if", "then", "else", "not", "and", "or", "integer", "print"]
# Boolean literals get their own BOOLEAN token type rather than KEYWORD.
boolean = ["true", "false"]
# take print out of keywords if we implement this:
# primitive = ["main", "print"]
class Scanner:
    """Read Klein tokens from a program string.

    Supports one-token lookahead via peek()/next_token().  self.pos is the
    index of the next unread character and self.line the current 1-based
    line number.

    Bug fixes relative to the original:
      * get_word(): the continuation test is now parenthesized.  Because
        ``and`` binds tighter than ``or``, the old ``a and b or c`` form
        bypassed the bounds check and raised IndexError for any word that
        ended exactly at end-of-input.
      * skip_comment(): the two one-character lookaheads are now bounds
        guarded, so a program ending in '(' or an unterminated comment
        ending in '*' no longer raises IndexError.
    """

    def __init__(self, program_str):
        self.program_str = program_str   # full program text being scanned
        self.pos = 0                     # index of next unread character
        self.lookahead = None            # cached token from peek(), if any
        self.line = 1                    # current (1-based) line number

    def peek(self):
        """Return the next token without consuming it."""
        if not self.lookahead:
            self.lookahead = self.get_next_token()
        return self.lookahead

    def next_token(self):
        """Return and consume the next token."""
        if self.lookahead:
            answer = self.lookahead
            self.lookahead = None
            return answer
        else:
            return self.get_next_token()

    # --------------------------------------------------------
    def get_next_token(self):
        """Scan and return the next Token; reports a LexicalError on bad input."""
        self.skip_irrelevant()
        # End of input.
        if self.pos >= len(self.program_str):
            return Token(TokenType.EOF)
        # A lone zero is its own NUMBER token.
        # NOTE(review): a following digit/letter (e.g. "07") is not rejected
        # here -- confirm whether that should be a lexical error.
        if self.program_str[self.pos] == '0':
            self.pos += 1
            return Token(TokenType.NUMBER, "0")
        # Single-character operators.
        if self.program_str[self.pos] in '=+-<*/':
            ch = self.program_str[self.pos]
            self.pos += 1
            return Token(TokenType.OPERATORS, ch)
        # Single-character delimiters.  ('(*' comment openers were already
        # consumed by skip_irrelevant, so '(' here is a real delimiter.)
        if self.program_str[self.pos] in '(),:':
            ch = self.program_str[self.pos]
            self.pos += 1
            return Token(TokenType.DELIMETER, ch)
        # Keywords, boolean literals and identifiers.
        if self.program_str[self.pos].isalpha():
            word = self.get_word()
            if word in keywords:
                return Token(TokenType.KEYWORD, word)
            elif word in boolean:
                return Token(TokenType.BOOLEAN, word)
            else:
                return Token(TokenType.WORD, word)
        # Multi-digit numbers (leading digit 1-9; lone "0" handled above).
        if self.program_str[self.pos] in '123456789':
            number = self.get_number()
            return Token(TokenType.NUMBER, number)
        # No rule matched: report a lexical error.  Constructing a
        # LexicalError writes the log file and raises from its __init__.
        msg = 'invalid character: {} on line {}'.format(self.program_str[self.pos], self.line)
        LexicalError(msg, self.program_str, self.pos)

    # --------------------------------------------------------
    def skip_whitespace(self):
        """Consume whitespace; return -1 if input remained at entry, 1 at EOF."""
        if self.pos < len(self.program_str):
            while self.pos < len(self.program_str) and \
                    self.is_whitespace(self.program_str[self.pos]):
                self.pos += 1
            return -1
        else:
            return 1

    def is_whitespace(self, ch):
        """True for blank/newline/tab/CR; also counts lines as '\\n' is seen."""
        if ch == '\n':
            self.line += 1
        return ch in ' \n\t\r'

    def get_word(self):
        """Consume and return the identifier/keyword starting at self.pos.

        Raises LexicalError when the identifier exceeds 256 characters.
        """
        start = self.pos
        # Parenthesized on purpose -- see the class docstring bug note.
        while self.pos < len(self.program_str) and \
                (self.program_str[self.pos].isalpha() or
                 self.program_str[self.pos] in "0123456789_"):
            self.pos += 1
        if (self.pos - start) > 256:
            msg = 'IDENTIFIER exceeds 256 character limit on line {} \n IDENTIFIER: {}'
            msg = msg.format(self.line, self.program_str[start: self.pos])
            raise LexicalError(msg, self.program_str, self.pos)
        return self.program_str[start: self.pos]

    def get_number(self):
        """Consume and return the integer literal starting at self.pos.

        Raises LexicalError when the value exceeds 2147483647.
        NOTE(review): this returns an int while the lone-zero case above
        returns the string "0" -- confirm downstream code tolerates both.
        """
        start = self.pos
        while self.pos < len(self.program_str) and \
                self.program_str[self.pos] in '0123456789':
            self.pos += 1
        if int(self.program_str[start: self.pos]) > 2147483647:
            msg = "INTEGER out of bounds on line {} \n INTEGER: {} \n must be within range +/- 2147483647"
            msg = msg.format(self.line, self.program_str[start: self.pos])
            raise LexicalError(msg, self.program_str, self.pos)
        return int(self.program_str[start: self.pos])

    def skip_comment(self):
        """Consume one '(*' ... '*)' comment if one starts at self.pos.

        Returns -1 when a comment was consumed (or recovered from at end of
        input), 1 when already at end of input, and 2 when the text at
        self.pos is not a comment opener.
        """
        if self.pos < len(self.program_str):
            if self.program_str[self.pos] == '(' and \
                    self.pos + 1 < len(self.program_str) and \
                    self.program_str[self.pos + 1] == '*':
                self.pos += 2  # step over the '(*' opener
                while self.pos < len(self.program_str):
                    if self.program_str[self.pos] == '\n':
                        self.line += 1
                    if self.program_str[self.pos] == '*':
                        self.pos += 1
                        # Guarded lookahead for the ')' of the closer.
                        if self.pos < len(self.program_str) and \
                                self.program_str[self.pos] == ')':
                            self.pos += 1
                            return -1
                    else:
                        self.pos += 1
                    if self.pos >= len(self.program_str):
                        # Unterminated comment: back up one so the EOF check
                        # in get_next_token takes over (original behavior).
                        self.pos -= 1
                        return -1
            else:
                return 2  # not a comment opener
        else:
            return 1  # already at end of input

    def skip_irrelevant(self):
        """Alternate whitespace and comment skipping until neither consumes.

        The helpers return negative values while input is being consumed and
        positive ones at end-of-input / no-match, so the counters cross 2
        once both have stopped making progress.
        """
        varWhiteSpace = 0
        varCommentSpace = 0
        while varWhiteSpace < 2 and varCommentSpace < 2:
            varWhiteSpace += self.skip_whitespace()
            varCommentSpace += self.skip_comment()

    def get_program_string(self):
        """Return the complete program text (used for error reports)."""
        return self.program_str

    def get_current_line(self):
        """Return the 1-based line number of the scan position."""
        return self.line
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,649
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/code_generator.py
|
class Generator:
    """Translates an AST into numbered TM assembly text."""

    def __init__(self, ast):
        # Root node of the abstract syntax tree to translate.
        self.ast = ast

    def generate(self):
        """Return the generated program, one 'N: instruction' per line."""
        instructions = self.ast.code_gen(0)
        numbered = [str(number) + ': ' + instruction + '\n'
                    for number, instruction in enumerate(instructions)]
        return ''.join(numbered)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,650
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/circular-prime.py
|
# maximum recursion depth exceeded in comparison
# (Klein 'circular-prime' equivalent; kept faithful to the original port,
# including its float arithmetic in rotate() and the log10 digit count.)
import math


def main(x):
    """Count the circular primes below x."""
    return circularPrimesTo(x)


def circularPrimesTo(x):
    # Scan candidates 2 .. x (exclusive upper bound x + 1).
    return circularPrimesToHelper(x + 1, 2, 0)


def circularPrimesToHelper(top, x, count):
    # Tail-recursive accumulation over the candidate range.
    if not x < top:
        return count
    if isCircularPrime(x):
        return circularPrimesToHelper(top, x + 1, count + 1)
    return circularPrimesToHelper(top, x + 1, count)


def isCircularPrime(x):
    # One rotation per decimal digit (log10(x) + 1 of them).
    if not isCircularPrimeHelper(x, math.log10(x) + 1):
        return False
    return report(x)


def isCircularPrimeHelper(x, turns):
    if turns == 0:
        return True
    return isPrime(x) and isCircularPrimeHelper(rotate(x), turns - 1)


def report(x):
    # Side effect: echo each circular prime as it is found.
    print(x)
    return True


def rotate(x):
    # Move the last decimal digit to the front (float arithmetic, as in
    # the original port).
    return x / 10 + ((x % 10) * (10 ** math.log10(x)))


def isPrime(n):
    return not hasDivisorFrom(2, n)


def hasDivisorFrom(i, n):
    if not i < n:
        return False
    return divides(i, n) or hasDivisorFrom(i + 1, n)


def divides(a, b):
    return b % a == 0
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,651
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/divide.py
|
# Klein 'divide' equivalent: print the first n digits of the decimal
# expansion of a/b, returning the final remainder state.
#
# Bug fix: the recursive branch of main() previously dropped its result
# (no 'return', so main(a, b, n) returned None for every n > 0); it now
# returns the value of the recursive call.


def main(a, b, n):
    """Print n quotient digits of a/b; return the remaining numerator."""
    if n == 0:
        return a
    else:
        return printAndDivide(a, b, n)


def printAndDivide(a, b, n):
    # Emit the next digit of the quotient, then recurse on the remainder.
    print((10 * a) // b)
    return main(MOD(a * 10, b), b, n - 1)


def MOD(m, n):
    """Floor-division remainder of m by n."""
    return m - m // n * n
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,652
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/drivers/code_gen_validate.py
|
# Driver script: compile the Klein program named on the command line to
# TM assembly.
#
# Usage: python code_gen_validate.py path/to/program.kln
# Side effect: writes the generated code to path/to/program.tm.
from sys import argv, path
import os
# Make 'src.*' importable whether the driver is launched from the repo
# root or from its own directory.
path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)) + '/../../'))
path.insert(0, os.getcwd())
from src.parser import Parser
from src.scanner import Scanner
# have to import some sort of sem analyzer how ever we decide to do that
from src.code_generator import Generator
# Path of the Klein source file to compile (first CLI argument).
FILE_PATH = argv[1]
# Drop a stray trailing carriage return (Windows-style input).
FILE_PATH = FILE_PATH.strip("\r")
with open(FILE_PATH, "r") as klein:
    klein_program = klein.read()
# Scan and parse the program text into an AST.
s = Scanner(klein_program)
p = Parser(s)
# gen = Generator(ast)
ast = p.parse()
# Translate the AST into TM assembly.
gen = Generator(ast)
program = gen.generate()
# Replace the 4-character extension with ".tm".
# NOTE(review): FILE_PATH[0:-4] assumes the input path ends in ".kln" --
# confirm other extensions are never passed.
#FILE_PATH = FILE_PATH.strip(".kln")
FILE_PATH = FILE_PATH[0:-4]
filename = FILE_PATH + ".tm"
# NOTE(review): this handle is never closed explicitly; a 'with' block
# would be safer.
output = open(filename, "w")
output.write(program)
print("TM code saved to file {}".format(filename))
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,653
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/is-tanton-pal.py
|
# Klein 'is-tanton-pal' equivalent.
#
# Bug fix: the original called MOD without defining it anywhere (its own
# header comment read "#MOD is undefined"), so every call raised a
# NameError.  MOD is now defined the same way the sibling programs in
# this directory define it.


def MOD(m, n):
    """Floor-division remainder of m by n."""
    return m - n * (m // n)


def main(x):
    """True when x becomes a binary palindrome under reverse-and-add.

    May recurse without bound for inputs that never converge.
    """
    return is_tanton_pal_bin(binary_for(x))


def is_tanton_pal_bin(x):
    # x is a binary number encoded as a decimal integer of 0/1 digits.
    if is_palindrome(x):
        return True
    else:
        return is_tanton_pal_bin(add_boolean(x, reverse(x)))


def binary_for(n):
    # Decimal integer whose digits are n's binary representation.
    if n < 2:
        return n
    else:
        return 10 * binary_for(n // 2) + MOD(n, 2)


def decimal_for(n):
    # Inverse of binary_for: read a 0/1 digit string back as a value.
    if n < 10:
        return n
    else:
        return 2 * decimal_for(n // 10) + MOD(n, 10)


def add_boolean(m, n):
    # Add two binary-as-decimal numbers; result in the same encoding.
    return binary_for(decimal_for(m) + decimal_for(n))


def is_palindrome(n):
    return n == reverse(n)


def reverse(n):
    return reverseL(n, 0)


def reverseL(n, nR):
    # Tail-recursive decimal-digit reversal with accumulator nR.
    if n == 0:
        return nR
    else:
        return reverseL(n // 10, 10 * nR + MOD(n, 10))
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,654
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/fibonacci.py
|
def main(elementWanted):
    """Return the elementWanted-th Fibonacci number (1-indexed; <1 -> 0)."""
    if elementWanted < 1:
        return 0
    return addNext(1, elementWanted, 0, 1)


def addNext(currentElement, elementWanted, previousSum, currentSum):
    # Walk the sequence until the requested position is reached, carrying
    # the last two values along.
    if currentElement == elementWanted:
        return currentSum
    return addNext(currentElement + 1, elementWanted,
                   currentSum, previousSum + currentSum)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,655
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/is-excellent.py
|
def MOD(m, n):
    """Floor-division remainder of m by n."""
    return m - n * (m // n)


def EXP(m, n):
    """m raised to the non-negative integer power n, by recursion."""
    return 1 if n == 0 else m * EXP(m, n - 1)


def ODD(n):
    """True when n is odd; negative inputs are mirrored to positive."""
    if 0 < n:
        return 2 * (n // 2) < n
    return ODD(-n)


def length(n):
    """Number of decimal digits in n."""
    return 1 if n < 10 else 1 + length(n // 10)


def a(n):
    """High half of n's decimal digits."""
    return n // EXP(10, length(n) // 2)


def b(n):
    """Low half of n's decimal digits."""
    return MOD(n, EXP(10, length(n) // 2))


def excellentDiff(a, b):
    """b squared minus a squared."""
    return b * b - a * a


def isExcellentSwitch(n, length):
    # An excellent number must have an even digit count.
    if ODD(length):
        return False
    return n == excellentDiff(a(n), b(n))


def isExcellent(n):
    return isExcellentSwitch(n, length(n))


def main(n):
    """True when n equals b^2 - a^2 for its digit halves a and b."""
    return isExcellent(n)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,656
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/parser.py
|
from src.scanner import Scanner
from src.errors import ParseError
from src.parse_table import *
from src.k_token import Token, TokenType
from src.AST_node import *
from src.stack_operations import *
class Parser:
    """Table-driven LL parser for Klein.

    Drives a parse stack of grammar symbols against the scanner's token
    stream and builds the AST on a separate semantic stack whenever a
    SemanticAction marker is popped.
    """
    def __init__(self, scanner):
        # Token source; must provide peek() / next_token().
        self.scanner = scanner
        # Human-readable traces accumulated for error reporting.
        self.debug_stack_string = str()
        self.debug_semantic_string = str()
    def parse(self):
        """Parse the whole program; return the root AST node or raise."""
        parse_stack = []
        semantic_stack = []
        # Seed the stack with the start symbol followed by end-of-file.
        push_rule([NonTerminal.Program, TokenType.EOF], parse_stack)
        while parse_stack:
            A = top(parse_stack)
            self.debug_stack_string += "Current Stack: " + str(parse_stack) + "\n"
            self.debug_stack_string += "Top of Stack: " + str(A) + "\n"
            if isinstance(A, TokenType):
                # Terminal expected: consume one token and match it.
                t = self.scanner.next_token()
                self.debug_stack_string += "Token Type: " + str(t.token_type) + "\n"
                self.debug_stack_string += "Token Value: " + str(t.token_value) + "\n"
                if A == t.token_type:
                    pop(parse_stack)
                    #####################################
                    # Keep values the AST will need (identifiers, number and
                    # boolean literals, type names) on the semantic stack.
                    if t.is_number() or t.is_word() or t.token_value == 'integer' or t.token_value == 'boolean' or t.token_value == 'true' or t.token_value == 'false':
                        push(t.value(), semantic_stack)
                    #####################################
                else:
                    msg = 'token mismatch: {} and {}'
                    msg = msg.format(A, t)
                    raise ParseError(msg, self.scanner.get_program_string(), self.debug_stack_string)
            elif isinstance(A, NonTerminal):
                # Non-terminal: look ahead without consuming, then expand
                # via the LL(1) parse table.
                t = self.scanner.peek()
                self.debug_stack_string += "Token Type: " + str(t.token_type) + "\n"
                # Operators, delimiters and keywords index the table by
                # their lexeme (via StaticTerminal); other tokens by type.
                if ((t.token_type == TokenType.OPERATORS) or (t.token_type == TokenType.DELIMETER) or (
                        t.token_type == TokenType.KEYWORD)):
                    terminal = StaticTerminal(t)
                    terminal = terminal.value
                else:
                    terminal = t.token_type
                self.debug_stack_string += "Terminal Value: " + str(terminal) + "\n"
                self.debug_stack_string += "Indexing into table: " + str(A) + ", " + str(terminal) + "\n"
                rule = parse_table.get((A, terminal))
                if rule is not None:
                    pop(parse_stack)
                    push_rule(rule, parse_stack)
                else:
                    msg = 'cannot expand {} on {}'
                    msg = msg.format(A, t)
                    raise ParseError(msg, self.scanner.get_program_string(), self.debug_stack_string)
            ################################################
            elif isinstance(A, SemanticAction):
                # Semantic marker: build the matching AST node from what is
                # already on the semantic stack.
                objectClass = class_factory.get(A)
                node = nodeBuilder(semantic_stack,objectClass)
                push(node, semantic_stack)
                # Pop the semantic marker off the parse stack.
                pop(parse_stack)
                self.debug_semantic_string += "---New Node: \n"
                self.debug_semantic_string += str(type(top(semantic_stack))) + "\n"
                self.debug_semantic_string += str(node) + "\n\n"
            ###############################################
            else:
                msg = 'invalid item on parse_stack: {}'
                msg = msg.format(A)
                raise ParseError(msg, self.scanner.get_program_string(), self.debug_stack_string)
            self.debug_stack_string += "semantic stack: \n"
            '''for i in semantic_stack:
                self.debug_stack_string += str(i) + "\n"'''
            self.debug_stack_string += "\n"
        # t is bound by the loop above; the seeded start rule guarantees at
        # least one iteration.
        if not t.is_eof():
            msg = 'unexpected token at end: {}'
            msg = msg.format(t)
            raise ParseError(msg, self.scanner.get_program_string(), self.debug_stack_string)
        #################################################
        elif len(semantic_stack) != 1:
            # NOTE(review): msg is never .format()ed on this path, so the
            # literal '{}' placeholder ends up in the report -- confirm.
            msg = 'unexpected number of AST nodes: {}'
            raise ParseError(msg, self.scanner.get_program_string(), self.debug_stack_string)
        else:
            # Run semantic analysis; a list result carries error objects.
            result = top(semantic_stack).process_node()
            if isinstance(result, list):
                msg = top(result).get_message()
                # NOTE(review): SemanticError is not imported by this module
                # (only ParseError is), so this raise would itself fail with
                # NameError -- confirm the intended import.
                raise SemanticError(msg, self.scanner.get_program_string(),self.debug_semantic_string)
            return top(semantic_stack)
        ################################################
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,657
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/is-cantor-number-v4.py
|
def main(n):
    """Klein 'is-cantor-number' v4 equivalent.

    Recursively checks base-3 digits; uses true division exactly as the
    original did, so results for larger n inherit its float behavior.
    """
    if n < 2:
        return True
    return 2 < n and main(n / 3) and (n - n / 3 * 3) < 2
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,658
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/drivers/parse_validate.py
|
# Driver script: parse the Klein program named on the command line and
# report whether it is syntactically valid.
#
# Usage: python parse_validate.py path/to/program.kln
from sys import argv, path
import os
# Make 'src.*' importable whether the driver is launched from the repo
# root or from its own directory.
path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)) + '/../../'))
path.insert(0, os.getcwd())
from src.parser import Parser
from src.scanner import Scanner
# Path of the Klein source file to check (first CLI argument).
FILE_PATH = argv[1]
# Drop a stray trailing carriage return (Windows-style input).
FILE_PATH = FILE_PATH.strip("\r")
with open(FILE_PATH, "r") as klein:
    klein_program = klein.read()
# Scan and parse; Parser.parse raises on invalid input and returns the
# AST root (truthy) on success.
s = Scanner(klein_program)
p = Parser(s)
result = p.parse()
if result:
    print("Valid Program")
else:
    print("Invalid Program")
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,659
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/errors.py
|
import sys
sys.tracebacklimit=0
class Error():
    """Base class for compiler error reporting.

    Collects the offending program text plus an error description, writes
    a log file, and then aborts compilation by raising ValueError.
    Subclasses set error_type / file_name / error_string and finish their
    __init__ by calling output_error().
    """

    def __init__(self, program):
        # Full program text, as returned by the scanner.
        self.program_string = program
        self.error_message = "\n--\n"
        self.error_type = ""
        self.file_name = "error.txt"
        # Detailed report body; filled in by each subclass.
        self.error_string = ""
        # Assembled by write_file(); mirrors what lands on disk.
        self.output_string = ""
        # File handle, opened lazily by open_file().
        self.output = ""

    def open_file(self):
        self.output = open(self.file_name, "w")

    def write_file(self, error):
        # Assemble the on-disk report in one pass.
        pieces = ["!--- " + self.error_type + " ERROR! ---!\n\n\n",
                  "---> Input Program: \n",
                  self.program_string,
                  "\n\n\n---> Error Information: \n",
                  error]
        self.output_string = "".join(pieces)
        self.output.write(self.output_string)

    def close_file(self):
        self.output.close()

    def throw_error(self):
        self.error_message += "\n--\nError log written to " + self.file_name
        raise ValueError(self.error_message)

    def output_error(self):
        # Write the log file, then abort via ValueError.
        self.open_file()
        self.write_file(self.error_string)
        self.close_file()
        self.throw_error()
#end class Error()
class GeneralError(Error):
    """Catch-all error; logs to error.txt and raises immediately."""

    def __init__(self, msg, program):
        super().__init__(program)
        self.error_message += msg
        self.error_string = msg
        self.output_error()
#end class GeneralError
class LexicalError(Error):
    """Scanner failure; logs to scanner_error.txt and raises."""

    def __init__(self, msg, program, position):
        super().__init__(program)
        self.error_type = "SCANNER"
        self.file_name = "scanner_error.txt"
        self.error_message += msg
        # Echo the unscanned tail of the program into the report.
        self.pos = position
        self.error_string = msg + "-> Input Program remaining to be scanned: \n" + program[self.pos:]
        # Mirror the original character-by-character scan, which left pos
        # at end-of-input (or unchanged when already past it).
        self.pos = max(self.pos, len(program))
        self.output_error()
#end class LexicalError
# errors thrown by the parser
class ParseError(Error):
    """Parser failure; logs the parse-stack trace to parser_error.txt."""

    def __init__(self, msg, program, trace):
        super().__init__(program)
        self.error_type = "PARSER"
        self.file_name = "parser_error.txt"
        self.error_message += msg
        self.error_string = msg + "\n-> Parse Stack Trace: \n" + trace
        self.output_error()
#end class ParseError
# errors thrown by the type checker
class SemanticError(Error):
    """Semantic-analysis failure; logs its trace to semantic_error.txt."""

    def __init__(self, msg, program, trace):
        super().__init__(program)
        self.error_type = "PARSER; SEMANTIC"
        self.file_name = "semantic_error.txt"
        self.error_message += msg
        self.error_string = msg + "\n-> Semantic Stack Trace: \n" + trace
        self.output_error()
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,660
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/square-root.py
|
def ABS( n ):
if n < 0:
return -n
else:
return n
def f( x , n ):
return x * x - n
def df( x ):
return 2 * x
def newtonAux( guess , previous, epsilon , n ):
if epsilon < ABS(previous - guess):
return newtonAux( guess - f(guess,n)/df(guess), guess, epsilon, n )
else:
return guess
def newton( guess , epsilon , n ):
return newtonAux( guess - f (guess,n )// df(guess), guess, epsilon, n )
def main( n , epsilon):
return newton( n//2, epsilon, n )
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,661
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/parse_table.py
|
from enum import Enum
from src.k_token import Token, TokenType
from src.AST_node import *
class NonTerminal(Enum):
    """Grammar non-terminals, pushed on the parse stack and used (paired
    with a terminal) as keys into the LL(1) parse table.  The integer
    values are only unique identifiers; nothing depends on their order.
    """
    Program = 0
    Definitions = 1
    Def = 2
    Formals = 3
    Nonempty_Formals = 4
    Nonempty_Formals_t = 5
    Formal = 6
    Body = 7
    Type = 8
    Expr = 9
    Expr_p = 10
    Simple_Expr = 11
    Simple_Expr_t = 12
    Term = 13
    Term_t = 14
    Factor = 15
    Factor_t = 16
    Actuals = 17
    Nonempty_Actuals = 18
    Nonempty_Actuals_t = 19
    Literal = 20
    Print_Statement = 21
class Terminal(Enum):
    """Grammar terminals for keyword/operator/delimiter lexemes.

    StaticTerminal maps a token's lexeme onto one of these members so the
    parse table can be keyed on the member rather than the raw string.
    """
    Function = 0
    OpenParen = 1
    CloseParen = 2
    Colon = 3
    Comma = 4
    Integer = 5
    Boolean = 6
    LessThan = 7
    Equals = 8
    Or = 9
    Plus = 10
    Minus = 11
    And = 12
    Mult = 13
    Divide = 14
    If = 15
    Then = 16
    Else = 17
    Not = 18
    Print = 19
class SemanticAction(Enum):
    """Markers interleaved with grammar symbols on the parse stack.

    When the parser pops one of these it builds the corresponding AST
    node (see the class_factory mapping below) from the semantic stack.
    """
    MakeDefinitions = 0
    MakeIdentifier = 1
    MakeFunction = 2
    MakeFormals = 3
    MakeFormal = 4
    MakeBody = 5
    MakeType = 6
    MakeLessThan = 7
    MakeEqualTo = 8
    MakePlus = 9
    MakeMinus = 10
    MakeAnd = 11
    MakeMultiply = 12
    MakeDivision = 13
    MakeNegation = 14
    MakeIf = 15
    MakeNot = 16
    MakeFunctionCall = 17
    MakeActuals = 18
    MakeNonEmptyActuals = 19
    MakeNumberLiteral = 20
    MakeBooleanLiteral = 21
    MakePrintStatement = 22
    MakeOr = 23
    MakeExpression = 24
    MakeProgram = 25
#this was implemented because of some janky behavior occurring
#whilst indexing into the parse table using the Terminal enumeration.
class StaticTerminal():
    """Maps a token's lexeme onto the matching Terminal enum member.

    Exists because indexing the parse table with raw token values proved
    unreliable; the parser wraps operator/delimiter/keyword tokens in
    this class and reads .value.  Raises ValueError for any lexeme that
    is not a known terminal.
    """

    def __init__(self, token):
        # Lexeme -> Terminal lookup (replaces the original elif chain);
        # built here so the class body has no load-time dependencies.
        lexeme_map = {
            "function": Terminal.Function,
            "(": Terminal.OpenParen,
            ")": Terminal.CloseParen,
            ":": Terminal.Colon,
            ",": Terminal.Comma,
            "integer": Terminal.Integer,
            "boolean": Terminal.Boolean,
            "<": Terminal.LessThan,
            "=": Terminal.Equals,
            "or": Terminal.Or,
            "+": Terminal.Plus,
            "-": Terminal.Minus,
            "and": Terminal.And,
            "*": Terminal.Mult,
            "/": Terminal.Divide,
            "if": Terminal.If,
            "then": Terminal.Then,
            "else": Terminal.Else,
            "not": Terminal.Not,
            "print": Terminal.Print,
        }
        lexeme = token.token_value
        if lexeme in lexeme_map:
            self.value = lexeme_map[lexeme]
        else:
            msg = "Error in StaticTerminal class.\n"
            msg += "Token: {}\n".format(token)
            raise ValueError(msg)
# Maps a SemanticAction to the AST node class (from src.AST_node) that the
# parser instantiates when it pops that action off its stack.
# NOTE(review): MakeFormal and MakeNonEmptyActuals have no entry here —
# confirm the parser never dispatches those two actions through this factory.
class_factory = {
    SemanticAction.MakeDefinitions: DefinitionsNode,
    SemanticAction.MakeIdentifier: IdentifierNode,
    SemanticAction.MakeFunction: FunctionNode,
    SemanticAction.MakeFormals: FormalsNode,
    SemanticAction.MakeBody: BodyNode,
    SemanticAction.MakeType: TypeNode,
    SemanticAction.MakeLessThan: LessThanNode,
    SemanticAction.MakeEqualTo: EqualToNode,
    SemanticAction.MakePlus: PlusNode,
    SemanticAction.MakeMinus: MinusNode,
    SemanticAction.MakeAnd: AndNode,
    SemanticAction.MakeMultiply: MultiplyNode,
    SemanticAction.MakeDivision: DivisionNode,
    SemanticAction.MakeNegation: NegationNode,
    SemanticAction.MakeIf: IfNode,
    SemanticAction.MakeNot: NotNode,
    SemanticAction.MakeFunctionCall: FunctionCallNode,
    SemanticAction.MakeActuals: ActualsNode,
    SemanticAction.MakeNumberLiteral: NumberLiteralNode,
    SemanticAction.MakeBooleanLiteral: BooleanLiteralNode,
    SemanticAction.MakePrintStatement: PrintStatementNode,
    SemanticAction.MakeOr: OrNode,
    SemanticAction.MakeExpression : ExpressionNode,
    SemanticAction.MakeProgram : ProgramNode
}
# LL(1) parse table for KLEIN: maps (non-terminal on top of the parse stack,
# lookahead symbol) to the production right-hand side to push — a mix of
# TokenType/Terminal symbols, NonTerminals, and SemanticActions.
# An empty list denotes an epsilon production.
# NOTE(review): (NonTerminal.Simple_Expr_t, Terminal.And) appears twice below
# (both map to []); the later duplicate key silently overwrites the earlier
# one in a dict literal — harmless here, but one should be removed.
parse_table = {
    (NonTerminal.Program, Terminal.Function): [NonTerminal.Definitions, SemanticAction.MakeDefinitions, SemanticAction.MakeProgram],
    (NonTerminal.Definitions, Terminal.Function): [NonTerminal.Def, NonTerminal.Definitions],
    (NonTerminal.Definitions, TokenType.EOF): [],
    (NonTerminal.Def, Terminal.Function): [TokenType.KEYWORD, TokenType.WORD, SemanticAction.MakeIdentifier, TokenType.DELIMETER, NonTerminal.Formals, TokenType.DELIMETER, TokenType.DELIMETER, NonTerminal.Type, NonTerminal.Body, SemanticAction.MakeFunction],
    (NonTerminal.Formals, TokenType.WORD): [NonTerminal.Nonempty_Formals, SemanticAction.MakeFormals],
    (NonTerminal.Formals, Terminal.CloseParen): [],
    (NonTerminal.Nonempty_Formals, TokenType.WORD): [NonTerminal.Formal, NonTerminal.Nonempty_Formals_t],
    (NonTerminal.Nonempty_Formals_t, Terminal.Comma): [TokenType.DELIMETER, NonTerminal.Nonempty_Formals],
    (NonTerminal.Nonempty_Formals_t, Terminal.CloseParen): [],
    (NonTerminal.Formal, TokenType.WORD): [TokenType.WORD, SemanticAction.MakeIdentifier, TokenType.DELIMETER, NonTerminal.Type],
    (NonTerminal.Body, Terminal.OpenParen): [NonTerminal.Expr],
    (NonTerminal.Body, Terminal.Minus): [NonTerminal.Expr],
    (NonTerminal.Body, Terminal.If): [NonTerminal.Expr],
    (NonTerminal.Body, Terminal.Not): [NonTerminal.Expr],
    (NonTerminal.Body, TokenType.NUMBER): [NonTerminal.Expr],
    (NonTerminal.Body, TokenType.BOOLEAN): [NonTerminal.Expr],
    (NonTerminal.Body, TokenType.WORD): [NonTerminal.Expr],
    (NonTerminal.Body, Terminal.Print): [NonTerminal.Print_Statement, NonTerminal.Body, SemanticAction.MakeBody],
    (NonTerminal.Type, Terminal.Integer): [TokenType.KEYWORD, SemanticAction.MakeType],
    (NonTerminal.Type, Terminal.Boolean): [TokenType.KEYWORD, SemanticAction.MakeType],
    (NonTerminal.Expr, Terminal.OpenParen): [NonTerminal.Simple_Expr, NonTerminal.Expr_p, SemanticAction.MakeExpression],
    (NonTerminal.Expr, TokenType.NUMBER): [NonTerminal.Simple_Expr, NonTerminal.Expr_p, SemanticAction.MakeExpression],
    (NonTerminal.Expr, TokenType.BOOLEAN): [NonTerminal.Simple_Expr, NonTerminal.Expr_p, SemanticAction.MakeExpression],
    (NonTerminal.Expr, Terminal.Minus): [NonTerminal.Simple_Expr, NonTerminal.Expr_p, SemanticAction.MakeExpression],
    (NonTerminal.Expr, Terminal.If): [NonTerminal.Simple_Expr, NonTerminal.Expr_p, SemanticAction.MakeExpression],
    (NonTerminal.Expr, Terminal.Not): [NonTerminal.Simple_Expr, NonTerminal.Expr_p, SemanticAction.MakeExpression],
    (NonTerminal.Expr, TokenType.WORD): [NonTerminal.Simple_Expr, NonTerminal.Expr_p, SemanticAction.MakeExpression],
    (NonTerminal.Expr_p, Terminal.Function): [],
    (NonTerminal.Expr_p, Terminal.CloseParen): [],
    (NonTerminal.Expr_p, Terminal.Comma): [],
    (NonTerminal.Expr_p, Terminal.LessThan): [TokenType.OPERATORS, NonTerminal.Expr, SemanticAction.MakeLessThan],
    (NonTerminal.Expr_p, Terminal.Equals): [TokenType.OPERATORS, NonTerminal.Expr, SemanticAction.MakeEqualTo],
    (NonTerminal.Expr_p, Terminal.And): [],
    (NonTerminal.Expr_p, Terminal.Mult): [],
    (NonTerminal.Expr_p, Terminal.Divide): [],
    (NonTerminal.Expr_p, Terminal.Then): [],
    (NonTerminal.Expr_p, Terminal.Else): [],
    (NonTerminal.Simple_Expr, Terminal.OpenParen): [NonTerminal.Term, NonTerminal.Simple_Expr_t],
    (NonTerminal.Simple_Expr, TokenType.NUMBER): [NonTerminal.Term, NonTerminal.Simple_Expr_t],
    (NonTerminal.Simple_Expr, TokenType.BOOLEAN): [NonTerminal.Term, NonTerminal.Simple_Expr_t],
    (NonTerminal.Simple_Expr, Terminal.Minus): [NonTerminal.Term, NonTerminal.Simple_Expr_t],
    (NonTerminal.Simple_Expr, Terminal.If): [NonTerminal.Term, NonTerminal.Simple_Expr_t],
    (NonTerminal.Simple_Expr, Terminal.Not): [NonTerminal.Term, NonTerminal.Simple_Expr_t],
    (NonTerminal.Simple_Expr, TokenType.WORD): [NonTerminal.Term, NonTerminal.Simple_Expr_t],
    (NonTerminal.Simple_Expr_t, Terminal.LessThan): [],
    (NonTerminal.Simple_Expr_t, Terminal.Equals): [],
    (NonTerminal.Simple_Expr_t, Terminal.Or): [TokenType.KEYWORD, NonTerminal.Simple_Expr, SemanticAction.MakeOr],
    (NonTerminal.Simple_Expr_t, Terminal.Plus): [TokenType.OPERATORS, NonTerminal.Simple_Expr, SemanticAction.MakePlus],
    (NonTerminal.Simple_Expr_t, Terminal.Minus): [TokenType.OPERATORS, NonTerminal.Simple_Expr, SemanticAction.MakeMinus],
    (NonTerminal.Term, Terminal.OpenParen): [NonTerminal.Factor, NonTerminal.Term_t],
    (NonTerminal.Term, TokenType.NUMBER): [NonTerminal.Factor, NonTerminal.Term_t],
    (NonTerminal.Term, TokenType.BOOLEAN): [NonTerminal.Factor, NonTerminal.Term_t],
    (NonTerminal.Term, Terminal.Minus): [NonTerminal.Factor, NonTerminal.Term_t],
    (NonTerminal.Term, Terminal.If): [NonTerminal.Factor, NonTerminal.Term_t],
    (NonTerminal.Term, Terminal.Not): [NonTerminal.Factor, NonTerminal.Term_t],
    (NonTerminal.Term, TokenType.WORD): [NonTerminal.Factor, NonTerminal.Term_t],
    (NonTerminal.Term_t, Terminal.Or): [],
    (NonTerminal.Term_t, Terminal.Plus): [],
    (NonTerminal.Term_t, Terminal.Minus): [],
    (NonTerminal.Term_t, Terminal.And): [TokenType.KEYWORD, NonTerminal.Term, SemanticAction.MakeAnd],
    (NonTerminal.Term_t, Terminal.Mult): [TokenType.OPERATORS, NonTerminal.Term, SemanticAction.MakeMultiply],
    (NonTerminal.Term_t, Terminal.Divide): [TokenType.OPERATORS, NonTerminal.Term, SemanticAction.MakeDivision],
    (NonTerminal.Factor, Terminal.OpenParen): [TokenType.DELIMETER, NonTerminal.Expr, TokenType.DELIMETER],
    (NonTerminal.Factor, TokenType.NUMBER): [NonTerminal.Literal],
    (NonTerminal.Factor, TokenType.BOOLEAN): [NonTerminal.Literal],
    (NonTerminal.Factor, Terminal.Minus): [TokenType.OPERATORS, NonTerminal.Factor, SemanticAction.MakeNegation],
    (NonTerminal.Factor, Terminal.If): [TokenType.KEYWORD, NonTerminal.Expr, TokenType.KEYWORD, NonTerminal.Expr, TokenType.KEYWORD, NonTerminal.Expr, SemanticAction.MakeIf],
    (NonTerminal.Factor, Terminal.Not): [TokenType.KEYWORD, NonTerminal.Factor, SemanticAction.MakeNot],
    (NonTerminal.Factor, TokenType.WORD): [TokenType.WORD, SemanticAction.MakeIdentifier, NonTerminal.Factor_t],
    (NonTerminal.Factor_t, Terminal.OpenParen): [TokenType.DELIMETER, NonTerminal.Actuals, TokenType.DELIMETER, SemanticAction.MakeFunctionCall],
    (NonTerminal.Factor_t, Terminal.And): [],
    (NonTerminal.Factor_t, Terminal.Mult): [],
    (NonTerminal.Factor_t, Terminal.Divide): [],
    (NonTerminal.Actuals, TokenType.NUMBER): [NonTerminal.Nonempty_Actuals, SemanticAction.MakeActuals],
    (NonTerminal.Actuals, TokenType.BOOLEAN): [NonTerminal.Nonempty_Actuals, SemanticAction.MakeActuals],
    (NonTerminal.Actuals, Terminal.Minus): [NonTerminal.Nonempty_Actuals, SemanticAction.MakeActuals],
    (NonTerminal.Actuals, Terminal.If): [NonTerminal.Nonempty_Actuals, SemanticAction.MakeActuals],
    (NonTerminal.Actuals, Terminal.Not): [NonTerminal.Nonempty_Actuals, SemanticAction.MakeActuals],
    (NonTerminal.Actuals, TokenType.WORD): [NonTerminal.Nonempty_Actuals, SemanticAction.MakeActuals],
    (NonTerminal.Nonempty_Actuals, TokenType.NUMBER): [NonTerminal.Expr, NonTerminal.Nonempty_Actuals_t],
    (NonTerminal.Nonempty_Actuals, TokenType.BOOLEAN): [NonTerminal.Expr, NonTerminal.Nonempty_Actuals_t],
    (NonTerminal.Nonempty_Actuals, Terminal.Minus): [NonTerminal.Expr, NonTerminal.Nonempty_Actuals_t],
    (NonTerminal.Nonempty_Actuals, Terminal.If): [NonTerminal.Expr, NonTerminal.Nonempty_Actuals_t],
    (NonTerminal.Nonempty_Actuals, Terminal.Not): [NonTerminal.Expr, NonTerminal.Nonempty_Actuals_t],
    (NonTerminal.Nonempty_Actuals, TokenType.WORD): [NonTerminal.Expr, NonTerminal.Nonempty_Actuals_t],
    (NonTerminal.Nonempty_Actuals_t, Terminal.CloseParen): [],
    (NonTerminal.Nonempty_Actuals_t, Terminal.Comma): [TokenType.DELIMETER, NonTerminal.Nonempty_Actuals],
    (NonTerminal.Literal, TokenType.NUMBER): [TokenType.NUMBER, SemanticAction.MakeNumberLiteral],
    (NonTerminal.Literal, TokenType.BOOLEAN): [TokenType.BOOLEAN, SemanticAction.MakeBooleanLiteral],
    (NonTerminal.Print_Statement, Terminal.Print): [TokenType.KEYWORD, TokenType.DELIMETER, NonTerminal.Expr,
                                                    TokenType.DELIMETER, SemanticAction.MakePrintStatement],
    (NonTerminal.Program, TokenType.EOF): [],
    (NonTerminal.Nonempty_Formals, Terminal.CloseParen): [NonTerminal.Formal, NonTerminal.Nonempty_Formals_t],
    (NonTerminal.Expr_p, Terminal.Or): [],
    (NonTerminal.Expr_p, Terminal.Plus): [],
    (NonTerminal.Expr_p, Terminal.Minus): [],
    (NonTerminal.Expr_p, TokenType.EOF): [],
    (NonTerminal.Simple_Expr_t, Terminal.And): [],
    (NonTerminal.Simple_Expr_t, Terminal.Mult): [],
    (NonTerminal.Simple_Expr_t, Terminal.Divide): [],
    (NonTerminal.Simple_Expr_t, Terminal.And): [],
    (NonTerminal.Simple_Expr_t, Terminal.Function): [],
    (NonTerminal.Simple_Expr_t, Terminal.Then): [],
    (NonTerminal.Simple_Expr_t, Terminal.Else): [],
    (NonTerminal.Simple_Expr_t, Terminal.CloseParen): [],
    (NonTerminal.Simple_Expr_t, Terminal.Comma): [],
    (NonTerminal.Simple_Expr_t, TokenType.EOF): [],
    (NonTerminal.Term_t, Terminal.Function): [],
    (NonTerminal.Term_t, Terminal.CloseParen): [],
    (NonTerminal.Term_t, Terminal.Comma): [],
    (NonTerminal.Term_t, Terminal.LessThan): [],
    (NonTerminal.Term_t, Terminal.Equals): [],
    (NonTerminal.Term_t, Terminal.Then): [],
    (NonTerminal.Term_t, Terminal.Else): [],
    (NonTerminal.Term_t, TokenType.EOF): [],
    (NonTerminal.Factor_t, Terminal.Or): [],
    (NonTerminal.Factor_t, Terminal.Plus): [],
    (NonTerminal.Factor_t, Terminal.Minus): [],
    (NonTerminal.Factor_t, Terminal.LessThan): [],
    (NonTerminal.Factor_t, Terminal.Equals): [],
    (NonTerminal.Factor_t, Terminal.Function): [],
    (NonTerminal.Factor_t, Terminal.Then): [],
    (NonTerminal.Factor_t, Terminal.Else): [],
    (NonTerminal.Factor_t, Terminal.CloseParen): [],
    (NonTerminal.Factor_t, Terminal.Comma): [],
    (NonTerminal.Factor_t, TokenType.EOF): [],
    (NonTerminal.Actuals, Terminal.OpenParen): [NonTerminal.Nonempty_Actuals, SemanticAction.MakeActuals],
    (NonTerminal.Actuals, Terminal.CloseParen): [],
    (NonTerminal.Nonempty_Actuals, Terminal.OpenParen): [NonTerminal.Expr, NonTerminal.Nonempty_Actuals_t],
    (NonTerminal.Body, TokenType.EOF): []
}
# Answering the question originally left here: yes — the
# (NonTerminal.Program, TokenType.EOF) entry exists above and maps to [].
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,662
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/printAndDivide.py
|
import sys
def printAndDivide ( a, b, n ):
    """Print the next long-division digit of a/b, then continue for n-1 digits."""
    print( 10 * a // b)
    # Carry the remainder forward as the new numerator (schoolbook long division).
    return main ( MOD(a*10, b), b, n-1)
def MOD ( m, n):
    """Remainder of m divided by n, via the floor-division identity."""
    quotient = m // n
    return m - quotient * n
# Fixed demo arguments: print 3 long-division digits of 1/2.
sys.argv = ['printAndDivide', '1' , '2', '3' ]
def main ( a, b, n ):
    """Emit n long-division digits of a/b; returns the final numerator."""
    if n == 0 :
        return a
    else:
        return printAndDivide( a, b, n )
print( main( int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3])))
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,663
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/horner-param.py
|
def main( coeff3, coeff2 , coeff1 , coeff0 , x ):
    """Evaluate coeff3*x^3 + coeff2*x^2 + coeff1*x + coeff0 via Horner's rule."""
    return horner( x, 3, 0, coeff3, coeff2, coeff1, coeff0 )
def horner( x , n , value , coeff3 , coeff2 , coeff1 , coeff0 ):
    """Horner accumulator: `value` holds the partial result for powers above n."""
    if n < 0:
        return value
    else:
        return horner( x , n - 1 , (value * x) + coefficient(n, coeff3, coeff2, coeff1, coeff0),
                       coeff3, coeff2, coeff1, coeff0 )
def coefficient( i , coeff3 , coeff2 , coeff1 , coeff0 ):
    """Select the coefficient for power i, clamped to the range [0, 3]."""
    if i >= 3:
        return coeff3
    if i >= 2:
        return coeff2
    if i >= 1:
        return coeff1
    return coeff0
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,664
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/divisibleByParts.py
|
import math
def MOD(m,n):
    """Remainder of m divided by n.

    BUG FIX: the original used true division `/`, which yields floats in
    Python 3, so m-(m/n)*n collapsed to ~0.0; floor division restores the
    intended integer remainder.
    """
    return m-(m//n)*n
def divisibleByParts(left, right):
    """Divisibility-by-7 step: left = n//10, right = last digit of n."""
    print("n/10=",left,"MOD(n,10)",right)
    # BUG FIX: the digit rule is left - 2*right, not (left - right)*2 —
    # only the former maps multiples of 7 to the diffs {7, 0, -7, -14}
    # tested in divisibleByDifference.
    return divisibleByDifference(left - right * 2)
def divisibleByDifference(diff):
    """Decide divisibility by 7 from the digit-rule difference."""
    print("diff=", diff)
    if((diff == 7) or (diff == 0) or (diff == -7) or (diff == -14)):
        return True
    else:
        if(diff<14):
            return False
        else:
            # BUG FIX: the recursive reduction's result was discarded, so this
            # branch returned None; propagate it to the caller.
            return main(diff)
def main(n):
    """Entry point: split n into its leading digits and last digit, then test."""
    # BUG FIX: `n/10` is float division in Python 3; the digit rule needs ints.
    return divisibleByParts(n // 10, MOD(n, 10))
# Interactive driver: repeatedly test numbers until the user enters 0.
inputString = "Input value (input 0 to quit): "
x = input(inputString)
# NOTE(review): input() returns a str, so `x != 0` is always True here;
# the real sentinel check happens after int(x) inside the loop.
if(x != 0):
    while(True):
        x = int(x)
        if(x == 0):
            break
        print(main(x))
        x = input(inputString)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,665
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/tests.py
|
import unittest
from src.scanner import Scanner
class ScannerTestCases(unittest.TestCase):
    """Unit tests for src.scanner.Scanner: tokenization of words, keywords,
    numbers, booleans, operators, and delimiters, with surrounding whitespace."""
    def test_peek_past_whitespace(self):
        '''Find literal past whitespace.'''
        s = Scanner(' += ')
        self.assertTrue(s.peek().is_operator(), 'whitespace before ...')
    def test_literal_tokens_with_whitespace(self):
        '''Find two literals inside whitespace.'''
        s = Scanner(' += ')
        self.assertTrue(s.next_token().is_operator())
        self.assertTrue(s.next_token().is_operator())
        self.assertTrue(s.next_token().is_eof())
    def test_word_past_whitespace(self):
        '''Find word, delimiter and number past whitespace.'''
        s = Scanner(' hello:1 ')
        self.assertTrue(s.peek().is_word(), 'peek past whitespace')
        next_token = s.next_token()
        self.assertTrue(next_token.is_word(), 'right token type')
        self.assertEqual(next_token.value(), 'hello', 'right token value')
        self.assertTrue(s.next_token().is_delimeter())
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'right token type')
        self.assertEqual(next_token.value(), 1)
        self.assertTrue(s.next_token().is_eof(), 'EOF after word')
    def test_keyword_past_whitespace(self):
        '''Find keyword past whitespace.'''
        s = Scanner(' if ')
        self.assertTrue(s.peek().is_keyword(), 'peek past whitespace')
        next_token = s.next_token()
        self.assertTrue(next_token.is_keyword(), 'right token type')
        self.assertEqual(next_token.value(), 'if', 'right token value')
        self.assertTrue(s.next_token().is_eof(), 'EOF after word')
    def test_boolean_past_whitespace(self):
        '''Find boolean literal past whitespace.'''
        s = Scanner(' true ')
        self.assertTrue(s.peek().is_boolean(), 'peek past whitespace')
        next_token = s.next_token()
        self.assertTrue(next_token.is_boolean(), 'right token type')
        self.assertEqual(next_token.value(), 'true', 'right token value')
        self.assertTrue(s.next_token().is_eof(), 'EOF after word')
    # NOTE(review): method name has a typo ("iterals" for "literals");
    # left unchanged since test names are discovered by unittest.
    def test_word_within_iterals(self):
        '''Find word within two literals.'''
        s = Scanner('+hello=')
        self.assertTrue(s.next_token().is_operator())
        self.assertTrue(s.next_token().is_word())
        self.assertTrue(s.next_token().is_operator())
    def test_two_words(self):
        '''Find two words.'''
        s = Scanner(' tyler rahe')
        next_token = s.next_token()
        self.assertTrue(next_token.is_word(), 'first token right')
        self.assertEqual(next_token.value(), 'tyler')
        next_token = s.next_token()
        self.assertTrue(next_token.is_word(), 'second token right')
        self.assertEqual(next_token.value(), 'rahe')
    def test_word_with_number(self):
        '''Find word and number.'''
        s = Scanner(' tyler12')
        next_token = s.next_token()
        self.assertTrue(next_token.is_word(), 'first token right')
        self.assertEqual(next_token.value(), 'tyler')
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'second token right')
        self.assertEqual(next_token.value(), 12)
    def test_one_number(self):
        '''Find number.'''
        s = Scanner('42')
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'found right token')
        self.assertEqual(next_token.value(), 42)
    def test_two_numbers(self):
        '''Find two numbers in whitespace.'''
        s = Scanner('   3540 \n\t 4550 ')
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'found right token')
        self.assertEqual(next_token.value(), 3540)
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'found right token')
        self.assertEqual(next_token.value(), 4550)
    def test_assignment(self):
        '''Recognize tokens in an assignment statement.'''
        s = Scanner(' klien=3\n')
        next_token = s.next_token()
        self.assertTrue(next_token.is_word(), 'first token right')
        self.assertEqual(next_token.value(), 'klien')
        self.assertTrue(s.next_token().is_operator())
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'found right token')
        self.assertEqual(next_token.value(), 3)
    def test_addition_spec(self):
        '''Recognize tokens in a addition specification.'''
        s = Scanner(' 1+100\t')
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'found right token')
        self.assertEqual(next_token.value(), 1)
        self.assertTrue(s.next_token().is_operator())
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'found right token')
        self.assertEqual(next_token.value(), 100)
    def test_lessthan_spec(self):
        '''Recognize tokens in a less than specification.'''
        s = Scanner(' 1<100\t')
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'found right token')
        self.assertEqual(next_token.value(), 1)
        self.assertTrue(s.next_token().is_operator())
        next_token = s.next_token()
        self.assertTrue(next_token.is_number(), 'found right token')
        self.assertEqual(next_token.value(), 100)
    def test_perens(self):
        '''Find two perens inside whitespace.'''
        s = Scanner(' (=) ')
        self.assertTrue(s.next_token().is_delimeter())
        self.assertTrue(s.next_token().is_operator())
        self.assertTrue(s.next_token().is_delimeter())
        self.assertTrue(s.next_token().is_eof())
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,666
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/divisible-By-Seven.py
|
import math
def main ( n ):
    """True iff n is divisible by 7, via the subtract-twice-the-last-digit rule."""
    return divisibleByParts ( n // 10 , MOD (n, 10 ))
def divisibleByParts ( left , right ):
    """Apply one rule step: left = n//10, right = last digit of n."""
    return divisibleByDifference ( left - right * 2)
def divisibleByDifference ( diff ):
    """Small diffs are decided directly; large ones are reduced recursively."""
    if ((diff == 7) or (diff == 0) or (diff == -7) or (diff == -14)):
        return True
    else:
        # One rule step maps multiples of 7 to multiples of 7, so any diff in
        # (-14, 14) not listed above is not a multiple of 7.
        if diff < 14:
            return False
        else:
            return main( diff )
def MOD (m , n ):
    """Remainder of m divided by n (KLEIN has no modulus operator)."""
    q = m // n
    return m - q * n
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,667
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/horner.py
|
def main ( x ):
    """Evaluate the fixed cubic x^3 - 4x^2 + 2x + 9 at x via Horner's rule."""
    return horner ( x , 3 , 0 )
def horner( x , n , value ):
    """Horner accumulator over coefficient(3..0); `value` is the running result."""
    if n < 0:
        return value
    else:
        return horner( x , n - 1 , (value * x) + coefficient(n) )
def coefficient( i ):
    """Coefficient of x**i for the fixed cubic x^3 - 4x^2 + 2x + 9 (i clamped)."""
    if i >= 3:
        return 1
    if i >= 2:
        return -4
    if i >= 1:
        return 2
    return 9
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,668
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/doc/parser/factoryConcept.py
|
##This is the basic rundown of how the semantic action
##portion of the parse algorithm will create the node objects.
class anASTNode():
    """Toy AST node: constructing one pops a value off the shared stack."""
    def __init__(self, stack):
        self.test = stack.pop()
    def get_test(self):
        return self.test
# Factory table: maps an action id to the node class to instantiate.
factory = {
    1 : anASTNode
}
stack = [1,2,"three",4,(4+1), 6]
# Drain the stack top-down: each node construction pops one value.
while(len(stack) > 0):
    testClass = factory[1]
    testObject = testClass(stack)
    print(testObject.get_test())
    print(stack)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,669
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/sieve.py
|
def main( n ):
    """Print each integer in [2, n]: primes as themselves, composites as "0"."""
    return sieveAt( 2 , n )
def sieveAt( current , max ):
    """Walk `current` up to `max`; returns True when the walk completes."""
    # NOTE(review): `max` shadows the builtin; left as-is to preserve the API.
    if max < current:
        return True
    else:
        return doSieveAt( current , max )
def doSieveAt( current , max ):
    """Report `current` (prime -> itself, composite -> "0"), then advance."""
    if isPrime(current):
        print (current)
    else:
        print ("0")
    return sieveAt(current+1 , max )
def isPrime( n ):
    """True iff n has no divisor in [2, n)."""
    # NOTE(review): 0 and 1 report True under this test — confirm callers
    # only pass n >= 2 (main starts the sieve at 2).
    return not hasDivisorFrom(2, n)
def hasDivisorFrom( i , n ):
    """True iff some j in [i, n) divides n."""
    if i < n:
        return divides(i, n) or hasDivisorFrom(i+1, n)
    else:
        return False
def divides( a , b ):
    """True iff a divides b exactly."""
    return rem( b , a ) == 0
def rem( num , den ):
    """Remainder of num divided by den, by repeated subtraction."""
    while not (num < den):
        num = num - den
    return num
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,670
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/modulus-by-hand.py
|
def MOD( m, n):
    """Remainder of m divided by n by repeated subtraction.

    Assumes m >= 0 and n > 0 (negative m never terminates).

    BUG FIX: the recursive branch discarded its result, so any call with
    m >= n returned None; the recursion's value is now propagated.
    """
    if m < n:
        return m
    else:
        return MOD(m-n, n)
def main( m, n):
    """Print the integer quotient of m/n, then return the remainder."""
    # FIX: `m / n` printed a float quotient; KLEIN division is integer and the
    # sibling equivalents in this repo use floor division (`//`).
    print(m // n)
    return MOD(m,n)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,671
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/is-cantor-number.py
|
def main ( n ):
    """True iff n's base-3 expansion contains no digit 2 (a Cantor-set number)."""
    return has_no_2s(to_base3(n))
def to_base3( n ):
    """Return n re-encoded as a base-10 numeral whose digits are n's base-3 digits."""
    if n < 3:
        return n
    else:
        # BUG FIX: `n / 3` is float division in Python 3; floor division keeps
        # the recursion in integers as the digit encoding requires.
        return 10 * to_base3(n // 3) + MOD(n, 3)
def has_no_2s( n ):
    """True iff every decimal digit of n is 0 or 1 (no digit is 2 or greater)."""
    if n < 10:
        return n < 2
    else:
        # BUG FIX: `n / 10` is float division in Python 3; floor division keeps
        # the digit recursion in integers.
        return has_no_2s(n // 10) and has_no_2s(MOD(n, 10))
def MOD( m , n ):
    """Remainder of m divided by n.

    BUG FIX: the original used true division `/`, which yields floats in
    Python 3, so m - (m/n)*n collapsed to ~0.0; floor division restores the
    integer remainder.
    """
    return m - m//n * n
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,672
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/farey.py
|
def main( xNum , xDen , N ):
    """Print numerator then denominator of the Farey-N approximation of xNum/xDen."""
    print( fareyNum( xNum , xDen , N ) )
    print (fareyDen( xNum , xDen , N ))
def fareyNum( xNum , xDen , N ):
    """Numerator of the mediant-search result.

    KLEIN has no tuples, so the whole search is re-run once per component
    (selector 1..4 picks which of a, b, c, d to return)."""
    return fareySelectNum(N,whileLoopFor(1, xNum, xDen, N, 0, 1, 1, 1),
                          whileLoopFor(2, xNum, xDen, N, 0, 1, 1, 1),
                          whileLoopFor(3, xNum, xDen, N, 0, 1, 1, 1),
                          whileLoopFor(4, xNum, xDen, N, 0, 1, 1, 1))
def fareyDen( xNum , xDen , N ):
    """Denominator counterpart of fareyNum."""
    return fareySelectDen(N, whileLoopFor(1, xNum, xDen, N, 0, 1, 1, 1),
                          whileLoopFor(2, xNum, xDen, N, 0, 1, 1, 1),
                          whileLoopFor(3, xNum, xDen, N, 0, 1, 1, 1),
                          whileLoopFor(4, xNum, xDen, N, 0, 1, 1, 1))
def fareySelectNum( N , a , b , c , d ):
    """Pick the numerator of whichever neighbour (a/b or c/d) is still in Farey-N."""
    if greater( b , N ):
        return c
    else:
        return a
def fareySelectDen( N , a , b , c , d ):
    """Pick the denominator of whichever neighbour (a/b or c/d) is still in Farey-N."""
    if greater ( b , N ):
        return d
    else:
        return b
def whileLoopFor(selector , xNum , xDen , N , a , b , c , d ):
    """Stern-Brocot/mediant search for xNum/xDen between a/b and c/d, bounded
    by denominator N; `selector` (1..4) picks which of a, b, c, d to return."""
    if greater( b , N ) or greater ( d , N ):
        if selector == 1:
            return a
        elif selector == 2:
            return b
        elif selector == 3:
            return c
        else:
            return d
    elif fractionEqual ( xNum , xDen , a + c , b + d ):
        # BUG FIX: this was `if selector + 1:` (always truthy for selector >= 0),
        # so selectors 2-4 fell into the wrong branch; it must mirror the
        # selector dispatch used above. On an exact mediant hit, the mediant's
        # numerator/denominator serves both neighbours.
        if selector == 1:
            return a + c
        elif selector == 2:
            return b + d
        elif selector == 3:
            return a + c
        else:
            return b + d
    elif fractionGreater( xNum , xDen, a + c , b + d ):
        return whileLoopFor( selector , xNum , xDen , N , a + c , b + d , c , d)
    else:
        return whileLoopFor( selector , xNum , xDen , N , a , b , a + c , b + d)
def fractionEqual( x , xd , y , yd ):
    """True iff x/xd == y/yd, by exact cross-multiplication (no floats)."""
    # Idiom: return the comparison directly instead of if/else True/False.
    return x * yd == y * xd
def fractionGreater( x , xd , y , yd ):
    """True iff x/xd > y/yd by cross-multiplication.

    Assumes positive denominators (cross-multiplying flips otherwise) —
    holds for the Farey search, which only produces positive xd/yd.
    """
    return greater( x * yd , y * xd )
def greater( x , y ):
    """True iff x > y, expressed with only < and == (KLEIN's comparison ops)."""
    # Idiom: return the boolean expression directly instead of if/else True/False.
    return not (( x < y ) or ( x == y ))
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,673
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/public-private.py
|
def main ( publicKey , privateKey ):
if publicKey == 0:
return factor( 2147481647 , 2047483747 )
else:
return factor( publicKey , privateKey )
def factor( publicKey , privateKey ):
return displayAndPrint( publicKey , privateKey , gcd( publicKey , privateKey) )
def displayAndPrint( publicKey , privateKey , commonFactor ):
print( publicKey // commonFactor )
print( privateKey // commonFactor )
return commonFactor
def gcd( a , b ):
if b == 0:
return a
else:
return gcd( b , remainder( a , b ) )
def remainder( a , b ):
if a < b:
return a
else:
return remainder( a - b , b )
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,674
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/generate-excellent.py
|
# bad if
def MOD( m , n ):
    """Remainder of m divided by n via the floor-division identity."""
    whole = m // n
    return m - n * whole
def EXP( m , n ):
    """m raised to the power n (n >= 0), computed recursively."""
    return 1 if n == 0 else m * EXP( m , n - 1 )
def ODD( n ):
    """True iff n is odd.

    BUG FIX: the original recursed as ODD(-n) whenever `0 < n` was false,
    so ODD(0) called ODD(0) forever (RecursionError). Recurse only for
    strictly negative n; 0 now correctly reports even (False). Behavior for
    nonzero n is unchanged.
    """
    if n < 0:
        return ODD( -n )
    return (2 * ( n // 2 )) < n
def LE( p , q ):
    """True iff p <= q (KLEIN only offers < and =, hence the helper)."""
    return p <= q
def SQRT( n ):
    """Integer square root of n (nearest), via binary search on [0, n]."""
    return SQRTSEARCH( n , 0 , n )
def SQRTSEARCH( n , low , high ):
    """Binary search for the integer square root of n within [low, high]."""
    if LE( high, low + 1 ):
        # BUG FIX: the original called LE with a single `or`-ed expression,
        # `LE(n - (low*low) or (high*high) - n)` — a TypeError, since LE
        # takes two arguments. Compare the two candidates' squared errors
        # and return whichever bound is closer.
        if LE( n - (low * low) , (high * high) - n ):
            return low
        else:
            return high
    else:
        return SQRTSPLIT( n, low, high, (low + high)// 2 )
def SQRTSPLIT( n , low , high , mid ):
    """Binary-search step: keep whichever half of [low, high] contains sqrt(n)."""
    if LE( mid * mid , n ):
        return SQRTSEARCH( n, mid, high )
    else:
        return SQRTSEARCH( n, low, mid )
def EVEN( n ):
    """True iff n is even (equivalent to the original n == 2*(n//2) test)."""
    return n % 2 == 0
def ISROOT( r , n ):
    """True iff r squared equals n exactly."""
    square = r * r
    return square == n
def length( n ):
    """Number of decimal digits in n (for n >= 0; any n < 10 counts as 1)."""
    digits = 1
    while n >= 10:
        n //= 10
        digits += 1
    return digits
def a( n ):
    """The front half of n's decimal digits: n // 10^(length(n)//2)."""
    return n // EXP(10, length(n)// 2 )
def excellentDiff( a , b ):
    """b^2 - a^2, the quantity an 'excellent' number must equal."""
    return (b - a) * (b + a)
def isExcellentSwitch( n , length):
    """Check whether n (with the given digit count) is an 'excellent' number.

    NOTE(review): this calls b(n), which is not defined anywhere in this
    file — presumably the lower digit half, mirroring a(n); confirm against
    the Klein original before running (a NameError as written).
    """
    if ODD(length):
        return False
    else:
        return n == excellentDiff(a(n), b(n))
def isExcellent( n ):
    """True when n is an excellent number, dispatching on its digit count."""
    digits = length(n)
    return isExcellentSwitch(n, digits)
def printCandidateAndContinue( a , n , upper , candidate ):
    """Emit a found candidate, then resume the scan from a + 1."""
    nextA = a + 1
    print(candidate)
    return aLoop(nextA, n, upper)
def aLoop3( a , n , upper , det , root , candidate):
    """Accept the candidate only if det is a perfect square whose root is
    odd and the candidate itself is excellent; otherwise keep scanning."""
    accept = ISROOT(root, det) and EVEN(root + 1) and isExcellent(candidate)
    if accept:
        return printCandidateAndContinue(a, n, upper, candidate)
    return aLoop(a + 1, n, upper)
def aLoop2( a , n , upper , det , root ):
    """Assemble the candidate number from a and the discriminant's root."""
    candidate = a * EXP(10, n) + ((root + 1) // 2)
    return aLoop3(a, n, upper, det, root, candidate)
def aLoop1( a , n , upper , det ):
    """Take the discriminant's integer square root and continue the test."""
    root = SQRT(det)
    return aLoop2(a, n, upper, det, root)
def aLoop( a , n , upper ):
    """Scan a upward until upper, testing each discriminant; True when done."""
    if upper <= a:
        return True
    det = 4 * EXP(a, 2) + 4 * EXP(10, n) * a + 1
    return aLoop1(a, n, upper, det)
def createLoop( a , n ):
    """Search one decade of upper halves, starting at a (bound 10 * a)."""
    return aLoop(a, n, 10 * a)
def main ( length):
    """Search for excellent numbers of the given (even) total digit length."""
    half = length // 2
    return createLoop(EXP(10, half - 1), half)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,675
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/src/stack_operations.py
|
def top(stack):
    """Return the topmost element without removing it."""
    return stack[len(stack) - 1]
def pop(stack):
    """Discard the topmost element (nothing is returned)."""
    del stack[-1]
def push_rule(lst, stack):
    """Push lst's elements so that lst[0] ends up on top of the stack."""
    stack.extend(reversed(lst))
def push(lst, stack):
    """Push a single item (which may itself be a list) onto the stack."""
    stack += [lst]
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,676
|
alanmmckay/KLEINcompiler
|
refs/heads/master
|
/programs/class-programs/python-equivalents/russian-peasant.py
|
def MOD(m,n):
    """Remainder of m divided by n (Python floor-division semantics)."""
    return m - n * (m // n)
def multWithAccum(m,n,accum):
    """Russian-peasant multiplication: returns accum + m * n for n >= 0.

    Iterative form: add m whenever n is odd, then double m and halve n.
    """
    while n != 0:
        if MOD(n, 2) == 1:
            accum = accum + m
        m = m * 2
        n = n // 2
    return accum
def mult(m,n):
    """Multiply m by n via the peasant algorithm, starting from zero."""
    return multWithAccum(m, n, 0)
def main(m,n):
    """Print the multiplicand, then return the product m * n."""
    print(m)
    product = mult(m, n)
    return product
answer=main(9,6)  # driver: multiply 9 by 6 (main also prints the multiplicand)
print("The value is : ",answer)
|
{"/src/AST_node.py": ["/src/errors.py", "/src/stack_operations.py"], "/src/scanner.py": ["/src/k_token.py", "/src/errors.py"], "/src/drivers/code_gen_validate.py": ["/src/parser.py", "/src/scanner.py", "/src/code_generator.py"], "/src/parser.py": ["/src/scanner.py", "/src/errors.py", "/src/parse_table.py", "/src/k_token.py", "/src/AST_node.py", "/src/stack_operations.py"], "/src/drivers/parse_validate.py": ["/src/parser.py", "/src/scanner.py"], "/src/parse_table.py": ["/src/k_token.py", "/src/AST_node.py"], "/src/tests.py": ["/src/scanner.py"]}
|
39,698
|
hairtonvanda18/ChessEngineAI
|
refs/heads/main
|
/ChessMain.py
|
import pygame as p
import ChessEngine
import ChessAi
import time
WIDTH = HEIGHT = 400  # square window, in pixels
DIMENSION = 5  # 5x5 mini-chess board
SQ_SIZE = HEIGHT // DIMENSION  # pixel size of one board square
MAX_FPS = 15  # frame-rate cap for the main loop
IMAGES = {}  # piece code -> scaled pygame Surface, filled by loadImages()
def loadImages():
    """Load and scale one sprite per piece code into the IMAGES cache."""
    pieces = ["bR", "bN", "bB", "bQ", "bK",
              "bp", "wp", "wR", "wN", "wB", "wQ", "wK"]
    for piece in pieces:
        raw = p.image.load("Images/" + piece + ".png")
        IMAGES[piece] = p.transform.scale(raw, (SQ_SIZE, SQ_SIZE))
def highlightSquares(screen,gs,validmoves,sqSelected):
    """Tint the selected friendly square blue and its legal targets yellow."""
    if sqSelected == ():
        return
    r, c = sqSelected
    mover = 'w' if gs.whiteToMove else 'b'
    if gs.board[r][c][0] != mover:
        return  # only highlight the side to move's own pieces
    overlay = p.Surface((SQ_SIZE, SQ_SIZE))
    overlay.set_alpha(100)  # translucent tint
    overlay.fill(p.Color("blue"))
    screen.blit(overlay, (c * SQ_SIZE, r * SQ_SIZE))
    overlay.fill(p.Color("yellow"))
    for move in validmoves:
        if (move.startRow, move.startCol) == (r, c):
            screen.blit(overlay, (move.endCol * SQ_SIZE, move.endRow * SQ_SIZE))
def main():
    """Run the game loop: handle input, drive the AI, and render each frame.

    Fixes relative to the original:
    - the AI move is now actually timed: the original executed
      `print(end - start)` with neither name defined, raising NameError on
      the AI's first move;
    - pressing 'r' reset playerClicks to a tuple `()`, which crashed the
      next mouse click on `.append()`; it is reset to a list;
    - the two move-log files are now closed after writing.
    """
    p.init()
    screen = p.display.set_mode((WIDTH, HEIGHT))
    clock = p.time.Clock()
    screen.fill(p.Color("white"))
    gs = ChessEngine.GameState()
    validmoves = gs.getValidMoves()
    moveMade = False          # regenerate valid moves only after a move
    loadImages()
    running = True
    animate = False           # animate real moves only, not undos/resets
    sqSelected = ()           # last square the user clicked, () when none
    playerClicks = []         # up to two clicks: origin then destination
    gameOver = False
    playerOne = True          # True -> white is a human
    playerTwo = False         # False -> black is the AI
    while running:
        humansTurn = (gs.whiteToMove and playerOne) or (not gs.whiteToMove and playerTwo)
        for e in p.event.get():
            if e.type == p.QUIT:
                # dump both players' move logs before shutting down
                player1 = open("player1.txt", "w")
                player2 = open("player2.txt", "w")
                player1.write("Start game \n")
                player2.write("Start game \n")
                player1.write("White \n")
                player2.write("Black \n")
                if len(gs.moveLog) > 0:
                    for i in range(len(gs.moveLog)):
                        if gs.moveLog[i].pieceMoved[0] == 'w':
                            player1.write(f"{gs.moveLog[i].getChessNotation()}\n")
                            player2.write(f"White played {gs.moveLog[i].getChessNotation()}\n")
                        else:
                            player2.write(f"{gs.moveLog[i].getChessNotation()}\n")
                            player1.write(f"Black played {gs.moveLog[i].getChessNotation()}\n")
                player1.close()
                player2.close()
                running = False
            elif e.type == p.MOUSEBUTTONDOWN:
                if not gameOver and humansTurn:
                    location = p.mouse.get_pos()
                    col = location[0] // SQ_SIZE
                    row = location[1] // SQ_SIZE
                    if sqSelected == (row, col):
                        # clicking the same square twice deselects it
                        sqSelected = ()
                        playerClicks = []
                    else:
                        sqSelected = (row, col)
                        playerClicks.append(sqSelected)
                    if len(playerClicks) == 2:
                        move = ChessEngine.Move(playerClicks[0], playerClicks[1], gs.board)
                        for i in range(len(validmoves)):
                            if move == validmoves[i]:
                                gs.makeMove(validmoves[i])
                                moveMade = True
                                animate = True
                                sqSelected = ()
                                playerClicks = []
                        if not moveMade:
                            # illegal destination: treat it as a new origin
                            playerClicks = [sqSelected]
            elif e.type == p.KEYDOWN:
                if e.key == p.K_z:  # undo last move
                    gs.undoMove()
                    moveMade = True
                    animate = False
                    gameOver = False
                if e.key == p.K_r:  # reset the whole game
                    gs = ChessEngine.GameState()
                    validmoves = gs.getValidMoves()
                    sqSelected = ()
                    playerClicks = []  # was (), which broke later .append()
                    moveMade = False
                    animate = False
                    gameOver = False
        if not gameOver and not humansTurn:
            start = time.time()
            AIMove = ChessAi.findBestMoveMinMax(gs, validmoves)
            if AIMove is None:
                AIMove = ChessAi.findRandomMove(validmoves)
            gs.makeMove(AIMove)
            end = time.time()
            print(end - start)  # seconds the AI spent choosing its move
            moveMade = True
            animate = True
        if moveMade:
            if animate:
                animateMove(gs.moveLog[-1], screen, gs.board, clock)
            validmoves = gs.getValidMoves()
            moveMade = False
            animate = False
        drawGameState(screen, gs, validmoves, sqSelected)
        if gs.checkMate:
            gameOver = True
            if gs.whiteToMove:
                drawText(screen, "As Pretas ganham por Checkmate")
            else:
                drawText(screen, "As Brancas ganham por Checkmate")
        elif gs.staleMate:
            gameOver = True
            drawText(screen, "Stalemate")
        clock.tick(MAX_FPS)
        p.display.flip()
def drawGameState(screen,gs,validmoves,sqSelected):
    """Render one frame: board squares first, then pieces, then highlights
    on top (the order fixes the z-stacking)."""
    drawBoard(screen)
    drawPieces(screen,gs.board)
    highlightSquares(screen,gs,validmoves,sqSelected)
def drawBoard(screen):
    """Paint the checkered 5x5 board; even-parity squares are gray."""
    global colors
    colors = [p.Color("gray"), p.Color("white")]
    for r in range(DIMENSION):
        for c in range(DIMENSION):
            shade = colors[(r + c) % 2]
            rect = p.Rect(c * SQ_SIZE, r * SQ_SIZE, SQ_SIZE, SQ_SIZE)
            p.draw.rect(screen, shade, rect)
def drawPieces(screen,board):
    """Blit each non-empty square's piece sprite onto the board."""
    for r in range(DIMENSION):
        for c in range(DIMENSION):
            code = board[r][c]
            if code == "--":
                continue
            screen.blit(IMAGES[code], p.Rect(c * SQ_SIZE, r * SQ_SIZE, SQ_SIZE, SQ_SIZE))
def animateMove(move,screen,board,clock):
    """Slide the moved piece from its start to its end square at 60 fps."""
    global colors
    dR = move.endRow - move.startRow
    dC = move.endCol - move.startCol
    framesPerSquare = 10  # frames spent per square of travel
    frameCount = (abs(dR) + abs(dC)) * framesPerSquare
    for frame in range(frameCount + 1):
        progress = frame / frameCount
        r = move.startRow + dR * progress
        c = move.startCol + dC * progress
        drawBoard(screen)
        drawPieces(screen, board)
        # repaint the destination square to erase the piece drawn there...
        color = colors[(move.endRow + move.endCol) % 2]
        endSquare = p.Rect(move.endCol * SQ_SIZE, move.endRow * SQ_SIZE, SQ_SIZE, SQ_SIZE)
        p.draw.rect(screen, color, endSquare)
        # ...keeping any captured piece visible until the mover arrives
        if move.pieceCaptured != "--":
            screen.blit(IMAGES[move.pieceCaptured], endSquare)
        screen.blit(IMAGES[move.pieceMoved], p.Rect(c * SQ_SIZE, r * SQ_SIZE, SQ_SIZE, SQ_SIZE))
        p.display.flip()
        clock.tick(60)
def drawText(screen,text):
    """Draw end-of-game text roughly centered in the window."""
    font = p.font.SysFont('Helvitca', 20, True, False)
    rendered = font.render(text, 0, p.Color('Black'))
    offset_x = WIDTH / 2 - rendered.get_width() / 2
    offset_y = HEIGHT / 2 - rendered.get_height() / 2
    screen.blit(rendered, p.Rect(0, 0, WIDTH, HEIGHT).move(offset_x, offset_y))
main()
|
{"/ChessMain.py": ["/ChessEngine.py", "/ChessAi.py"]}
|
39,699
|
hairtonvanda18/ChessEngineAI
|
refs/heads/main
|
/ChessAi.py
|
import random
pieceScore = {"K": 0,"Q": 10,"R": 5, "B": 3, "N" :3, "p": 1}  # material values; king excluded from counting
CHECKMATE = 1000  # score magnitude representing a decided game
STALEMATE = 0  # drawn positions are worth nothing
DEPTH = 2  # plies searched by findBestMoveMinMax
def findRandomMove(validmoves):
    """Pick a uniformly random move from the non-empty move list."""
    idx = random.randint(0, len(validmoves) - 1)
    return validmoves[idx]
def findBestMove(gs,validmoves):
    """Greedy two-ply search: choose the move minimising the opponent's best
    material reply.

    Returns the chosen move, or None when validmoves is empty. Scores are
    negated by turnMultiplier so 'good for the mover' is always positive.
    """
    turnMultiplier = 1 if gs.whiteToMove else -1
    oppMinMaxScore = CHECKMATE
    bestPlayerMove = None
    random.shuffle(validmoves)  # tie-break equally-scored moves randomly
    for playerMove in validmoves:
        gs.makeMove(playerMove)
        oppMoves=gs.getValidMoves()
        if gs.staleMate:
            oppMaxScore= STALEMATE
        elif gs.checkMate:
            # NOTE(review): identical to the else initialiser below; the
            # checkMate branch looks redundant as written — confirm intent.
            oppMaxScore = -CHECKMATE
        else:
            oppMaxScore = -CHECKMATE
            for oppMove in oppMoves:
                gs.makeMove(oppMove)
                gs.getValidMoves()  # refreshes checkMate/staleMate flags
                if gs.checkMate:
                    score = CHECKMATE
                elif gs.staleMate:
                    score = STALEMATE
                else:
                    score = -turnMultiplier * scoreMaterial(gs.board)
                if score > oppMaxScore:
                    oppMaxScore= score
                gs.undoMove()
        # keep the move whose worst-case reply is least damaging
        if oppMaxScore < oppMinMaxScore:
            oppMinMaxScore = oppMaxScore
            bestPlayerMove = playerMove
        gs.undoMove()
    return bestPlayerMove
def findBestMoveMinMax(gs,validmoves):
    """Entry point for the minimax AI: returns the chosen move (or None)."""
    global nextMove
    nextMove = None  # set as a side effect by the root of the recursion
    findMoveMinMax(gs, validmoves, DEPTH, gs.whiteToMove)
    chosen = nextMove
    return chosen
def findMoveMinMax(gs,validmoves,depth,whiteToMove):
    """Plain minimax over material score.

    Records the best root move into the global nextMove (only when
    depth == DEPTH, i.e. at the top of the recursion) and returns the
    best score reachable for the side indicated by whiteToMove.
    """
    global nextMove
    if depth == 0:
        return scoreMaterial(gs.board)  # leaf: static material evaluation
    if whiteToMove:
        # maximising player (white)
        maxScore = -CHECKMATE
        for move in validmoves:
            gs.makeMove(move)
            nextMoves = gs.getValidMoves()
            score = findMoveMinMax(gs,nextMoves,depth-1, False)
            if score > maxScore:
                maxScore = score
                if depth == DEPTH:
                    nextMove = move
            gs.undoMove()
        return maxScore
    else:
        # minimising player (black)
        minScore = CHECKMATE
        for move in validmoves:
            gs.makeMove(move)
            nextMoves = gs.getValidMoves()
            score = findMoveMinMax(gs,nextMoves,depth-1, True)
            if score < minScore:
                minScore = score
                if depth == DEPTH:
                    nextMove = move
            gs.undoMove()
        return minScore
def scoreBoard(gs):
    """Signed evaluation of a game state (positive favours white).

    Checkmate counts for whoever delivered it, stalemate scores zero,
    otherwise plain material balance.
    """
    if gs.checkMate:
        return -CHECKMATE if gs.whiteToMove else CHECKMATE
    if gs.staleMate:
        return STALEMATE
    total = 0
    for row in gs.board:
        for square in row:
            if square[0] == 'w':
                total += pieceScore[square[1]]
            elif square[0] == 'b':
                total -= pieceScore[square[1]]
    return total
def scoreMaterial(board):
    """Material balance of a raw board: white piece values minus black's."""
    balance = 0
    for rank in board:
        for square in rank:
            owner = square[0]
            if owner == 'w':
                balance += pieceScore[square[1]]
            elif owner == 'b':
                balance -= pieceScore[square[1]]
    return balance
|
{"/ChessMain.py": ["/ChessEngine.py", "/ChessAi.py"]}
|
39,700
|
hairtonvanda18/ChessEngineAI
|
refs/heads/main
|
/ChessEngine.py
|
import time
class GameState():
    """Full rules state for 5x5 mini-chess: board, turn, move generation.

    Board squares are two-character strings: colour ('w'/'b') plus piece
    letter, or "--" for an empty square.

    Bug fix relative to the original: getKingMoves iterated range(5) over
    the 8 king direction offsets, so the last three (all downward moves)
    were never generated; it now iterates all 8.
    """
    def __init__(self):
        self.board=[
            ["bR","bN","bB","bQ","bK"],
            ["bp","bp","bp","bp","bp"],
            ["--","--","--","--","--"],
            ["wp","wp","wp","wp","wp"],
            ["wR","wN","wB","wQ","wK"]
        ]
        # dispatch table: piece letter -> generator of its pseudo-legal moves
        self.moveFunctions = {'p':self.getPawnMoves,"R":self.getRookMoves,"N":self.getNightMoves,"K":self.getKingMoves,"Q":self.getQueenMoves,"B":self.getBishopMoves}
        self.whiteToMove = True
        self.moveLog=[]
        self.whiteKingLocation=(4,4)
        self.blackKingLocation=(0,4)
        self.checkMate = False
        self.staleMate = False
    def makeMove(self,move):
        """Apply a Move, log it, switch turns, track kings, auto-queen pawns."""
        self.board[move.startRow][move.startCol] = "--"
        self.board[move.endRow][move.endCol] = move.pieceMoved
        self.moveLog.append(move)
        self.whiteToMove = not self.whiteToMove
        if move.pieceMoved == "wK":
            self.whiteKingLocation = (move.endRow,move.endCol)
        elif move.pieceMoved == "bK":
            self.blackKingLocation =(move.endRow,move.endCol)
        if move.isPawnPromotion:
            self.board[move.endRow][move.endCol] = move.pieceMoved[0] + 'Q'
    def undoMove(self):
        """Revert the last logged move (no-op when the log is empty)."""
        if len(self.moveLog) != 0:
            move = self.moveLog.pop()
            self.board[move.startRow][move.startCol] = move.pieceMoved
            self.board[move.endRow][move.endCol] = move.pieceCaptured
            self.whiteToMove = not self.whiteToMove
            if move.pieceMoved == "wK":
                self.whiteKingLocation = (move.startRow,move.startCol)
            elif move.pieceMoved == "bK":
                self.blackKingLocation =(move.startRow,move.startCol)
            self.checkMate = False
            self.staleMate = False
    def getValidMoves(self):
        """Pseudo-legal moves minus those leaving the mover in check;
        also refreshes the checkMate/staleMate flags."""
        moves = self.getAllPossibleMoves()
        # iterate backwards so removal does not shift unvisited indices
        for i in range(len(moves)-1,-1,-1):
            self.makeMove(moves[i])
            self.whiteToMove = not self.whiteToMove
            if self.inCheck():
                moves.remove(moves[i])
            self.whiteToMove = not self.whiteToMove
            self.undoMove()
        if len(moves) == 0:
            if self.inCheck():
                self.checkMate = True
            else:
                self.staleMate = True
        else:
            self.checkMate = False
            self.staleMate = False
        return moves
    def inCheck(self):
        """True when the side to move has its king under attack."""
        if self.whiteToMove:
            return self.squareUnderAttack(self.whiteKingLocation[0],self.whiteKingLocation[1])
        else:
            return self.squareUnderAttack(self.blackKingLocation[0],self.blackKingLocation[1])
    def squareUnderAttack(self,r,c):
        """True when any opposing move can land on square (r, c)."""
        self.whiteToMove = not self.whiteToMove  # view from the opponent
        oppMoves = self.getAllPossibleMoves()
        self.whiteToMove = not self.whiteToMove
        for move in oppMoves:
            if move.endRow == r and move.endCol == c:
                return True
        return False
    def getAllPossibleMoves(self):
        """All pseudo-legal moves for the side to move (checks ignored)."""
        moves = []
        for r in range(len(self.board)):
            for c in range(len(self.board[r])):
                turn = self.board[r][c][0]
                if (turn == "w" and self.whiteToMove) or (turn == "b" and not self.whiteToMove):
                    piece = self.board[r][c][1]
                    self.moveFunctions[piece](r,c,moves)
        return moves
    def getPawnMoves(self,r,c,moves):
        """Single push plus diagonal captures (no double push / en passant)."""
        if self.whiteToMove:
            if self.board[r-1][c] == "--":
                moves.append(Move((r,c),(r-1,c),self.board))
            if c-1 >= 0:
                if self.board[r-1][c-1][0] == 'b':
                    moves.append(Move((r,c),(r-1,c-1),self.board))
            if c+1 <= 4:
                if self.board[r-1][c+1][0] == 'b':
                    moves.append(Move((r,c),(r-1,c+1),self.board))
        else:
            if self.board[r+1][c] == "--":
                moves.append(Move((r,c),(r+1,c),self.board))
            if c-1 >= 0:
                if self.board[r+1][c-1][0] == 'w':
                    moves.append(Move((r,c),(r+1,c-1),self.board))
            if c+1 <= 4:
                if self.board[r+1][c+1][0] == 'w':
                    moves.append(Move((r,c),(r+1,c+1),self.board))
    def getRookMoves(self,r,c,moves):
        """Slide along ranks/files until blocked; a capture ends the ray."""
        directions = ((-1,0), (0,-1), (1,0), (0,1))
        enemyColor = "b" if self.whiteToMove else "w"
        for d in directions:
            for i in range(1,5):
                endRow = r + d[0] * i
                endCol = c + d[1] * i
                if 0 <= endRow < 5 and 0 <= endCol < 5:
                    endPiece = self.board[endRow][endCol]
                    if endPiece == "--":
                        moves.append(Move((r,c),(endRow,endCol),self.board))
                    elif endPiece[0] == enemyColor:
                        moves.append(Move((r,c),(endRow,endCol),self.board))
                        break
                    else:
                        break
                else:
                    break
    def getBishopMoves(self,r,c,moves):
        """Slide along diagonals until blocked; a capture ends the ray."""
        directions=((-1,-1), (-1,1), (1,-1), (1,1))
        enemyColor = "b" if self.whiteToMove else "w"
        for d in directions:
            for i in range(1,5):
                endRow = r + d[0] * i
                endCol = c + d[1] * i
                if 0 <= endRow < 5 and 0 <= endCol < 5:
                    endPiece = self.board[endRow][endCol]
                    if endPiece == "--":
                        moves.append(Move((r,c),(endRow,endCol),self.board))
                    elif endPiece[0] == enemyColor:
                        moves.append(Move((r,c),(endRow,endCol),self.board))
                        break
                    else:
                        break
                else:
                    break
    def getNightMoves(self,r,c,moves):
        """Knight jumps to any of 8 offsets not occupied by a friendly piece."""
        knightsMoves=((-2,-1),(-2,1),(-1,-2),(-1,2),(1,-2),(1,2),(2,-1),(2,1))
        allyColor = "w" if self.whiteToMove else "b"
        for m in knightsMoves:
            endRow = r + m[0]
            endCol = c + m[1]
            if 0 <= endRow < 5 and 0 <= endCol < 5:
                endPiece = self.board[endRow][endCol]
                if endPiece[0] != allyColor:
                    moves.append(Move((r,c),(endRow,endCol),self.board))
    def getQueenMoves(self,r,c,moves):
        """Queen = rook rays plus bishop rays."""
        self.getRookMoves(r,c,moves)
        self.getBishopMoves(r,c,moves)
    def getKingMoves(self,r,c,moves):
        """One step in any of the 8 directions onto a non-friendly square.

        Bug fix: the original looped range(5), silently dropping the last
        three directions (all downward king moves).
        """
        kingMoves = ((-1,-1),(-1,0),(-1,1),(0,-1),(0,1),(1,-1),(1,0),(1,1))
        allyColor = "w" if self.whiteToMove else "b"
        for i in range(8):
            endRow = r + kingMoves[i][0]
            endCol = c + kingMoves[i][1]
            if 0 <= endRow < 5 and 0 <= endCol <5:
                endPiece = self.board[endRow][endCol]
                if endPiece[0] != allyColor:
                    moves.append(Move((r,c),(endRow,endCol),self.board))
class Move():
    """A single move on the 5x5 board, with rank/file notation helpers."""
    # board row 0 is rank "5" (black's back rank); row 4 is rank "1"
    ranksToRows = {"1": 4,"2": 3,"3": 2,"4": 1,"5": 0}
    rowsToRanks = {v: k for k, v in ranksToRows.items()}
    filesToCols = {"a":0,"b":1,"c":2,"d":3,"e":4}
    colsToFiles = {v: k for k, v in filesToCols.items()}
    def __init__(self,startSq,endSq,board):
        """Record start/end squares and the pieces involved."""
        self.startRow, self.startCol = startSq
        self.endRow, self.endCol = endSq
        self.pieceMoved = board[self.startRow][self.startCol]
        self.pieceCaptured = board[self.endRow][self.endCol]
        # a pawn reaching the far rank will be promoted to a queen
        reached_last_rank = (self.pieceMoved == "wp" and self.endRow == 0) or (
            self.pieceMoved == "bp" and self.endRow == 4)
        self.isPawnPromotion = bool(reached_last_rank)
        # four-digit id uniquely encodes start and end squares
        self.moveId = self.startRow * 1000 + self.startCol * 100 + self.endRow * 10 + self.endCol
    def __eq__(self,other):
        """Two moves are equal when they cover the same squares."""
        return isinstance(other, Move) and self.moveId == other.moveId
    def getChessNotation(self):
        """Like 'a2a4': origin square followed by destination square."""
        origin = self.getRankFile(self.startRow, self.startCol)
        target = self.getRankFile(self.endRow, self.endCol)
        return origin + target
    def getRankFile(self,r,c):
        """Convert (row, col) indices to file letter plus rank digit."""
        return self.colsToFiles[c] + self.rowsToRanks[r]
|
{"/ChessMain.py": ["/ChessEngine.py", "/ChessAi.py"]}
|
39,703
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/6.py
|
from utils import readFile
import bisect
lines = readFile('inputs/inputTask6.txt')
def listDistinctGroupChars(group):
    """Distinct answer characters across every line of a group
    (list in arbitrary set order)."""
    return list(set(''.join(group)))
def findCommonGroupAnswers(group):
    """Count the answers that every member of the group gave.

    Returns 0 for an empty group — the original raised IndexError on
    listList[0] when fed an empty group (e.g. a trailing blank line).
    """
    if not group:
        return 0
    common = set(group[0])
    for answers in group[1:]:
        common &= set(answers)
    return len(common)
def calcGroupValue(group):
    """Number of distinct answers anyone in the group gave."""
    return len(listDistinctGroupChars(group))
def getLastLine():
    """Return (last line, second-to-last line) of the day-6 input file.

    Cleanup: the original initialised dead locals, one of them typo'd as
    `astLine`; behaviour for files with at least two lines is unchanged.
    """
    with open('inputs/inputTask6.txt') as infile:
        secondLastLine, lastLine = infile.readline(), infile.readline()
        for line in infile:
            secondLastLine = lastLine
            lastLine = line
    return lastLine, secondLastLine
def getTotalDistinct(lines):
    """Sum, over blank-line-separated groups, of each group's distinct answers."""
    total = 0
    group = []
    for line in lines:
        if not line:  # blank line closes the current group
            total += calcGroupValue(group)
            group = []
        else:
            group.append(line)
    total += calcGroupValue(group)  # final group has no trailing blank line
    return total
def getTotalCommon(lines):
    """Sum, over blank-line-separated groups, of answers common to all members."""
    total = 0
    group = []
    for line in lines:
        if not line:  # blank line closes the current group
            total += findCommonGroupAnswers(group)
            group = []
        else:
            group.append(line)
    total += findCommonGroupAnswers(group)  # score the final group too
    return total
print(getTotalDistinct(lines))  # part 1
print(getTotalCommon(lines))  # part 2
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,704
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/16.py
|
from utils import readFile
import re
lines = readFile('inputs/inputTask16.txt')
def GetValidRanges(lines):
    """Collect every number allowed by at least one field rule.

    Parsing stops at the 'your ticket' header; blank lines are skipped.
    Returns the numbers as a list in first-seen order. Performance fix:
    the original did a linear `not in` scan over the result list for every
    number (quadratic); a companion set makes the dedup O(1) per number.
    """
    valids = []
    seen = set()
    for line in lines:
        if re.findall('your ticket', line):
            break
        if line == '':
            continue
        _, _, spec = line.partition(':')
        for r in spec.split('or'):
            lowStr, highStr = r.strip().split('-')
            for num in range(int(lowStr), int(highStr) + 1):
                if num not in seen:
                    seen.add(num)
                    valids.append(num)
    return valids
def GetNearbyTicketList(lines):
    """Return each line after 'nearby tickets:' split into string fields."""
    tickets = []
    collecting = False
    for line in lines:
        if collecting:
            tickets.append(line.split(','))
        elif line.startswith('nearby tickets:'):
            collecting = True  # start collecting from the next line on
    return tickets
def CalcTicketErrorRate(lines):
    """Part 1: sum every ticket value valid for no field (each is printed)."""
    valids = GetValidRanges(lines)
    errorRate = 0
    for ticket in GetNearbyTicketList(lines):
        for value in ticket:
            if int(value) not in valids:
                print(value)
                errorRate += int(value)
    return errorRate
# part b
def GetValidTickets(lines, valids):
    """Part 2 helper: keep only tickets whose every field value is valid.

    Bug fix: the original removed items from the list while iterating it,
    which skips the ticket following each removal (and can remove the
    wrong duplicate). Filtering into a new list inspects every ticket
    exactly once. The debug prints are preserved.
    """
    nearbyTicketsList = GetNearbyTicketList(lines)
    validTickets = [ticket for ticket in nearbyTicketsList
                    if all(int(num) in valids for num in ticket)]
    print("nearbyTicketsList")
    print(validTickets)
    return validTickets
# list fields and their ranges
def ListFieldRanges(lines):
    """Build one flat list per field: [name, n, n, ..., name, n, ...].

    The quirky layout is kept: the field name is re-inserted before each
    range's numbers. Parsing stops at the 'your ticket' header; the
    accumulated result is printed before being returned.
    """
    validsList = []
    for line in lines:
        if re.findall('your ticket', line):
            break
        if line == '':
            continue
        name, _, spec = line.partition(':')
        entry = []
        for r in spec.split('or'):
            lo, hi = r.strip().split('-')
            entry.append(name)
            entry.extend(range(int(lo), int(hi) + 1))
        validsList.append(entry)
    print(validsList)
    return validsList
def MultiplyDepartureNumbers(lines):
    """Part 2 attempt: match each field's ranges against ticket columns.

    Minimal fix: the original wrote `inRange == False` (a no-op comparison)
    where an assignment was intended, so inRange was never cleared by an
    out-of-range value. The rest of the (work-in-progress) logic is kept
    exactly as the author left it.
    """
    nearbyTicketsList = GetNearbyTicketList(lines)
    valids = GetValidRanges(lines)
    validNearbyTickets = GetValidTickets(lines, valids)
    validsList = ListFieldRanges(lines)
    inRange = True
    fields = [0]*20
    count = 0
    for i in range(0, len(validsList)):
        for c in range(len(nearbyTicketsList)):
            if inRange == True:
                break
        for nearbyTicket in nearbyTicketsList:
            if count == 20:
                count = 0
                inRange = True
                break
            elif int(nearbyTicket[count]) not in validsList[i]:
                inRange = False  # was `inRange == False`, a no-op comparison
                break
        if inRange == True:
            print(validsList[i][0])
            fields.append(validsList[i][0])
            inRange = False
        count = 0
    print(fields)
    return fields
print(MultiplyDepartureNumbers(lines))  # part 2 (work in progress)
#print(CalcTicketErrorRate(lines))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,705
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/13.py
|
from utils import readFile
departures = readFile('inputs/inputTask13.txt')
def GetEarliestDeparture(departures):
    """Part 1: (minutes waited) * (id of the first catchable bus)."""
    now = int(departures[0])
    busIds = departures[1].split(',')
    earliestDeparture = float('inf')
    bestBus = None
    for bus in busIds:
        if bus == 'x':
            continue  # out-of-service slots carry no bus
        interval = int(bus)
        nextDeparture = 0
        while nextDeparture <= now:
            nextDeparture += interval
        if nextDeparture < earliestDeparture:
            earliestDeparture = nextDeparture
            bestBus = interval
    return (earliestDeparture - now) * bestBus
def FindFirstDeparturesInSequence(departures):
    """Part 2: brute-force the first timestamp where every listed bus departs
    at its offset position in the schedule.

    NOTE(review): scans from 100000000000000 in steps of the first bus id,
    so this can run for an extremely long time (CRT would be the fast
    route); the timestamp is also incremented both at the top and the
    bottom of the while body — confirm that double step is intended.
    """
    departures = departures[1].split(',')
    earliestDeparture = float('inf')
    afterCurrentTimestamp = 0
    index = []
    sequence = 0
    timestamp = 0
    #populate index to find departures times that are not 'x'
    count = 0
    for departure in range(len(departures)):
        if departures[departure] != 'x':
            index.append(departure)
            count += 1
        else:
            index.append(-1)  # -1 marks an 'x' (no constraint) slot
    print(count)
    inSequence = 1
    multiplier = 1
    timestamp = 100000000000000
    reqAcc = False
    while reqAcc == False:
        inSequence = 1
        timestamp += int(departures[0])
        iCount = 1
        for i in index[1:]:
            if i != -1:
                if (timestamp + index[iCount]) % int(departures[iCount]) == 0:
                    inSequence += 1
            iCount += 1
        if inSequence == count:
            reqAcc = True
            #inSequence = 1
        timestamp += int(departures[0])
    return timestamp
#print(GetEarliestDeparture(departures))
print(FindFirstDeparturesInSequence(departures))  # part 2 brute force; expect a very long run
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,706
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/5.py
|
from utils import readFile
import bisect
lines = readFile('inputs/inputTask5.txt')
def getSeatBinary(line):
    """Map a boarding pass to 10 bits: 'B' (rows, first 7) and 'R' set 1."""
    bits = [0] * 10
    for index, letter in enumerate(line):
        if (letter == 'B' and index <= 6) or letter == 'R':
            bits[index] = 1
    return bits
def getSeatID(seatsInBinary):
    """Decode 7 row bits followed by 3 column bits into row * 8 + col."""
    row = 0
    col = 0
    rowPower = 6
    colPower = 2
    for bit in seatsInBinary:
        if rowPower >= 0:  # still inside the 7 row bits
            row += bit * 2 ** rowPower
            rowPower -= 1
        else:              # remaining bits encode the column
            col += bit * 2 ** colPower
            colPower -= 1
    return int(row * 8 + col)
def getBinaryValue(index, seatsInBinary):
    """Interpret the leading bits of the list, MSB weighted 2**index;
    bits beyond position `index` are ignored."""
    value = 0
    for bit in seatsInBinary:
        if index < 0:
            break
        value += bit * 2 ** index
        index -= 1
    return value
def getHighestID(lines):
    """Largest seat id among all boarding passes (0 when lines is empty)."""
    highest = 0
    for line in lines:
        seatID = getSeatID(getSeatBinary(line))
        if seatID >= highest:
            highest = seatID
    return highest
def getLowestID(lines):
    """Smallest seat id among all passes, bounded above by the highest."""
    lowest = getHighestID(lines)
    for line in lines:
        seatID = getSeatID(getSeatBinary(line))
        if seatID <= lowest:
            lowest = seatID
    return lowest
def getYourSeatID(lines):
    """List seat ids missing between the lowest and the scanned range.

    Your seat is the gap in the otherwise contiguous block of occupied
    ids; the lowest and highest ids are printed as a debugging aid.
    """
    occupied = []
    missingIDs = []
    lowest = getLowestID(lines)
    highest = getHighestID(lines)
    candidate = lowest
    print(lowest)
    print(highest)
    for line in lines:
        seatID = getSeatID(getSeatBinary(line))
        if seatID not in occupied:
            bisect.insort(occupied, seatID)  # keep the id list sorted
    for _ in occupied:
        if candidate not in occupied:
            missingIDs.append(candidate)
        candidate = candidate + 1
    return missingIDs
print(getYourSeatID(lines))  # part 2: ids missing from the occupied range (your seat)
print(getHighestID(lines))  # part 1
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,707
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/10.py
|
from utils import readFile
import math
lines = readFile('inputs/inputTask10.txt')  # adapter joltage ratings, one per line
lines = [int(line) for line in lines]  # parse once, up front
print(lines)  # debug: show the parsed input
#for line in lines:
    #print(line)
def FindChain(lines):
    """AoC 2020 day 10 part 1: walk the adapters in 1/2/3-jolt hops and
    return numSingleHop * numTripleHop once the maximum adapter is reached.

    NOTE(review): the lowest adapter's own hop (from the 0-jolt outlet) is
    pre-counted before the loop, and the device's implicit +3 step is
    added as the extra triple hop at the max; returns False if no hop of
    1, 2 or 3 jolts fits at some point in the chain.
    """
    singleHop = 1
    doubleHop = 2
    tripleHop = 3
    numSingleHop = 0
    numDoubleHop = 0
    numTripleHop = 0
    deviceJolts = 3
    minJolt = min(lines)
    maxJolt = max(lines)
    current = minJolt
    # count the implicit hop from the 0-jolt outlet to the first adapter
    if current == 1:
        numSingleHop = numSingleHop + 1
    elif current == 2:
        numDoubleHop = numDoubleHop + 1
    elif current == 3:
        numTripleHop = numTripleHop + 1
    for line in lines:
        if current == maxJolt:
            numTripleHop = numTripleHop + 1  # device is always +3 jolts
            total = numSingleHop * numTripleHop
            return total
        if (current + singleHop) in lines:
            current = current + singleHop
            numSingleHop = numSingleHop + 1
        elif (current + doubleHop) in lines:
            current = current + doubleHop
            numDoubleHop = numDoubleHop + 1
        elif (current + tripleHop) in lines:
            current = current + tripleHop
            numTripleHop = numTripleHop + 1
        else:
            return False
print(FindChain(lines))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,708
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/12.py
|
from utils import readFile
import re
instructions = readFile('inputs/inputTask12.txt')
#I must say, this way of writing function and variable names, I do not like
def RotateShip(pos, command, magnitude, newDirection):
    """Turn the ship: 'L' is negative degrees, 'R' positive.

    The heading is stored both into pos['F'] and returned, normalised
    to [0, 360).
    """
    delta = -magnitude if command == 'L' else magnitude
    newDirection = (newDirection + delta) % 360
    pos.update({'F': newDirection})
    return newDirection
def RotateWaypoint(waypoint, command, magnitude, newDirection):
    """Rotate the waypoint about the ship by +-90/180/270 degrees.

    'L' is counter-clockwise (negative); exactly the magnitudes the
    original matched (90/-270, 180/-180, 270/-90) rotate — anything else
    falls through and leaves the waypoint untouched, as before.
    """
    if command == 'L':
        magnitude = -magnitude
    quarter_turns = {90: 1, -270: 1, 180: 2, -180: 2, 270: 3, -90: 3}
    turns = quarter_turns.get(magnitude)
    if turns is not None:
        e, s, w, n = waypoint['E'], waypoint['S'], waypoint['W'], waypoint['N']
        for _ in range(turns):
            # one clockwise quarter-turn: E<-N, S<-E, W<-S, N<-W
            e, s, w, n = n, e, s, w
        waypoint.update({'E': e, 'S': s, 'W': w, 'N': n})
    return waypoint
def MoveInCommandDirection(pos, command, magnitude):
    """Shift the ship's raw N/S/E/W tally for a direct move command."""
    pos[command] = pos[command] + magnitude
def UpdateWaypoint(waypoint, command, magnitude):
    """Shift the waypoint in the commanded direction; returns the same dict."""
    waypoint[command] = waypoint[command] + magnitude
    return waypoint
def MoveForwardByWaypoint(actualPos, command, magnitude, waypoint):
    """Part 2 'F': move the ship `magnitude` times the waypoint offset."""
    for direction in ('E', 'S', 'W', 'N'):
        actualPos[direction] += magnitude * waypoint[direction]
def MoveForwardInCurrentDirection(pos, command, magnitude):
    """Part 1 'F': move the ship along its current heading (pos['F']).

    Headings other than 0/90/180/270 leave the position unchanged, exactly
    like the original's elif fall-through.
    """
    heading_key = {0: 'E', 90: 'S', 180: 'W', 270: 'N'}.get(pos['F'])
    if heading_key is not None:
        pos[heading_key] += magnitude
def GetShipPos(instructions):
    """Run every instruction, tracking part-1 heading movement (pos) and
    part-2 waypoint movement (actualPos/waypoint) simultaneously.

    Returns (part-2 Manhattan distance, part-1 Manhattan distance).
    """
    directions = [0, 90, 180, 270] # from left to right: E S W N
    pos = {'E':0, 'S':0, 'W':0, 'N':0, 'F':0} # F is current heading
    actualPos = {'E':0, 'S':0, 'W':0, 'N':0, 'F':0}
    waypoint = {'E':1, 'S':0, 'W':0, 'N':1, 'F':0} # F is current heading. Value is units
    shipDirection = pos['F']
    newDirection = 0
    for instruction in instructions:
        command = instruction[0]
        magnitude = int(re.findall('[0-9]+', instruction)[0])
        if command == 'F':
            # forward: both models advance with the same magnitude
            MoveForwardByWaypoint(actualPos, command, magnitude, waypoint)
            MoveForwardInCurrentDirection(pos, command, magnitude)
        elif command in pos:
            # N/S/E/W: part 1 moves the ship, part 2 moves the waypoint
            MoveInCommandDirection(pos, command, magnitude)
            waypoint = UpdateWaypoint(waypoint, command, magnitude)
        elif command not in pos.keys():
            # rotation commands (L/R) adjust waypoint and heading
            waypoint = RotateWaypoint(waypoint, command, magnitude, newDirection)
            newDirection = RotateShip(pos, command, magnitude, newDirection)
    totalWaypoint = abs(actualPos['E'] - actualPos['W']) + abs(actualPos['S'] - actualPos['N'])
    totalShipPos = abs(pos['E'] - pos['W']) + abs(pos['S'] - pos['N'])
    return totalWaypoint, totalShipPos
print(GetShipPos(instructions))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,709
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/15.py
|
from utils import readFile
import re
line = readFile('inputs/inputTask15.txt')
def GetLastTwoInstances(refNumber, prevNumbers):
    """Indices of the last (up to two) occurrences of refNumber in the history."""
    lastTwoSpoken = []
    for position, value in enumerate(prevNumbers):
        if value == refNumber:
            lastTwoSpoken.append(position)
            if len(lastTwoSpoken) > 2:
                lastTwoSpoken.pop(0)  # keep only the two most recent
    return lastTwoSpoken
def GetNthNumber(line, iteration):
    """Play the memory (Van Eck) game from the comma-separated start numbers
    and return the number spoken on turn `iteration`.

    NOTE(review): the whole spoken history is kept in a list and rescanned
    via GetLastTwoInstances every turn, so the 30M-turn part B is O(n^2)
    and impractically slow; the dicts maintained alongside are only
    partially used (lastSpokenIndex is computed but never read).
    """
    prevNumbers = []
    startNumbers = line[0].split(',')
    startNumbers = [int(startNumber) for startNumber in startNumbers]
    currNum = -1
    nextNum = -1
    lastSpokenDict = {}
    beforeLastSpokenDict = {}
    count = 1
    # seed the history with the starting numbers
    for num in startNumbers:
        prevNumbers.append(num)
        if num not in lastSpokenDict:
            lastSpokenDict.update({num: count})
            beforeLastSpokenDict.update({num: count})
        count += 1
    currNum = startNumbers[len(startNumbers)-1]
    print(lastSpokenDict)
    print(prevNumbers)
    for num in range(len(startNumbers), iteration):
        lastSpokenDict.update({currNum: count})
        lastTwo = GetLastTwoInstances(currNum, prevNumbers)
        if currNum in prevNumbers[:len(prevNumbers)-1]:
            # spoken before: next number is the gap between its last two turns
            lastSpokenIndex = lastSpokenDict[currNum]
            currNum = abs(lastTwo[0] - lastTwo[1])
            prevNumbers.append(currNum)
        elif currNum not in prevNumbers[:len(prevNumbers) - 1]:
            # first time spoken: next number is 0
            currNum = 0
            prevNumbers.append(currNum)
            lastSpokenDict.update({currNum: count})
        count += 1
    return(currNum)
def main():
    """Solve both parts: the 2020th and the 30,000,000th spoken number."""
    part_a = GetNthNumber(line, 2020)
    part_b = GetNthNumber(line, 30000000)
    return part_a, part_b
print(main())
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,710
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/3.py
|
from utils import readFile
lineArr = readFile('inputs/inputTask3.txt')
def task3a(lineArr):
    """Count trees ('#') hit while descending the map with slope right-3/down-1.

    The map wraps horizontally. Fix over the original: the wrap width was
    hard-coded to 31 (the puzzle input's width); it now uses each row's own
    length, so maps of any width work.
    """
    treeCount = 0
    col = 0
    for row in lineArr:
        if row[col] == "#":
            treeCount += 1
        col = (col + 3) % len(row)
    return treeCount
def task3b(lineArr):
    """Return the product of tree counts over the five part-B slopes.

    Slopes are (right, down) pairs. Fix over the original: the horizontal
    wrap width was hard-coded to 31; it now uses each row's own length.
    """
    slopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]
    treeCountProduct = 1
    for right, down in slopes:
        treeCount = 0
        col = 0
        # Stepping every `down`-th row implements the vertical slope.
        for row in lineArr[::down]:
            if row[col] == "#":
                treeCount += 1
            col = (col + right) % len(row)
        treeCountProduct *= treeCount
    return treeCountProduct
print(task3a(lineArr))
print(task3b(lineArr))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,711
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/2.py
|
from utils import readFile
passwords = readFile('inputs/inputTask2.txt')
def Task2a(passwords):
    """Count entries like 'lo-hi c: password' where the password contains the
    character c between lo and hi times inclusive."""
    valid = 0
    for entry in passwords:
        bounds, rule_char, candidate = entry.split(' ')
        low, high = bounds.split('-')
        occurrences = candidate.count(rule_char.split(':')[0])
        if int(low) <= occurrences <= int(high):
            valid += 1
    return valid
def Task2b(passwords):
    """Count entries like 'a-b c: password' where the character c appears at
    exactly one of the 1-based positions a and b."""
    valid = 0
    for entry in passwords:
        bounds, rule_char, candidate = entry.split(' ')
        first = int(bounds.split('-')[0]) - 1
        second = int(bounds.split('-')[1]) - 1
        wanted = rule_char.split(':')[0]
        # XOR: valid only when exactly one position holds the character.
        if (candidate[first] == wanted) != (candidate[second] == wanted):
            valid += 1
    return valid
print(Task2a(passwords))
print(Task2b(passwords))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,712
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/11.py
|
from utils import readFile
import copy
lines = readFile('inputs/inputTask11.txt')
def convert2Array(lines):
    """Replace each string row with a list of its characters; mutates and
    returns *lines*.

    Fix over the original: the inner per-character loop rebuilt the split
    list once per character (O(width^2) per row) and silently left empty
    rows as strings; each row is now converted exactly once.
    """
    for idx, row in enumerate(lines):
        lines[idx] = list(row)
    return lines
def PadLines(lines):
    """Surround the seat grid with a one-cell '0' border so neighbour checks
    never index past an edge. Mutates and returns *lines*."""
    border = '0' * len(lines[0])
    lines.insert(0, border)
    lines.append(border)
    # Add the side columns to every row, including the new border rows.
    for idx, row in enumerate(lines):
        lines[idx] = '0' + row + '0'
    return lines
def CheckAdjacentSeats(splittedLines, lineCount, spotCount, acceptedOccupied):
    """Return True when at most *acceptedOccupied* of the 8 neighbours of
    (lineCount, spotCount) are occupied ('#'). The grid is assumed padded,
    so every neighbour index is in range."""
    offsets = [(-1, -1), (-1, 0), (-1, 1),
               (0, -1),           (0, 1),
               (1, -1),  (1, 0),  (1, 1)]
    occupied = sum(
        1 for dr, dc in offsets
        if splittedLines[lineCount + dr][spotCount + dc] == '#'
    )
    return occupied <= acceptedOccupied
def ChangeSeating(splittedLines):
    """Run one part-A seating round in place.

    Decisions are made against a deep-copied snapshot so all seats update
    simultaneously: 'L' becomes '#' with zero occupied neighbours, '#'
    becomes 'L' with more than three. Returns (number of flips, grid).
    """
    snapshot = copy.deepcopy(splittedLines)
    flips = 0
    for r, row in enumerate(splittedLines):
        for c, seat in enumerate(row):
            if seat == 'L' and CheckAdjacentSeats(snapshot, r, c, 0):
                splittedLines[r][c] = '#'
                flips += 1
            elif seat == '#' and not CheckAdjacentSeats(snapshot, r, c, 3):
                splittedLines[r][c] = 'L'
                flips += 1
    return flips, splittedLines
def CountOccupied(lines):
    """Total number of occupied seats ('#') in the grid (rows may be strings
    or character lists)."""
    return sum(row.count('#') for row in lines)
def mainA(lines):
    """Part A: iterate seating rounds until the grid stabilises, then print
    and return the occupied-seat count."""
    grid = convert2Array(PadLines(lines))
    while True:
        changed, grid = ChangeSeating(grid)
        if changed == 0:
            occupied = CountOccupied(grid)
            print(occupied)
            return occupied
#mainA(lines)
#part two
def CheckSeatsInSight(splittedLines, lineCount, spotCount, acceptedOccupied):
    """Part-B visibility rule: cast a ray in each of the 8 directions from
    (lineCount, spotCount), skipping floor ('.') cells, and count the first
    seat seen per ray when it is occupied ('#'). Return True when at most
    *acceptedOccupied* occupied seats are visible.

    Fixes over the original: (1) when a ray reached the '0' border padding,
    no branch matched and the step counter never advanced, hanging forever —
    any non-floor character now terminates the ray; (2) the eight copy-pasted
    per-direction state machines are collapsed into one loop; (3) bounds are
    checked so the function is safe even on unpadded grids.
    """
    directions = [(-1, -1), (-1, 0), (-1, 1),
                  (0, -1),           (0, 1),
                  (1, -1),  (1, 0),  (1, 1)]
    occupied = 0
    height = len(splittedLines)
    for dr, dc in directions:
        r, c = lineCount + dr, spotCount + dc
        while 0 <= r < height and 0 <= c < len(splittedLines[r]):
            cell = splittedLines[r][c]
            if cell == '.':
                # Floor: keep looking further along this direction.
                r += dr
                c += dc
                continue
            if cell == '#':
                occupied += 1
            break  # first non-floor cell (seat or border) ends the ray
    return occupied <= acceptedOccupied
def ChangeSeating(splittedLines):
    """Run one part-B seating round in place and return (flip count, grid).

    NOTE(review): this redefinition shadows the part-A ChangeSeating defined
    earlier in the file; after this point only the part-B rules are reachable.
    Decisions are made against a deep-copied snapshot (newLines) so that all
    seats update simultaneously within a round.
    """
    changedSeating = 0
    lineCount = 0
    spotCount = 0
    # Frozen copy of the grid: lookups use the pre-round state while the
    # live grid is mutated below.
    newLines = copy.deepcopy(splittedLines)
    for line in splittedLines:
        for spot in line:
            if spot == 'L':
                # Empty seat becomes occupied only with zero visible occupied seats.
                acceptedOccupied = 0
                if CheckSeatsInSight(newLines, lineCount, spotCount, acceptedOccupied):
                    splittedLines[lineCount][spotCount] = '#'
                    changedSeating += 1
            elif spot == '#':
                # Occupied seat empties when more than 4 occupied seats are visible.
                acceptedOccupied = 4
                if not CheckSeatsInSight(newLines, lineCount, spotCount, acceptedOccupied):
                    splittedLines[lineCount][spotCount] = 'L'
                    changedSeating += 1
            spotCount += 1
        spotCount = 0
        lineCount += 1
    return changedSeating, splittedLines
def mainB(lines):
    """Part B: iterate seating rounds until the grid stabilises, then print
    and return the occupied-seat count.

    Fix over the original: a full copy.deepcopy of the grid was taken every
    round into a variable that was never read; the dead copy is removed.
    """
    splittedLines = convert2Array(PadLines(lines))
    while True:
        changedSeating, changedLines = ChangeSeating(splittedLines)
        if changedSeating == 0:
            occupied = CountOccupied(changedLines)
            print(occupied)
            return occupied
mainB(lines)
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,713
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/4.py
|
from utils import readFile
import re
lines = readFile('inputs/inputTask4.txt')
def getPassportsWithCorrectElements(lines):
    """Count passports (blank-line-separated blocks of 'key:value' words)
    that contain all seven required fields ('cid' optional).

    Fixes over the original: (1) field names are collected in a set, so a
    duplicated key can no longer inflate the count past the threshold;
    (2) the copy-pasted flush for the final passport is shared with the
    in-loop flush.
    """
    required = {'byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid'}
    validPassports = 0
    fields = set()
    for line in lines:
        if line:
            for word in line.split():
                fields.add(word.split(':')[0])
        else:
            # Blank line ends the current passport.
            if required <= fields:
                validPassports += 1
            fields = set()
    # Flush the final passport (no trailing blank line in the input).
    if required <= fields:
        validPassports += 1
    return validPassports
def checkIfValidPassport(passport):
    """Return 1 when the passport (list of 'key:value' strings) has all seven
    required fields with valid values per the day-4 part-2 rules, else 0.
    'cid' is ignored.

    Fixes over the original: 'hcl' accepted any lowercase letters (not just
    hex digits) and did not check every character; 'pid' only verified the
    first character was a digit; 'hgt' now requires an explicit cm/in unit.
    """
    eyeColor = {'amb', 'blu', 'brn', 'gry', 'grn', 'hzl', 'oth'}
    validEntries = 0
    for elem in passport:
        entry, _, value = elem.partition(':')
        if entry == 'byr':
            if 1920 <= int(value) <= 2002:
                validEntries += 1
        elif entry == 'iyr':
            if 2010 <= int(value) <= 2020:
                validEntries += 1
        elif entry == 'eyr':
            if 2020 <= int(value) <= 2030:
                validEntries += 1
        elif entry == 'hgt':
            m = re.fullmatch(r'(\d+)(cm|in)', value)
            if m:
                num = int(m.group(1))
                unit = m.group(2)
                if (unit == 'cm' and 150 <= num <= 193) or (unit == 'in' and 59 <= num <= 76):
                    validEntries += 1
        elif entry == 'hcl':
            # '#' followed by exactly six lowercase hex digits.
            if re.fullmatch(r'#[0-9a-f]{6}', value):
                validEntries += 1
        elif entry == 'ecl':
            if value in eyeColor:
                validEntries += 1
        elif entry == 'pid':
            # Exactly nine digits, leading zeros allowed.
            if re.fullmatch(r'[0-9]{9}', value):
                validEntries += 1
    return 1 if validEntries >= 7 else 0
def getPassportsWithValidValues(lines):
    """Count passports whose values all validate per checkIfValidPassport.

    Fix over the original: the last passport in the input was never checked
    when the file did not end with a blank line; a final flush is added.
    Passports with fewer than 7 fields are skipped without validation, as
    before.
    """
    passport = []
    validPassports = 0
    for line in lines:
        if line:
            passport.extend(line.split())
        else:
            if len(passport) >= 7 and checkIfValidPassport(passport):
                validPassports += 1
            passport = []
    # Flush the trailing passport.
    if len(passport) >= 7 and checkIfValidPassport(passport):
        validPassports += 1
    return validPassports
#getPassportsWithCorrectElements(lines)
print(getPassportsWithCorrectElements(lines))
#getPassportsWithValidValues(lines)
print(getPassportsWithValidValues(lines))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,714
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/9.py
|
from utils import readFile
import math
lines = readFile('inputs/inputTask9.txt')
def populatePreamble(lines):
    """Return the first 25 entries of *lines* as a new list (the XMAS preamble)."""
    return list(lines[:25])
def moveList(count, lines, preamble):
    """Slide the window forward: drop the oldest preamble entry and append
    lines[count]. Mutates and returns *preamble*."""
    preamble.pop(0)
    preamble.append(lines[count])
    return preamble
def isNumberSumOfPrevious(lines, preamble, nextLine):
    """Return True when *nextLine* is the sum of two DIFFERENT entries of
    *preamble* (entries may be numeric strings).

    Fixes the original's i == j pairing, which wrongly accepted a number
    equal to twice a single preamble entry. *lines* is unused but kept for
    interface compatibility with existing callers.
    """
    values = [int(v) for v in preamble]
    target = int(nextLine)
    for i in range(len(values)):
        for j in range(i + 1, len(values)):
            if values[i] + values[j] == target:
                return True
    return False
def findNumber(lines):
    """Return the first entry of *lines* that is not the sum of two of the 25
    numbers preceding it (day-9 part A); 0 when every entry validates.

    NOTE(review): `count` indexes past the loop variable `line` (which is
    unused); if no invalid number exists, lines[count] eventually raises
    IndexError before the "Ingen feil" fallback is reached — TODO confirm
    whether that path matters for real inputs.
    """
    preamble = populatePreamble(lines)
    count = len(preamble)  # index of the next number to validate
    for line in lines:
        nextLine = int(lines[count])
        if not isNumberSumOfPrevious(lines, preamble, nextLine):
            return nextLine
        if len(preamble) == 25:
            # Slide the 25-number window forward past the validated entry.
            preamble = moveList(count, lines, preamble)
            #print(len(preamble))
        else:
            # Window shrank unexpectedly; treat the current entry as the answer.
            return nextLine
        count = count + 1
    print("Ingen feil")
    return 0
print(findNumber(lines))
def findContiguous(lines):
    """Find a contiguous run of at least two entries of *lines* summing to the
    invalid number from findNumber, and return that run's min + max (day-9
    part B); 0 with a message when no run exists.

    Fixes over the original: (1) max()/min() were applied to numeric STRINGS,
    comparing lexicographically (e.g. '9' > '10'); values are converted to
    int first. (2) the search only examined fixed 25-wide windows, missing
    longer runs; a sliding window over the whole sequence is used instead.
    """
    nums = [int(v) for v in lines]
    target = int(findNumber(lines))
    lo = 0
    running = 0
    for hi, value in enumerate(nums):
        running += value
        # Shrink from the left while the window sum overshoots.
        while running > target and lo < hi:
            running -= nums[lo]
            lo += 1
        if running == target and hi > lo:
            window = nums[lo:hi + 1]
            return min(window) + max(window)
    print("Ingen feil")
    return 0
print(findContiguous(lines))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,715
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/utils.py
|
def readFile(input_file):
    """Read *input_file* and return its lines as a list of strings, without
    trailing newlines.

    Fix over the original: splitlines() already returns a list, so the
    element-by-element copy is removed.
    """
    with open(input_file) as f:
        return f.read().splitlines()
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,716
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/14.py
|
from utils import readFile
import re
lines = readFile('inputs/inputTask14.txt')
def GetMemAddress(l):
    """Extract the integer address N from an instruction pair whose first
    element looks like 'mem[N]'.

    Fix over the original: the stray debug print of the address is removed.
    """
    return int(l[0].split('[')[1].split(']')[0])
def GetMemBinaryValue(l, bit):
    """Return the integer value in l[1] as a list of '0'/'1' characters,
    zero-padded on the left to *bit* digits.

    Fixes over the original: debug prints removed; the manual char-by-char
    build plus insert-padding loop is replaced with format()/zfill().
    """
    value = int(l[1].strip())
    return list(format(value, 'b').zfill(bit))
#def ConvertBinary2Int(binaryList):
def GetMaskList(l, maskList):
    """Return the mask string l[1] as a list of characters.

    Fixes over the original: debug prints removed. The *maskList* parameter
    was always discarded and rebuilt; it is kept only for interface
    compatibility with existing callers.
    """
    return list(l[1])
def MaskValue(maskList, memoryBinaryList):
    """Apply a version-1 mask: every non-'X' mask bit overwrites the value
    bit at the same position; 'X' leaves it untouched. Mutates and returns
    *memoryBinaryList*."""
    for idx, mask_bit in enumerate(maskList):
        if mask_bit != 'X':
            memoryBinaryList[idx] = mask_bit
    return memoryBinaryList
def MaskValueWithFloating(maskList, memoryBinaryList):
    """Expand a mask with floating bits: non-'X' mask bits overwrite the
    value bit, and each 'X' branches into both '0' and '1'. Returns a list
    of independent bit-character lists, one per combination (2**k results
    for k floating bits).

    Fixes over the original: (1) it appended the SAME list object for every
    branch, so all results aliased the final state; each branch is now a
    copy. (2) floating bits were written as ints 0/1 mixed into a list of
    str bits; '0'/'1' strings are used consistently. The input list is no
    longer mutated.
    """
    results = [memoryBinaryList[:]]
    for idx, mask_bit in enumerate(maskList):
        if mask_bit != 'X':
            for combo in results:
                combo[idx] = mask_bit
        else:
            expanded = []
            for combo in results:
                zero = combo[:]
                zero[idx] = '0'
                one = combo[:]
                one[idx] = '1'
                expanded.append(zero)
                expanded.append(one)
            results = expanded
    return results
def CalcSumOfMemValues(lines):
    """Day-14 part A: apply each version-1 mask to subsequent 'mem[N] = V'
    writes and return the sum of all final memory values.

    Fixes over the original: a debug `print` + early `return` (returning
    None) made the summation unreachable, and an unused part-B floating-mask
    expansion was computed per write; both are removed.
    """
    maskList = []
    bit = 36  # word width of the puzzle's memory values
    memAddressDict = {}
    for line in lines:
        l = line.split(' = ')
        if l[0] == 'mask':
            maskList = GetMaskList(l, maskList)
        else:
            memoryAddress = GetMemAddress(l)
            memoryBinaryList = GetMemBinaryValue(l, bit)
            maskedMemoryBinaryList = MaskValue(maskList, memoryBinaryList)
            # Later writes to the same address overwrite earlier ones.
            value = int("".join(str(i) for i in maskedMemoryBinaryList), 2)
            memAddressDict[memoryAddress] = value
    return sum(memAddressDict.values())
print(CalcSumOfMemValues(lines))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,717
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/7.py
|
from utils import readFile
import re
lines = readFile('inputs/inputTask7.txt')
def DigDeep(line, lines):
    """Return True when the bag rule *line* can (transitively) contain a
    shiny gold bag; returns None otherwise (falsy, as callers expect).

    Fix over the original: the 'contains no other bags' check was always
    applied to splitLine[1] instead of the bag currently being examined,
    so a terminal entry after the first position was still recursed into.
    """
    splitLine = SplitLine(line)
    for bag in splitLine[1:]:
        if "o other" in bag:
            # "no other bags" -> this entry holds nothing; skip it.
            continue
        if "shiny gold" in bag:
            return True
        child = SearchChild(bag, lines)
        if DigDeep(child, lines):
            return True
def SearchChild(bag, lines):
    """Find the rule line describing *bag*'s colour (digits and punctuation
    stripped); prints a warning and returns 0 when no rule matches."""
    colour = ' '.join(re.findall('[a-z]+', bag))
    for rule in lines:
        if rule.startswith(colour):
            return rule
    print("something is wrong")
    return 0
def SplitLine(line):
    """Split a bag rule into [container colour, content colour, ...].

    Each content entry has its leading count and trailing 'bag(s)' token
    removed; 'no other bags.' collapses to 'o other' (callers test for the
    substring "o other"). Fix over the original: the debug print is removed
    and the opaque `split(' ')[1-2]` index is written as the explicit [-1].
    """
    sl = line.replace('contain', ',').split(',')
    sl[0] = sl[0].replace(' bags ', '')
    for i in range(1, len(sl)):
        # Last whitespace token is the 'bag'/'bags.' suffix to strip.
        last_word = sl[i].split(' ')[-1]
        sl[i] = sl[i][2:].replace(last_word, '').strip()
    return sl
def BagWithGoldBags(lines):
    """Count rule lines whose bag can (transitively) contain a shiny gold bag."""
    return sum(1 for rule in lines if DigDeep(rule, lines) == True)
#print(BagWithGoldBags(lines))
# part b
def SplitIntoBagsAndNumbers(line):
    """Split a bag rule into [container colour, 'N colour bag(s)', ...],
    keeping each content entry's count and suffix (unlike SplitLine).

    Fix over the original: debug prints removed.
    """
    sl = line.replace('contain', ',').split(',')
    sl[0] = sl[0].replace(' bags ', '')
    for i in range(1, len(sl)):
        sl[i] = sl[i].strip()
    return sl
def GetNumberOfBagType(elem):
    """Return the digits found in *elem* (concatenated) as an int, or None
    when the string contains no digits."""
    digits = ''.join(re.findall('[0-9]+', elem))
    if not digits.isdigit():
        return None
    return int(digits)
def FindFactors(line, lines, factorList):
    """Walk containment chains below the split rule *line*, appending each
    bag count to *factorList* and the marker "EOL" when a chain bottoms out
    at 'no other bags.'. Returns *factorList*.

    NOTE(review): the original body referenced undefined names (`splitLine`,
    `bag`) and raised NameError on first call; this is a reconstruction of
    the apparent intent. The caller (FindTotalBagCount) also appends the
    first-level counts itself — verify against expected output before
    relying on the totals.
    """
    splitLine = line if isinstance(line, list) else SplitIntoBagsAndNumbers(line)
    if "no other bags." in splitLine:
        factorList.append("EOL")
        return factorList
    for bag in splitLine[1:]:
        factorList.append(GetNumberOfBagType(bag))
        child = SearchChild(bag, lines)
        FindFactors(SplitIntoBagsAndNumbers(child), lines, factorList)
    return factorList
def FindTotalBagCount(lines):
    """Attempt at day-7 part B: total bags inside one shiny gold bag.

    NOTE(review): as written this depended on FindFactors, whose original
    body raised NameError, so this function has likely never run to
    completion — confirm before trusting its output. The closing
    `*2 - 2` adjustment assumes every chain doubles per level (the cited
    2 * 2**x - 1 formula), which does not hold for general inputs.
    """
    bag = "shiny gold"
    line = SearchChild(bag, lines) # find the shiny gold line
    factorList = []
    totalBagsInShinyGoldBag = 1
    splitLine = SplitIntoBagsAndNumbers(line)
    for bag in splitLine[1:]:
        factorList.append(GetNumberOfBagType(bag)) # multiply number of bags in element with product
    factorList = FindFactors(splitLine, lines, factorList)
    print(factorList)
    numOfBagsInStrain = 1
    # Multiply counts along a chain until the "EOL" marker, then bank the
    # product and restart for the next chain.
    for factor in factorList:
        if factor != 'EOL':
            numOfBagsInStrain *= factor
        else:
            totalBagsInShinyGoldBag += numOfBagsInStrain
            print(numOfBagsInStrain)
            numOfBagsInStrain = 1
    totalBagsInShinyGoldBag *= 2
    totalBagsInShinyGoldBag -= 2 #remove 2 because of formula: 2 * 2**x - 1, and the shiny gold bag
    return totalBagsInShinyGoldBag
print(FindTotalBagCount(lines))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
39,718
|
Rallstad/adventofcode_2020
|
refs/heads/main
|
/8.py
|
from utils import readFile
import operator
ops = { "+": operator.add, "-": operator.sub } # legger til string operators som operators
lines = readFile('inputs/inputTask8.txt')
def accumulateAcc(lines):
    """Run the boot code until any instruction is about to execute a second
    time, and return the accumulator at that point. Instructions are strings
    like 'acc +1', 'jmp -3', 'nop +0'."""
    acc = 0
    index = 0
    visited = set()
    while index not in visited:
        visited.add(index)
        instr = lines[index]
        if "jmp" in instr:
            if "+" in instr:
                index += int(instr.split("+", 1)[1])
            elif "-" in instr:
                index -= int(instr.split("-", 1)[1])
        elif "acc" in instr:
            if "+" in instr:
                acc += int(instr.split("+", 1)[1])
            elif "-" in instr:
                acc -= int(instr.split("-", 1)[1])
            index += 1
        elif "nop" in instr:
            index += 1
    return acc
def terminateCorrectly(lines):
    """Try flipping each single 'jmp'<->'nop' instruction; return the
    accumulator of the first variant that terminates by running past the end
    of the program, or 0 (with a message) when none does.

    Fixes over the original: (1) the termination test `index == len(lines)-1`
    stopped BEFORE executing the final instruction (off by one) and could
    still IndexError on jumps past the end; termination is now "index walks
    out of [0, len)". (2) the shared *lines* list is no longer mutated
    mid-run; each candidate program is an independent copy.
    """
    def _run(program):
        # Execute until an instruction repeats (False) or control leaves the
        # program (True); return (terminated, acc).
        acc = 0
        index = 0
        visited = set()
        while 0 <= index < len(program):
            if index in visited:
                return False, acc
            visited.add(index)
            instr = program[index]
            if "jmp" in instr:
                # int() handles both '+N' and '-N' operands.
                index += int(instr.replace("jmp", "").strip())
            elif "acc" in instr:
                acc += int(instr.replace("acc", "").strip())
                index += 1
            else:  # nop (or unknown): fall through to the next instruction
                index += 1
        return True, acc

    for count, instr in enumerate(lines):
        if "jmp" in instr:
            candidate = lines[:count] + [instr.replace("jmp", "nop")] + lines[count + 1:]
        elif "nop" in instr:
            candidate = lines[:count] + [instr.replace("nop", "jmp")] + lines[count + 1:]
        else:
            continue  # flipping only applies to jmp/nop instructions
        terminated, acc = _run(candidate)
        if terminated:
            print("done")
            return acc
    print("no valid results..")
    return 0
#print(terminateCorrectly(lines))
print(accumulateAcc(lines))
|
{"/6.py": ["/utils.py"], "/16.py": ["/utils.py"], "/13.py": ["/utils.py"], "/5.py": ["/utils.py"], "/10.py": ["/utils.py"], "/12.py": ["/utils.py"], "/15.py": ["/utils.py"], "/3.py": ["/utils.py"], "/2.py": ["/utils.py"], "/11.py": ["/utils.py"], "/4.py": ["/utils.py"], "/9.py": ["/utils.py"], "/14.py": ["/utils.py"], "/7.py": ["/utils.py"], "/8.py": ["/utils.py"], "/1.py": ["/utils.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.