id
stringlengths 3
8
| content
stringlengths 100
981k
|
|---|---|
11500580
|
from helium._impl.util.dictionary import inverse
from unittest import TestCase
class InverseTest(TestCase):
    """Unit tests for helium._impl.util.dictionary.inverse."""
    def test_inverse_empty(self):
        # Inverting an empty mapping yields an empty mapping.
        self.assertEqual({}, inverse({}))
    def test_inverse(self):
        # Every member of each value-set becomes a key that maps back to
        # the set of original keys it appeared under.
        forward = {
            0: {"zero", "naught"},
            1: {"one"},
        }
        expected = {
            "zero": {0},
            "naught": {0},
            "one": {1},
        }
        self.assertEqual(expected, inverse(forward))
|
11500635
|
from setuptools import setup
# Package metadata for distributing the git_root helper via setuptools.
setup(
    name='git_root',
    version='0.1',
    description='Find the root of your git repo',
    url='https://github.com/jtilly/git_root',
    author='<NAME>',
    author_email='<EMAIL>',
    # Single package; no extra dependencies are declared.
    packages=['git_root'],
)
|
11500644
|
import sys
from misc.is_number import is_number
# Validates the background file:
def background_validation(bg_parameter, global_variables):
    """Validate the ``file=...`` background parameter and the file it names.

    bg_parameter: comma-separated ``key=value`` sub-parameters; must include a
        (case-insensitive) ``file`` entry pointing at a tab-separated
        background file whose header may contain ID, SYMBOL, BIOTYPE,
        CHROMOSOME, START and STOP columns (ID is mandatory).
    global_variables: dict of flags; the GENE_*_FLAG entries are switched on
        here according to which columns the file provides.
        NOTE(review): assumes the caller pre-initialises all GENE_*_FLAG keys
        (they are read unconditionally below) -- confirm.

    Exits the process with status 1 on any validation failure; returns the
    (updated) global_variables dict on success.

    Modernised from Python 2: print statements -> print(), bare except ->
    narrow except, `== None` -> `is None`; the missing-file-parameter check
    now runs before the file is opened (previously open(None) fired first,
    making the check unreachable).
    """
    # required inputs
    background_file_path = None
    # gets the sub-parameters
    sub_params_list = bg_parameter.split(",")
    # checks the sub params
    for sub_param in sub_params_list:
        # Each sub-parameter must have exactly one "=" separating key/value.
        if len(sub_param.split("=")) != 2:
            print("Error: the background parameter is not in a valid format.", file=sys.stderr)
            sys.exit(1)
        # Case-insensitive match on the "file" sub-parameter.
        if sub_param.upper().startswith("file=".upper()):
            background_file_path = sub_param.split("=")[1]
    # tests if the required inputs have been supplied
    if background_file_path is None:
        print("Error: the background parameter is not in a valid format.", file=sys.stderr)
        sys.exit(1)
    # Tests if the background file can be opened:
    try:
        with open(background_file_path) as handle:
            background_file = handle.readlines()
    except (IOError, OSError):
        print("Error: the background file: \"" + str(background_file_path) + "\" cannot be opened.", file=sys.stderr)
        sys.exit(1)
    line_counter = 1
    gene_IDs_dict = {}
    for line in background_file:
        line_split = line.rstrip().split("\t")
        # Validates the header line
        if line_counter == 1:
            accepted_col_headers = {"ID": True, "SYMBOL": True, "BIOTYPE": True, "CHROMOSOME": True, "START": True, "STOP": True}
            header_dict = {}
            for index in range(0, len(line_split)):
                header_dict[line_split[index].upper()] = index
                if line_split[index].upper() not in accepted_col_headers:
                    print("Error: the background file column header: " + line_split[index].upper() + " is not an accepted column header, e.g. " + "\t\t".join(accepted_col_headers.keys()), file=sys.stderr)
                    sys.exit(1)
            if "ID" not in header_dict:
                print("Error: there must be a column called \"ID\" in the background file.", file=sys.stderr)
                sys.exit(1)
            # Sets up the types of background information in the global variables
            if "SYMBOL" in header_dict:
                global_variables["GENE_SYMBOL_FLAG"] = True
            if "BIOTYPE" in header_dict:
                global_variables["GENE_BIOTYPE_FLAG"] = True
            if "CHROMOSOME" in header_dict:
                global_variables["GENE_CHROMOSOME_FLAG"] = True
            if "START" in header_dict:
                global_variables["GENE_START_FLAG"] = True
            if "STOP" in header_dict:
                global_variables["GENE_STOP_FLAG"] = True
            # Full coordinates are only available when all three are present.
            if global_variables["GENE_CHROMOSOME_FLAG"] and global_variables["GENE_START_FLAG"] and global_variables["GENE_STOP_FLAG"]:
                global_variables["GENE_COORDINATES_FLAG"] = True
        # Validates the genes
        else:
            if line_split[header_dict["ID"]] in gene_IDs_dict:
                print("Error: line " + str(line_counter) + " of the background file has a duplicate gene ID. Gene IDs MUST be unique.", file=sys.stderr)
                sys.exit(1)
            gene_IDs_dict[line_split[header_dict["ID"]]] = True
            # Column count must match the header exactly (message previously
            # only mentioned "more columns" although fewer also failed).
            if len(line_split) != len(header_dict):
                print("Error: line " + str(line_counter) + " of the background file has a different number of columns than the header line.", file=sys.stderr)
                sys.exit(1)
            if global_variables["GENE_START_FLAG"]:
                if not is_number(line_split[header_dict["START"]]):
                    print("Error: line " + str(line_counter) + " of the background file has a start coordinate that is not a number.", file=sys.stderr)
                    sys.exit(1)
            if global_variables["GENE_STOP_FLAG"]:
                if not is_number(line_split[header_dict["STOP"]]):
                    print("Error: line " + str(line_counter) + " of the background file has a stop coordinate that is not a number.", file=sys.stderr)
                    sys.exit(1)
        line_counter += 1
    print("validated the background parameter")
    return global_variables
|
11500649
|
class RadarrTooManyVariablesException(Exception):
    """Radarr API error: too many variables supplied."""


class RadarrInvalidIdSupplied(Exception):
    """Radarr API error: invalid id supplied."""


class RadarrInvalidApiKey(Exception):
    """Radarr API error: invalid API key."""


class RadarrMovieNotFound(Exception):
    """Radarr API error: movie not found."""


class RadarrValidationException(Exception):
    """Radarr API error: validation failure."""
|
11500697
|
from mysqlsh import mysqlx
# Session handle; stays None until the connection below succeeds.
# (Previously this line was a bare `mySession` expression, which raised
# NameError before anything else could run.)
mySession = None
try:
    # Connect to server on localhost
    mySession = mysqlx.get_session({
        'host': 'localhost', 'port': 33060,
        'user': 'mike', 'password': '<PASSWORD>'})
except Exception as err:
    print('The database session could not be opened: %s' % str(err))
try:
    # Only query when the connection actually succeeded.
    if mySession is not None:
        myDb = mySession.get_schema('test')
        # Use the collection 'my_collection'
        myColl = myDb.get_collection('my_collection')
        # Find a document
        myDoc = myColl.find('name like :param').limit(1).bind('param', 'S%').execute()
        # Print document
        print(myDoc.first())
except Exception as err:
    print('The following error occurred: %s' % str(err))
finally:
    # Close the session in any case (guarded: close() on None would crash
    # when the connect failed).
    if mySession is not None:
        mySession.close()
|
11500702
|
import re
from bs4 import BeautifulSoup
# Matches a fenced markdown code block; re.S lets "." span newlines.
MARKDOWN_CODE_BLOCK = re.compile("```(.*?)```", re.S)


def strip_markdown_code(s):
    """Return the contents of the first fenced code block in s, or s unchanged."""
    match = MARKDOWN_CODE_BLOCK.search(s)
    return match.group(1) if match else s
def strip_html(s):
    """Return s with all HTML markup removed, keeping only the visible text."""
    soup = BeautifulSoup(s, "html.parser")
    return soup.get_text()
|
11500735
|
from __future__ import absolute_import, print_function, division#, unicode_literals
import numpy as np
import theano
import theano.tensor as T
from theano.ifelse import ifelse
def nonlinearity(input, x, y, length):
    """
    Apply a pointwise nonlinearity to input
    The nonlinearity is a picewise linear function.
    The graph of the function is given by the vectors x and y.

    input: symbolic tensor, transformed elementwise.
    x, y: 1d (shared) vectors of knot coordinates; x is assumed increasing.
    length: Python int, number of knots (len(x) == len(y) == length).
    """
    parts = []
    for i in range(length-1):
        x1 = x[i]
        x2 = x[i+1]
        y1 = y[i]
        y2 = y[i+1]
        # clip restricts each linear segment's contribution to its own
        # interval [x1, x2]; slope is (y2-y1)/(x2-x1).
        part = (y2-y1)/(x2-x1)*(theano.tensor.clip(input, x1, x2)-x1)
        parts.append(part)
    # The function value is the base y[0] plus all segment contributions.
    output = y[0]
    for part in parts:
        output = output + part
    return output
def gaussian_filter(input, sigma, window_radius = 40):
    """
    Filter input with a Gaussian using mode `nearest`.
    input is expected to be three dimensional of type n times x times y

    The filter is separable: a 1d kernel is convolved along axis 1 and then
    along axis 2 via conv2d. Edges are handled by repeating the border
    rows/columns window_radius times before each 'valid' convolution
    (equivalent to scipy's `nearest` boundary mode).
    """
    # Construction of 1d kernel
    #filter_1d = T.arange(-window_radius, window_radius+1)
    # Work around some strange theano bug
    filter_1d = T.arange(2*window_radius + 1) - window_radius
    filter_1d = T.exp(-0.5*filter_1d**2/sigma**2)
    # Normalise so the kernel weights sum to one.
    filter_1d = filter_1d / filter_1d.sum()
    filter_1d = filter_1d.astype(input.dtype)
    # Column (k x 1) and row (1 x k) views of the separable kernel.
    W = filter_1d.dimshuffle([0, 'x'])
    W2 = filter_1d.dimshuffle(['x', 0])
    # conv2d wants 4d tensors: (batch, channel, rows, cols).
    blur_input = input.dimshuffle(['x', 0, 1, 2])
    filter_W = W.dimshuffle(['x', 'x', 0, 1])
    filter_W2 = W2.dimshuffle(['x', 'x', 0, 1])
    # Construction of filter pipeline
    # Pad axis 2 with repeated first/last rows, then convolve vertically.
    blur_input_start = blur_input[:, :, :1, :]
    blur_input_end = blur_input[:, :, -1:, :]
    padded_input = T.concatenate([blur_input_start]*window_radius+[blur_input]+[blur_input_end]*window_radius, axis=2)
    blur_op = T.nnet.conv2d(padded_input, filter_W, border_mode='valid', filter_shape=[1, 1, None, None])
    cropped_output1 = blur_op
    # Pad axis 3 with repeated first/last columns, then convolve horizontally.
    cropped_output1_start = blur_op[:, :, :, :1]
    cropped_output1_end = blur_op[:, :, :, -1:]
    padded_cropped_input = T.concatenate([cropped_output1_start]*window_radius
                                         + [cropped_output1]
                                         + [cropped_output1_end] * window_radius, axis=3)
    blur_op2 = T.nnet.conv2d(padded_cropped_input, filter_W2, border_mode='valid', filter_shape=[1, 1, None, None])
    # Drop the dummy batch dimension again.
    cropped_output2 = blur_op2[0, :, :, :]
    return cropped_output2
class Blur(object):
    """Optionally blur a 2d map with a Gaussian whose sigma is a trainable
    shared variable; sigma <= 0 disables the blur entirely."""
    def __init__(self, input, sigma=20.0, window_radius=60):
        self.input = input
        # sigma is shared so it can be optimised as a model parameter.
        self.sigma = theano.shared(value=np.array(sigma, dtype=theano.config.floatX), name='sigma')
        apply_blur = T.gt(self.sigma, 0.0)
        no_blur = T.le(self.sigma, 0.0)
        # ifelse lazily skips the (expensive) gaussian_filter when sigma <= 0.
        self.output = ifelse(no_blur, input, gaussian_filter(input.dimshuffle('x', 0, 1), self.sigma, window_radius)[0, :, :])
        self.params = [self.sigma]
class Nonlinearity(object):
    """Learnable piecewise linear pointwise nonlinearity on [0, 1]."""
    def __init__(self, input, nonlinearity_ys = None):
        self.input = input
        if nonlinearity_ys is None:
            # Default: identity mapping with 20 knots.
            nonlinearity_ys = np.linspace(0, 1, num=20)
        nonlinearity_ys = nonlinearity_ys.astype(theano.config.floatX)
        # x knots are fixed and equally spaced; only the y values are trained.
        self.nonlinearity_xs = theano.shared(value=np.linspace(0, 1, len(nonlinearity_ys)).astype(theano.config.floatX), name='nonlinearity_xs')
        self.nonlinearity_ys = theano.shared(value=nonlinearity_ys, name='nonlinearity_ys')
        self.output = nonlinearity(input, self.nonlinearity_xs, self.nonlinearity_ys, len(nonlinearity_ys))
        self.params = [self.nonlinearity_ys]
class LogNonlinearity(object):
    """Like Nonlinearity, but the trainable y values are kept in log scale:
    the piecewise linear function interpolates exp(nonlinearity_ys),
    keeping the applied values positive."""
    def __init__(self, input, nonlinearity_ys = None):
        self.input = input
        if nonlinearity_ys is None:
            # Default: 20 knots spanning [0, 1] (in log scale).
            nonlinearity_ys = np.linspace(0, 1, num=20)
        nonlinearity_ys = nonlinearity_ys.astype(theano.config.floatX)
        # x knots are fixed and equally spaced; only the y values are trained.
        self.nonlinearity_xs = theano.shared(value=np.linspace(0, 1, len(nonlinearity_ys)).astype(theano.config.floatX), name='nonlinearity_xs')
        self.nonlinearity_ys = theano.shared(value=nonlinearity_ys, name='nonlinearity_ys')
        # exp() converts from log scale before interpolation.
        self.output = nonlinearity(input, self.nonlinearity_xs, T.exp(self.nonlinearity_ys), len(nonlinearity_ys))
        self.params = [self.nonlinearity_ys]
class CenterBias(object):
    """Multiplicative center bias: a piecewise linear function of the
    (alpha-weighted, elliptic) distance from the image center scales the
    input map."""
    def __init__(self, input, centerbias = None, alpha=1.0):
        self.input = input
        if centerbias is None:
            # 12 knots, all ones: initially no bias.
            centerbias = np.ones(12)
        self.alpha = theano.shared(value = np.array(alpha).astype(theano.config.floatX), name='alpha')
        self.centerbias_ys = theano.shared(value=np.array(centerbias, dtype=theano.config.floatX), name='centerbias_ys')
        self.centerbias_xs = theano.shared(value=np.linspace(0, 1, len(centerbias), dtype=theano.config.floatX), name='centerbias_xs')
        height = T.cast(input.shape[0], theano.config.floatX)
        width = T.cast(input.shape[1], theano.config.floatX)
        # Coordinates normalised to roughly [-1, 1] around the image center.
        x_coords = (T.arange(width) - 0.5*width) / (0.5*width)
        y_coords = (T.arange(height) - 0.5*height) / (0.5*height) + 0.0001 # We cannot have zeros in there because of grad
        x_coords = x_coords.dimshuffle('x', 0)
        y_coords = y_coords.dimshuffle(0, 'x')
        # Elliptic distance; alpha weights the vertical axis.
        dists = T.sqrt(T.square(x_coords) + self.alpha*T.square(y_coords))
        self.max_dist = T.sqrt(1 + self.alpha)
        self.dists = dists/self.max_dist
        self.factors = nonlinearity(self.dists, self.centerbias_xs, self.centerbias_ys, len(centerbias))
        # Fewer than 3 knots means "center bias disabled".
        apply_centerbias = T.gt(self.centerbias_ys.shape[0], 2)
        self.output = ifelse(apply_centerbias, self.input*self.factors, self.input)
        self.params = [self.centerbias_ys, self.alpha]
class AdditiveCenterBias(object):
    """Additive variant of CenterBias: the distance-dependent factors are
    added to the input instead of multiplied (useful when the map is in
    log scale)."""
    def __init__(self, input, centerbias = None, alpha=1.0):
        self.input = input
        if centerbias is None:
            # 12 knots, all ones: constant initial bias.
            centerbias = np.ones(12)
        self.alpha = theano.shared(value = np.array(alpha).astype(theano.config.floatX), name='alpha')
        self.centerbias_ys = theano.shared(value=np.array(centerbias, dtype=theano.config.floatX), name='centerbias_ys')
        self.centerbias_xs = theano.shared(value=np.linspace(0, 1, len(centerbias), dtype=theano.config.floatX), name='centerbias_xs')
        height = T.cast(input.shape[0], theano.config.floatX)
        width = T.cast(input.shape[1], theano.config.floatX)
        # Coordinates normalised to roughly [-1, 1] around the image center.
        x_coords = (T.arange(width) - 0.5*width) / (0.5*width)
        y_coords = (T.arange(height) - 0.5*height) / (0.5*height) + 0.0001 # We cannot have zeros in there because of grad
        x_coords = x_coords.dimshuffle('x', 0)
        y_coords = y_coords.dimshuffle(0, 'x')
        # Elliptic distance; alpha weights the vertical axis.
        dists = T.sqrt(T.square(x_coords) + self.alpha*T.square(y_coords))
        self.max_dist = T.sqrt(1 + self.alpha)
        self.dists = dists/self.max_dist
        self.factors = nonlinearity(self.dists, self.centerbias_xs, self.centerbias_ys, len(centerbias))
        # Fewer than 3 knots means "center bias disabled".
        apply_centerbias = T.gt(self.centerbias_ys.shape[0], 2)
        self.output = ifelse(apply_centerbias, self.input+self.factors, self.input)
        self.params = [self.centerbias_ys, self.alpha]
class LogDensity(object):
    """Normalise a non-negative map to a probability density and take its log."""
    def __init__(self, input):
        self.input = input
        density = input / input.sum()
        self.output = T.log(density)
class LogDensityFromLogarithmicScale(object):
    """Normalise a map that is already in log scale:
    output = input - log(sum(exp(input)))."""
    def __init__(self, input):
        self.input = input
        log_normalizer = T.log(T.exp(input).sum())
        self.output = input - log_normalizer
class AverageLogLikelihood(object):
    """Mean log-likelihood of fixations (x_inds, y_inds) under a log-density map."""
    def __init__(self, log_densities, x_inds, y_inds):
        self.log_densities = log_densities
        # Pick the log density at every fixation location (row = y, col = x).
        self.log_likelihoods = log_densities[y_inds, x_inds]
        self.average_log_likelihood = self.log_likelihoods.mean()
class SaliencyMapProcessing(object):
    """Full pipeline: blur -> piecewise linear nonlinearity -> multiplicative
    center bias -> log density, plus the average log-likelihood of a set of
    fixations (x_inds, y_inds) used as the fitting objective."""
    def __init__(self, saliency_map, x_inds = None, y_inds = None,
                 sigma = 0.0, window_radius = 80, nonlinearity_ys = None, centerbias = None, alpha = 1.0):
        self.saliency_map = saliency_map
        # Fixation indices default to free symbolic variables.
        if x_inds is None:
            x_inds = T.lvector('x_inds')
        if y_inds is None:
            y_inds = T.lvector('y_inds')
        # Namespace object keeping the layer objects (with their params)
        # separate from the symbolic outputs stored directly on self.
        class TheanoObjects(object):
            pass
        self.theano_objects = TheanoObjects()
        self.x_inds = x_inds
        self.y_inds = y_inds
        # Chain the processing stages; each stage consumes the previous output.
        self.theano_objects.blur = Blur(saliency_map, sigma=sigma, window_radius=window_radius)
        self.blur = self.theano_objects.blur.output
        self.theano_objects.nonlinearity = Nonlinearity(self.blur, nonlinearity_ys=nonlinearity_ys)
        self.nonlinearity = self.theano_objects.nonlinearity.output
        self.theano_objects.centerbias = CenterBias(self.nonlinearity, centerbias=centerbias, alpha=alpha)
        self.centerbias = self.theano_objects.centerbias.output
        self.theano_objects.log_density = LogDensity(self.centerbias)
        self.log_density = self.theano_objects.log_density.output
        self.theano_objects.average_log_likelihood = AverageLogLikelihood(self.log_density, self.x_inds, self.y_inds)
        self.average_log_likelihood = self.theano_objects.average_log_likelihood.average_log_likelihood
        # Trainable parameters of all stages, plus convenient aliases.
        self.params = self.theano_objects.blur.params + self.theano_objects.nonlinearity.params + self.theano_objects.centerbias.params
        self.blur_radius = self.theano_objects.blur.sigma
        self.nonlinearity_ys = self.theano_objects.nonlinearity.nonlinearity_ys
        self.centerbias_ys = self.theano_objects.centerbias.centerbias_ys
        self.alpha = self.theano_objects.centerbias.alpha
class SaliencyMapProcessingLogNonlinearity(object):
    """Variant of SaliencyMapProcessing that uses LogNonlinearity (y values
    trained in log scale) instead of Nonlinearity; otherwise identical."""
    def __init__(self, saliency_map, x_inds = None, y_inds = None,
                 sigma = 0.0, window_radius = 80, nonlinearity_ys = None, centerbias = None, alpha = 1.0):
        self.saliency_map = saliency_map
        # Fixation indices default to free symbolic variables.
        if x_inds is None:
            x_inds = T.lvector('x_inds')
        if y_inds is None:
            y_inds = T.lvector('y_inds')
        # Namespace object keeping the layer objects separate from outputs.
        class TheanoObjects(object):
            pass
        self.theano_objects = TheanoObjects()
        self.x_inds = x_inds
        self.y_inds = y_inds
        # Chain the processing stages; each stage consumes the previous output.
        self.theano_objects.blur = Blur(saliency_map, sigma=sigma, window_radius=window_radius)
        self.blur = self.theano_objects.blur.output
        self.theano_objects.nonlinearity = LogNonlinearity(self.blur, nonlinearity_ys=nonlinearity_ys)
        self.nonlinearity = self.theano_objects.nonlinearity.output
        self.theano_objects.centerbias = CenterBias(self.nonlinearity, centerbias=centerbias, alpha=alpha)
        self.centerbias = self.theano_objects.centerbias.output
        self.theano_objects.log_density = LogDensity(self.centerbias)
        self.log_density = self.theano_objects.log_density.output
        self.theano_objects.average_log_likelihood = AverageLogLikelihood(self.log_density, self.x_inds, self.y_inds)
        self.average_log_likelihood = self.theano_objects.average_log_likelihood.average_log_likelihood
        # Trainable parameters of all stages, plus convenient aliases.
        self.params = self.theano_objects.blur.params + self.theano_objects.nonlinearity.params + self.theano_objects.centerbias.params
        self.blur_radius = self.theano_objects.blur.sigma
        self.nonlinearity_ys = self.theano_objects.nonlinearity.nonlinearity_ys
        self.centerbias_ys = self.theano_objects.centerbias.centerbias_ys
        self.alpha = self.theano_objects.centerbias.alpha
class SaliencyMapProcessingLogarithmic(object):
    """Variant of SaliencyMapProcessing that works in log scale after the
    nonlinearity: the center bias is added (AdditiveCenterBias) and the
    final normalisation uses LogDensityFromLogarithmicScale."""
    def __init__(self, saliency_map, x_inds = None, y_inds = None,
                 sigma = 0.0, window_radius = 80, nonlinearity_ys = None, centerbias = None, alpha = 1.0):
        self.saliency_map = saliency_map
        # Fixation indices default to free symbolic variables.
        if x_inds is None:
            x_inds = T.lvector('x_inds')
        if y_inds is None:
            y_inds = T.lvector('y_inds')
        # Namespace object keeping the layer objects separate from outputs.
        class TheanoObjects(object):
            pass
        self.theano_objects = TheanoObjects()
        self.x_inds = x_inds
        self.y_inds = y_inds
        # Chain the processing stages; each stage consumes the previous output.
        self.theano_objects.blur = Blur(saliency_map, sigma=sigma, window_radius=window_radius)
        self.blur = self.theano_objects.blur.output
        self.theano_objects.nonlinearity = Nonlinearity(self.blur, nonlinearity_ys=nonlinearity_ys)
        self.nonlinearity = self.theano_objects.nonlinearity.output
        self.theano_objects.centerbias = AdditiveCenterBias(self.nonlinearity, centerbias=centerbias, alpha=alpha)
        self.centerbias = self.theano_objects.centerbias.output
        self.theano_objects.log_density = LogDensityFromLogarithmicScale(self.centerbias)
        self.log_density = self.theano_objects.log_density.output
        self.theano_objects.average_log_likelihood = AverageLogLikelihood(self.log_density, self.x_inds, self.y_inds)
        self.average_log_likelihood = self.theano_objects.average_log_likelihood.average_log_likelihood
        # Trainable parameters of all stages, plus convenient aliases.
        self.params = self.theano_objects.blur.params + self.theano_objects.nonlinearity.params + self.theano_objects.centerbias.params
        self.blur_radius = self.theano_objects.blur.sigma
        self.nonlinearity_ys = self.theano_objects.nonlinearity.nonlinearity_ys
        self.centerbias_ys = self.theano_objects.centerbias.centerbias_ys
        self.alpha = self.theano_objects.centerbias.alpha
|
11500752
|
import numpy as np
import torch
import torch.nn.functional as F
class LMProb:
    """Scores token sequences with a pretrained language model on the CPU."""
    def __init__(self, model_path):
        # map_location remaps CUDA-saved weights onto the CPU.
        self.model = torch.load(open(model_path, "rb"), map_location={"cuda:0": "cpu"})
        self.model = self.model.cpu()
        self.model.eval()
    def get_prob(self, nums, verbose=False):
        """Average log-probability of nums[1:] given the preceding tokens.

        nums: sequence of token ids (ints). Assumes len(nums) >= 2 --
            TODO confirm; a single-token sequence divides by zero below.
        Returns sum(log P)/count as a torch scalar.
        """
        with torch.no_grad():
            inp = torch.tensor([int(nums[0])]).long().unsqueeze(0)
            # NOTE(review): assumes the pickled model exposes init_hidden(bsz)
            # and returns (output, hidden), like the PyTorch
            # word_language_model example -- confirm.
            hidden = self.model.init_hidden(bsz=1)
            log_probs = []
            for i in range(1, len(nums)):
                output, hidden = self.model(inp, hidden)
                probs = F.softmax(output.squeeze(), dim=-1)
                prob = probs[nums[i]]
                # append current log prob
                log_probs += [torch.log(prob)]
                # Feed the observed token back in for the next step.
                inp.data.fill_(int(nums[i]))
            if verbose:
                for i in range(len(log_probs)):
                    print(
                        f"{nums[i+1]:4d}: P(w|s) = {np.exp(log_probs[i]):8.4f} | logP(w|s) = {log_probs[i]:8.4f}"
                    )
                print(f"=> sum_prob = {sum(log_probs):.4f}")
            return sum(log_probs) / len(log_probs)
|
11500827
|
import pandas as pd
from pandas.tseries.holiday import USFederalHolidayCalendar as calendar
from pandas.tseries.offsets import CustomBusinessDay
import numpy as np
import datetime
def make_baseline(x_days, pivot, name="Temperature", freq="15min"):
    """Average the pivot rows for the selected days into a one-column frame."""
    selected = pivot[pivot.index.isin(x_days)]
    return selected.mean(axis=0).to_frame(name)
def create_timeseries(df, event_index):
    """Turn a fractional-hour-indexed frame into a Timestamp-indexed series.

    df: single-column frame indexed by hour-of-day as a float (e.g. 14.25);
        the column is renamed to 'demand' (the frame is mutated).
    event_index: date string ("YYYY-MM-DD") the readings belong to.
    Returns the 'demand' column as a Timestamp-indexed series.
    """
    df.columns = ['demand']
    midnight = pd.Timestamp(event_index + ' 00:00:00')
    stamps = []
    for frac_hour in df.index:
        whole_hours = int(frac_hour) // 1
        extra_minutes = (frac_hour % 1) * 60
        stamps.append(midnight + pd.Timedelta(hours=whole_hours, minutes=extra_minutes))
    df["Time"] = stamps
    reindexed = df.set_index(["Time"])
    return reindexed[reindexed.columns[0]]
def select_demand(data):  # removed _
    """Return only the columns whose name contains "demand"."""
    return data.filter(regex="demand")
def create_pivot(data, freq="15min"):  # removed _
    """Pivot a datetime-indexed frame into one row per calendar date.

    data: single-column frame with a DatetimeIndex. NOTE: mutated in place
        (helper index columns "date" and "combined"/"hour" are added).
    freq: "15min" -> columns are fractional hours of day (14:15 -> 14.25);
          "1h"    -> columns are whole hours.
    Returns the pivoted frame (rows: dates, columns: time of day).
    Raises ValueError for any other freq (previously fell through to a
    NameError on the return statement).
    """
    if freq == "15min":  # we are using 15 minute intervals so we can accurately calculate cost
        data["date"] = data.index.date
        # Fractional hour of day, e.g. 14:15 -> 14.25.
        data["combined"] = data.index.hour + (data.index.minute * (1.0 / 60.0))
        data_multi = data.set_index(["date", "combined"])
        # Keep the last reading when a timestamp occurs twice (e.g. DST).
        data_multi = data_multi[~data_multi.index.duplicated(keep='last')]
        data_pivot = data_multi.unstack()
        # remove double index
        data_pivot.columns = data_pivot.columns.droplevel(0)
    elif freq == "1h":
        # add date and hour for new index
        data["date"] = data.index.date
        data["hour"] = data.index.hour
        data_multi = data.set_index(["date", "hour"])
        data_multi = data_multi[~data_multi.index.duplicated(keep='last')]
        # create pivot
        data_pivot = data_multi.unstack()
        # remove double index
        data_pivot.columns = data_pivot.columns.droplevel(0)
    else:
        raise ValueError("freq must be '15min' or '1h', got %r" % (freq,))
    return data_pivot
def _remove_event_day(data, event_day, PDP_dates): #removes all event days specified in the _PDP list above
try:
#data = data[~(data.index.date == event_index.date())]
data = data[~(data.index.date == event_day)]
for i in PDP_dates:
data=data[~(data.index.date == i)]
return data
except Exception as e:
print(e)
print("error in _remove_event_day")
return data
def _remove_WE_holidays_NaN(data):
no_WE = ~((data.index.weekday == 5) | (data.index.weekday == 6)) # remove if WE
cal = calendar()
start = datetime.datetime.strftime(data.index.min(),"%Y-%m-%d")
end =datetime.datetime.strftime(data.index.max(),"%Y-%m-%d")
hol_cal = cal.holidays(start=start, end=end)
no_hol = ~data.index.isin(hol_cal) # remove if it is a national holiday
no_NaN = ~data.isna().all(axis=1) # remove if has any NaN for any hour
return data[no_WE & no_hol & no_NaN]
def _get_last_Y_days(data, event_index, Y):
assert data.shape[0] >= Y, "not enough data for {} days".format(Y)
try:
start=data.index[0]
data=data[start:event_index] #test this
data = data.sort_index(ascending=False).iloc[0:Y,:]
return data
except Exception as e:
print(e)
print("data available only for {} days".format(data.shape[0]))
return data
def _get_X_in_Y(data, power_data, X=None, event_start_h=14, event_end_h=18, weather_event_data=None, include_last=False, weather_mapping=False, weather_data=None, method='max', ):
#choses the highest X days out of Y days (if weather_mapping is true, it choses the days with the highest OAT values)
if not X:
X=power_data.shape[0]
cols = np.arange(event_start_h, event_end_h+include_last*1)
if weather_mapping==True:
if method=='proximity': #chooses x days based on how close the weather is
rows=np.shape(weather_data)[0]
weather_event_day=weather_event_data
for i in range(rows-1):
weather_event_data=weather_event_data.append(weather_event_day, ignore_index=True)
weather_event_data=weather_event_data[cols]
weather_event_data.index=weather_data[cols].index
x_days=abs(weather_event_data-weather_data[cols]).sum(axis=1).sort_values(ascending=True)[0:X].index
else:
x_days=weather_data[cols].sum(axis=1).sort_values(ascending=False)[0:X].index
else:
x_days = power_data[cols].sum(axis=1).sort_values(ascending=False)[0:X].index
return data[data.index.isin(x_days)], x_days
def _get_adj_ratio(data,
event_data,
event_start_h=14,
min_ratio=1.0,
max_ratio=1.3):
# this is hardcoded, we may want to do it in a more flexible way
# strategy: 4 hours before the event, take the first 3 and average them
pre_event_period_start = event_start_h - 4
try:
ratio = event_data.iloc[:,(pre_event_period_start*4):(event_start_h-1)*4].mean().mean()/data.iloc[:,(pre_event_period_start*4):(event_start_h-1)*4].mean().mean()
# print(ratio)
except:
ratio=1
print('Error in calculating ratios')
#If you want to implement maximum and minimum restrictions uncomment lines below!
if ratio < min_ratio:
ratio=min_ratio
if ratio > max_ratio:
ratio=max_ratio
if np.isnan(ratio):
ratio=1
return ratio
"""
if method='proximity' (and weather-mapping=true), then it chooses the X days that are closest to the weather in the event day,
if method='max' it chooses the hottest x days out of y days.
"""
def get_X_in_Y_baseline(data, weather_pivot, event_day, PDP_dates,
                        event_index,
                        X=3,
                        Y=10,
                        event_start_h=12,
                        event_end_h=18,
                        include_last=False,
                        adj_ratio=True,
                        min_ratio=1.0,
                        max_ratio=1.3,
                        sampling="quarterly", weather_mapping=False, method='max'):
    """X-in-Y baseline: average the X "best" of the last Y non-event days,
    optionally scaled by a same-day morning adjustment ratio.

    data, weather_pivot: pivoted demand / weather frames (rows: days).
    event_day: the event date; PDP_dates: further event dates to exclude.
    event_index: label used to slice "days up to the event".
    Returns (baseline frame, candidate days, event-day demand transposed,
    chosen days, adjustment ratio).
    """
    # Demand for the event day itself (used for the adjustment ratio).
    event_data = data[data.index.date == event_day]
    # Exclude event days, weekends, holidays and all-NaN days.
    data = _remove_event_day(data, event_index, PDP_dates)
    data = _remove_WE_holidays_NaN(data)
    weather_event_data = weather_pivot[weather_pivot.index.date == event_day]
    weather_data = _remove_event_day(weather_pivot, event_index, PDP_dates)
    weather_data = _remove_WE_holidays_NaN(weather_data)
    # Restrict both demand and weather to the last Y candidate days.
    data_y = _get_last_Y_days(data, event_index, Y)
    days = data_y.index
    weather_data = _get_last_Y_days(weather_data, event_index, Y)
    # Pick the X best days (by demand or by weather, see _get_X_in_Y).
    data_x, x_days = _get_X_in_Y(data, power_data=data_y,
                                 X=X,
                                 event_start_h=event_start_h,
                                 event_end_h=event_end_h,
                                 weather_event_data=weather_event_data,
                                 include_last=include_last, weather_mapping=weather_mapping, weather_data=weather_data, method=method)
    if adj_ratio:
        ratio = _get_adj_ratio(data_x, event_data,
                               event_start_h=event_start_h,
                               min_ratio=min_ratio,
                               max_ratio=max_ratio)
    else:
        ratio = 1
    data_x = (data_x.mean()*ratio).to_frame()  # baseline is the average of the days selected
    data_x.columns = ["baseline"]
    return data_x, days, event_data.T, x_days, ratio
def parse_date(date):
    """Split an ISO-like date ("YYYY-MM-DD...", or anything whose str() starts
    that way) into an (int year, int month, int day) tuple."""
    text = str(date)
    year, month, day = text[0:4], text[5:7], text[8:10]
    return (int(year), int(month), int(day))
def calculate_rmse(demand_baseline, event_index, demand_pivot=None):
    """Root-mean-squared error between a baseline and the actual event-day demand.

    demand_baseline: one-column frame/series of baseline values.
    event_index: index label of the event day inside demand_pivot.
    demand_pivot: pivoted demand frame (rows: days). Previously this was read
        from a module-level global that is not defined in this file (a
        guaranteed NameError); it is now an explicit parameter. The sklearn
        mean_squared_error call (also undefined here) is replaced with numpy.
    """
    if demand_pivot is None:
        raise ValueError("demand_pivot must be supplied")
    # Backfill missing readings before comparing. # TODO find a better solution
    demand_pivot = demand_pivot.bfill()
    actual = demand_pivot[demand_pivot.index == event_index].T
    diff = np.asarray(demand_baseline, dtype=float) - np.asarray(actual, dtype=float)
    return np.sqrt(np.mean(diff ** 2))
def mape_vectorized_v2(a, b):
    """Mean absolute percentage error of b relative to a, skipping a == 0 entries."""
    nonzero = a != 0
    percentage_errors = np.fabs(a - b) / a
    return percentage_errors[nonzero].mean()
|
11500840
|
import os
import teek
try:
# examples/soup.py does bs4.BeautifulSoup(html_string, 'lxml')
import bs4 # noqa
import lxml # noqa
soup_py_can_run = True
except ImportError:
soup_py_can_run = False
EXAMPLES_DIR = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'examples')
# magic ftw
# TODO: this doesn't work with pytest-xdist and pythons that don't have
# ordered dict, i have no idea why and i don't know how to fix it
def _create_test_function(filename):
    """Generate a pytest test that runs examples/<filename>, and register it.

    The example's source is exec'd with teek.run monkeypatched to a no-op
    callback, so the script builds its GUI without entering the real main
    loop; afterwards one real (immediately-quitting) run() checks nothing
    broke. Skips soup.py when bs4/lxml are unavailable.
    """
    if filename == 'soup.py' and not soup_py_can_run:
        return
    with open(os.path.join(EXAMPLES_DIR, filename), 'r') as file:
        code = file.read()
    def func(monkeypatch, handy_callback):
        @handy_callback
        def fake_run():
            pass
        with monkeypatch.context() as monkey:
            monkey.setattr(teek, 'run', fake_run)
            # __file__ is set so examples that read files next to themselves work.
            exec(code, {'__file__': os.path.join(EXAMPLES_DIR, filename)})
        assert fake_run.ran_once()
        # make sure that nothing breaks if the real .run() is called
        teek.update()
        teek.after_idle(teek.quit)
        teek.run()
    # pytest discovers tests via module globals; "." is not a valid
    # identifier character, so e.g. "soup.py" becomes "test_soup_py".
    func.__name__ = func.__qualname__ = 'test_' + filename.replace('.', '_')
    globals()[func.__name__] = func
# Register one test function per example script; files starting with "_"
# (private helpers) and non-Python files are skipped.
for filename in sorted(os.listdir(EXAMPLES_DIR)):
    if filename.endswith('.py') and not filename.startswith('_'):
        _create_test_function(filename)
|
11500848
|
from .component import Component
# trauma component is used for screenshake
class TraumaComponent(Component):
    """Tracks a 0..1 "trauma" level used to drive screen shake."""
    def init(self):
        self.key = 'trauma'
        self._traumaLevel = 0
        self.maxTrauma = 1
        self.traumaDecrement = 0.01
    def reset(self):
        """Drop the trauma level back to zero."""
        self.traumaLevel = 0
    @property
    def traumaLevel(self):
        """Current trauma level; writes are clamped to [0, 1]."""
        return self._traumaLevel
    @traumaLevel.setter
    def traumaLevel(self, value):
        # Clamp on write so callers can add/subtract freely.
        self._traumaLevel = max(0, min(1, value))
|
11500863
|
from django.db import models
import reversion
from lims.shared.models import Organism
@reversion.register()
class CodonUsageTable(models.Model):
    """One codon-usage table per organism (version-tracked by django-reversion)."""
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0 --
    # confirm before upgrading the framework.
    species = models.ForeignKey(Organism)
    class Meta:
        # Newest tables first.
        ordering = ['-id']
    def __str__(self):
        return self.species.name
@reversion.register()
class CodonUsage(models.Model):
    """A single codon's usage value within a CodonUsageTable."""
    # Codon name, e.g. a 3-letter code (max_length=3).
    name = models.CharField(max_length=3)
    value = models.FloatField()
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0 -- confirm.
    table = models.ForeignKey(CodonUsageTable, related_name='codons')
    class Meta:
        # Newest entries first.
        ordering = ['-id']
    def __str__(self):
        return '{}/{}'.format(self.table, self.name)
|
11500868
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers import BertPreTrainedModel, BertModel, AutoConfig
from bojone_snippets import DataGenerator, sequence_padding
from bojone_tokenizers import Tokenizer
from configuration.config import *
from opt import create_optimizer_and_scheduler
from utils import l2_normalize, compute_corrcoef
batch_size = 64
maxlen = 64
task_name = "LCQMC"
epochs = 1
gradient_accumulation_steps = 1
# 加载数据
def load_data(data_path):
    """Read a tab-separated file of (text1, text2, label) triples.

    data_path: pathlib.Path-like object with an .open() method.
    Returns a list of (str, str, float) tuples.
    """
    samples = []
    for row in data_path.open():
        left, right, label = row.strip().split("\t")
        samples.append((left, right, float(label)))
    return samples
# 加载分词器
dict_path = str(robert_wwm_pt_path / "vocab.txt")
tokenizer = Tokenizer(dict_path, do_lower_case=True)
class data_generator(DataGenerator):
    """Training corpus generator.

    In "train" mode every sentence is emitted twice in a row; adjacent
    duplicate rows become SimCSE's positive pairs (same text, two dropout
    passes). Yields (token_ids, segment_ids) LongTensor batches.
    """
    def __iter__(self, random=False):
        batch_token_ids, batch_segment_ids = [], []
        for is_end, text, in self.sample(random):
            token_ids, _ = tokenizer.encode(text, maxlen=maxlen)
            batch_token_ids.append(token_ids)
            if "mode" in self.kwargs and self.kwargs["mode"] == "train":
                # Duplicate the sample so rows 2i and 2i+1 form a positive pair.
                batch_token_ids.append(token_ids)
                batch_segment_ids.append([1] * len(token_ids))
            batch_segment_ids.append([1] * len(token_ids))
            if len(batch_token_ids) == self.batch_size * 2 or is_end:
                batch_token_ids = torch.tensor(sequence_padding(batch_token_ids), dtype=torch.long)
                batch_segment_ids = torch.tensor(sequence_padding(batch_segment_ids), dtype=torch.long)
                yield batch_token_ids, batch_segment_ids
                batch_token_ids, batch_segment_ids = [], []
class EncodingModel(BertPreTrainedModel):
    """BERT wrapper that reduces token states to one embedding per sentence."""
    def __init__(self, config):
        super(EncodingModel, self).__init__(config)
        self.bert = BertModel(config)
    def forward(self, input_ids, attention_mask, encoder_type="fist-last-avg"):
        """
        :param input_ids: [batch, seq] token ids
        :param attention_mask: [batch, seq] attention mask
        :param encoder_type: pooling strategy -- "first-last-avg" (the
            historical misspelling "fist-last-avg" remains the default and is
            still accepted), "last-avg", "cls", "pooler" (cls + dense)
        :return: [batch, hidden] sentence embeddings
        """
        output = self.bert(input_ids, attention_mask, output_hidden_states=True)
        # Accept the correct spelling alongside the legacy default.
        if encoder_type in ("fist-last-avg", "first-last-avg"):
            # hidden_states[0] is the embedding layer; [1] is the first
            # transformer layer and [-1] the last.
            first = output.hidden_states[1]
            last = output.hidden_states[-1]
            seq_length = first.size(1)
            first_avg = torch.avg_pool1d(first.transpose(1, 2), kernel_size=seq_length).squeeze(-1)  # [b,d]
            last_avg = torch.avg_pool1d(last.transpose(1, 2), kernel_size=seq_length).squeeze(-1)  # [b,d]
            # Average the first-layer and last-layer means.
            final_encoding = torch.avg_pool1d(torch.cat([first_avg.unsqueeze(1), last_avg.unsqueeze(1)], dim=1).transpose(1, 2), kernel_size=2).squeeze(-1)
            return final_encoding
        if encoder_type == "last-avg":
            sequence_output = output.last_hidden_state  # [b,s,d]
            seq_length = sequence_output.size(1)
            final_encoding = torch.avg_pool1d(sequence_output.transpose(1, 2), kernel_size=seq_length).squeeze(-1)  # [b,d]
            return final_encoding
        if encoder_type == "cls":
            sequence_output = output.last_hidden_state
            cls = sequence_output[:, 0]  # [b,d]
            return cls
        if encoder_type == "pooler":
            pooler_output = output.pooler_output  # [b,d]
            return pooler_output
def convert_to_ids(data):
    """Tokenize (text_a, text_b, label) triples into padded id arrays.

    Returns (padded a ids, padded b ids, labels list).
    """
    a_token_ids, b_token_ids, labels = [], [], []
    for sample in tqdm(data):
        a_token_ids.append(tokenizer.encode(sample[0], maxlen=maxlen)[0])
        b_token_ids.append(tokenizer.encode(sample[1], maxlen=maxlen)[0])
        labels.append(sample[2])
    return sequence_padding(a_token_ids), sequence_padding(b_token_ids), labels
def split_data(dat):
    """Split (a, b, label) triples into three parallel lists."""
    a_texts, b_texts, labels = [], [], []
    for sample in tqdm(dat):
        a_texts.append(sample[0])
        b_texts.append(sample[1])
        labels.append(sample[2])
    return a_texts, b_texts, labels
datasets = {fn: load_data(open_dataset_path / task_name / f"{fn}.tsv") for fn in ["train", "dev", "test"]}
all_weights, all_texts, all_labels = [], [], []
train_texts = []
for name, data in datasets.items():
a_texts, b_texts, labels = split_data(data)
all_weights.append(len(data))
all_texts.append((a_texts, b_texts))
all_labels.append(labels)
train_texts.extend(a_texts)
train_texts.extend(b_texts)
np.random.shuffle(train_texts)
train_texts = train_texts[:10000]
train_generator = data_generator(train_texts, batch_size, mode="train")
# 计算loss
loss_func = nn.BCEWithLogitsLoss()
def simcse_loss(y_pred):
    """SimCSE training loss.

    Assumes the batch is arranged so rows 2k and 2k+1 are two dropout views
    of the same sentence; each row's positive target is its paired neighbour.
    """
    # Build the target matrix: y_true[i][j] == 1 iff j is i's paired row.
    idxs = torch.arange(0, y_pred.size(0))  # [b]
    idxs_1 = idxs[None, :]  # [1,b]
    idxs_2 = (idxs + 1 - idxs % 2 * 2)[:, None]  # [b,1]  partner index: 0<->1, 2<->3, ...
    y_true = idxs_1 == idxs_2
    y_true = y_true.to(torch.float).to(device)
    # Cosine similarity between every pair of rows.
    y_pred = F.normalize(y_pred, dim=1, p=2)
    similarities = torch.matmul(y_pred, y_pred.transpose(0,1))  # [b,d] x [d,b] -> [b,b]
    similarities = similarities - torch.eye(y_pred.size(0)).to(device) * 1e12  # mask self-similarity
    similarities = similarities * 20  # temperature scaling (tau = 0.05)
    loss = loss_func(similarities, y_true)
    return loss
# Load the pretrained encoder and build optimizer/scheduler.
config_path = robert_wwm_pt_path / "bert_config.json"
# hidden_dropout_prob doubles as the SimCSE augmentation (two dropout views per sentence).
config = AutoConfig.from_pretrained(pretrained_model_name_or_path=config_path, hidden_dropout_prob=0.1)
model = EncodingModel.from_pretrained(robert_wwm_pt_path, config=config)
optimizer, scheduler = create_optimizer_and_scheduler(model=model, lr=1e-5, num_training_steps=train_generator.steps * epochs // gradient_accumulation_steps)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
# Training loop: unsupervised SimCSE with gradient accumulation.
model.zero_grad()
for e in range(epochs):
    model.train()
    for step, batch in enumerate(train_generator):
        # if step > 1: break
        batch = [_.to(device) for _ in batch]
        input_ids, seg_ids = batch
        encoding_output = model(input_ids, seg_ids)
        loss = simcse_loss(encoding_output)
        # NOTE(review): loss is not divided by gradient_accumulation_steps and
        # `scheduler` (created above) is never stepped -- confirm this is intended.
        loss.backward()
        if step % gradient_accumulation_steps == 0 and step != 0:
            torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=5.0)
            optimizer.step()
            optimizer.zero_grad()
        if step % 100 == 0 and step != 0:
            print(f"epoch: {e} - batch: {step}/{train_generator.steps} - loss: {loss}")
model.eval()
# Vectorize the evaluation corpora: encode both sides of every split's pairs.
all_vecs = []
for a_texts, b_texts in all_texts:
    a_text_generator = data_generator(a_texts, batch_size, mode="eval")
    b_text_generator = data_generator(b_texts, batch_size, mode="eval")
    all_a_vecs = []
    for eval_batch in tqdm(a_text_generator):
        eval_batch = [_.to(device) for _ in eval_batch]
        with torch.no_grad():
            eval_encodings = model(*eval_batch)
            eval_encodings = eval_encodings.cpu().detach().numpy()
            all_a_vecs.extend(eval_encodings)
    all_b_vecs = []
    for eval_batch in tqdm(b_text_generator):
        eval_batch = [_.to(device) for _ in eval_batch]
        with torch.no_grad():
            eval_encodings = model(*eval_batch)
            eval_encodings = eval_encodings.cpu().detach().numpy()
            all_b_vecs.extend(eval_encodings)
    # One (a_vecs, b_vecs) matrix pair per evaluation split.
    all_vecs.append((np.array(all_a_vecs), np.array(all_b_vecs)))
# Normalize embeddings, compute per-pair cosine similarity, then the
# correlation with the gold labels (via compute_corrcoef) for each split.
all_corrcoefs = []
for (a_vecs, b_vecs), labels in zip(all_vecs, all_labels):
    a_vecs = l2_normalize(a_vecs)
    b_vecs = l2_normalize(b_vecs)
    sims = (a_vecs * b_vecs).sum(axis=1)  # row-wise dot product = cosine after L2 norm
    corrcoef = compute_corrcoef(labels, sims)
    all_corrcoefs.append(corrcoef)
# Append the unweighted and split-size-weighted averages across splits.
all_corrcoefs.extend([
    np.average(all_corrcoefs),
    np.average(all_corrcoefs, weights=all_weights)
])
print(all_corrcoefs)
|
11500880
|
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    """This command will persist icons from the collection named `myicons` into frontend files.
    This collection contains the icons for this project itself. Normal user need not care about this.
    """
    def handle(self, *args, **kwargs):
        # Deferred import keeps command discovery cheap and avoids app-loading issues.
        from iconcollections.models import Collection
        try:
            bs_collection = Collection.objects.get(build_name='myicons')
        except Collection.DoesNotExist:
            raise CommandError('Bootstraping icons collection not found')
        import os
        import zipfile
        from StringIO import StringIO  # NOTE(review): Python 2 only module
        from fontbuilder.serializers import CollectionSerializer
        from fontbuilder.renderers import ZIPPackRenderer
        # Target directory: the app's static/ directory, resolved relative to this file.
        path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../static'))
        serializer = CollectionSerializer(bs_collection)
        renderer = ZIPPackRenderer()
        # Render the collection to an in-memory zip, then unpack selected members.
        zipcontent = StringIO(renderer.render(serializer.data))
        zipfileobj = zipfile.ZipFile(zipcontent)
        # Exclude the cheatsheet page and the generated css from the static files.
        namelist = filter(lambda n: n not in ('cheatsheet.html', 'css/myicons.css'), zipfileobj.namelist())
        zipfileobj.extractall(path, namelist)
        print 'File extracted to %s' % path
|
11500895
|
import collections
import copy
import math
def get_data_stats(examples):
    """Compute per-word IDF over `examples`, then corpus-aggregated TF-IDF.

    Returns {"idf": {word: idf}, "tf_idf": {word: summed tf-idf}}.
    """
    doc_freq = collections.defaultdict(int)
    # Document frequency: count each word at most once per example.
    for example in examples:
        tokens = clean_web_text(" ".join(example.text)).split(" ")
        for word in set(tokens):
            doc_freq[word] += 1
    n_docs = len(examples)
    idf = {word: math.log(n_docs * 1.0 / freq) for word, freq in doc_freq.items()}
    # TF-IDF contributions summed across all examples.
    tf_idf = {}
    for example in examples:
        tokens = clean_web_text(" ".join(example.text)).split(" ")
        for word in tokens:
            tf_idf[word] = tf_idf.get(word, 0) + 1.0 / len(tokens) * idf[word]
    return {"idf": idf, "tf_idf": tf_idf}
def build_vocab(examples):
    """Assign a unique, insertion-ordered integer id to every word seen."""
    vocab = {}
    for example in examples:
        tokens = clean_web_text(" ".join(example.text)).split(" ")
        for word in tokens:
            # setdefault leaves existing ids untouched; new words get the next id.
            vocab.setdefault(word, len(vocab))
    return vocab
def clean_web_text(st):
    """Clean scraped HTML-ish text.

    Strips simple markup (<br />, <p>, anchor open/close tags), unescapes
    the HTML quote entity, and collapses runs of spaces.

    Args:
        st: raw text string.
    Returns:
        The cleaned string.
    """
    st = st.replace("<br />", " ")
    st = st.replace("&quot;", '"')
    st = st.replace("<p>", " ")
    # Drop every "<a href=...>" opening tag, keeping the anchor text.
    while "<a href=" in st:
        start_pos = st.find("<a href=")
        end_pos = st.find(">", start_pos)
        if end_pos != -1:
            st = st[:start_pos] + st[end_pos + 1 :]
        else:
            # Malformed/unclosed tag: remove only the "<a href=" marker and
            # keep the rest of the string. (The previous code indexed a single
            # character here instead of slicing, truncating the whole tail.)
            print("incomplete href")
            print("before", st)
            st = st[:start_pos] + st[start_pos + len("<a href=") :]
            print("after", st)
    st = st.replace("</a>", "")
    # st = st.replace("\\n", " ")
    # st = st.replace("\\", " ")
    while "  " in st:
        st = st.replace("  ", " ")
    return st
|
11500999
|
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User, Group
class Command(BaseCommand):
    """List the usernames of all accounts registered under a given email."""
    can_import_settings = True
    def handle(self, *args, **options):
        # Exactly one positional argument is expected: the email address.
        if len(args) < 1:
            print 'Specify email address'
            return
        email = args[0]
        users = User.objects.filter(email=email)
        # Emit "email : user1 user2 ..." on one line (Python 2 print syntax).
        print email, ':',
        for user in users:
            print user.username,
        print ''
11501101
|
from keras.models import Sequential
from keras.layers.core import TimeDistributedMerge, TimeDistributedDense, Dense, Dropout, Activation
from nyse import *
from nn import *
from keras.optimizers import SGD
# import theano
# theano.compile.mode.Mode(linker='py', optimizer='fast_compile')
class MLP:
    """Time-distributed MLP: a per-timestep sigmoid dense layer, temporal
    averaging, then two fully connected layers ending in a softmax."""

    def __init__(self, input_length, hidden_cnt, input_dim, output_dim):
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.input_length = input_length
        self.hidden_cnt = hidden_cnt
        self.model = self.__prepare_model()

    def __prepare_model(self):
        print('Build model...')
        net = Sequential()
        net.add(TimeDistributedDense(output_dim=self.hidden_cnt,
                                     input_dim=self.input_dim,
                                     input_length=self.input_length,
                                     activation='sigmoid'))
        net.add(TimeDistributedMerge(mode='ave'))
        net.add(Dropout(0.5))
        net.add(Dense(self.hidden_cnt, activation='tanh'))
        net.add(Dense(self.output_dim, activation='softmax'))
        # try using different optimizers and different optimizer configs
        print('Compile model...')
        optimizer = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
        net.compile(loss='categorical_crossentropy', optimizer=optimizer)
        return net

    def change_input_dim(self, input_dim):
        # Rebuild the network so the new input dimensionality takes effect.
        self.input_dim = input_dim
        self.model = self.__prepare_model()

    def get_model(self):
        return self.model
def main():
    """Run the full experiment: train, test, cross-validate, select features."""
    input_length = 100
    hidden_cnt = 50
    # NOTE(review): MLP.__init__ declares 4 parameters but only 2 are passed
    # here -- looks stale, confirm against the MLP definition.
    network = NeuralNetwork(MLP(input_length, hidden_cnt))
    dataset = get_test_data(input_length)
    print("TRAIN")
    network.train(dataset)
    print("TEST")
    network.test(dataset)
    print("TRAIN WITH CROSS-VALIDATION")
    network.run_with_cross_validation(dataset, 2)
    print("FEATURE SELECTION")
    selected = network.feature_selection(dataset)
    print("Selected features: {0}".format(selected))


if __name__ == '__main__':
    main()
|
11501104
|
import uuid as uuid
from django.conf import settings
from django.db import models
# Minimal one-field models, one per Django scalar field type; __str__ returns
# the stored value so listings are readable.
class Boolean(models.Model):
    boolean = models.BooleanField('Bool value', default=False)
    def __str__(self): return str(self.boolean)
class NullBoolean(models.Model):
    # NOTE(review): NullBooleanField is deprecated in modern Django
    # (BooleanField(null=True) is the replacement) -- confirm target version.
    null_boolean = models.NullBooleanField('Null bool value')
    def __str__(self): return str(self.null_boolean)
class File(models.Model):
    file = models.FileField()
    def __str__(self): return self.file.path if self.file else None
class FilePath(models.Model):
    filepath = models.FilePathField(path=settings.BASE_DIR)
    def __str__(self): return self.filepath
class Float(models.Model):
    float = models.FloatField()
    def __str__(self): return str(self.float)
class Decimal(models.Model):
    decimal = models.DecimalField(max_digits=5, decimal_places=2)
    def __str__(self): return str(self.decimal)
class Integer(models.Model):
    integer = models.IntegerField()
    def __str__(self): return str(self.integer)
class BigInteger(models.Model):
    big_integer = models.BigIntegerField()
    def __str__(self): return str(self.big_integer)
class PositiveInteger(models.Model):
    positive_integer = models.PositiveIntegerField()
    def __str__(self): return str(self.positive_integer)
class SmallInteger(models.Model):
    small_integer = models.SmallIntegerField()
    def __str__(self): return str(self.small_integer)
class PositiveSmallInteger(models.Model):
    positive_small_integer = models.PositiveSmallIntegerField()
    def __str__(self): return str(self.positive_small_integer)
# Relation models: one model per relational field type.
class OneToOneRelative(models.Model):
    def __str__(self): return str(self.pk)
class OneToOne(models.Model):
    one_to_one = models.OneToOneField(OneToOneRelative, models.CASCADE, related_name='one')
    def __str__(self): return str(self.pk)
class ForeignKey(models.Model):
    foreign_key = models.ForeignKey('app.Integer', on_delete=models.CASCADE)
    def __str__(self): return str(self.pk)
class M2MDependency(models.Model):
    pass
class ManyToMany(models.Model):
    m2m = models.ManyToManyField('app.M2MDependency', blank=True)
    name = models.CharField(max_length=20)
    def __str__(self): return self.name
# Text, temporal, and miscellaneous field models.
class GenericIPAddress(models.Model):
    ip_address_v4 = models.GenericIPAddressField(protocol='ipv4')
    ip_address_v6 = models.GenericIPAddressField(protocol='ipv6')
    generic_ip_address = models.GenericIPAddressField()
    def __str__(self): return self.generic_ip_address
class Char(models.Model):
    char = models.CharField(max_length=100)
    def __str__(self): return self.char
class Text(models.Model):
    text = models.TextField()
    def __str__(self): return self.text[:50]
class Uuid(models.Model):
    uuid = models.UUIDField()
    def __str__(self): return str(self.uuid)
class Slug(models.Model):
    slug = models.SlugField()
    def __str__(self): return self.slug
class DateTime(models.Model):
    date_time = models.DateTimeField(null=True, blank=True)
    date_time_auto = models.DateTimeField(auto_now_add=True)
    def __str__(self): return str(self.date_time)
class Date(models.Model):
    date = models.DateField()
    def __str__(self): return str(self.date)
class Time(models.Model):
    time = models.TimeField()
    def __str__(self): return str(self.time)
class Duration(models.Model):
    duration = models.DurationField()
    def __str__(self): return str(self.duration)
class Binary(models.Model):
    binary = models.BinaryField()
    def __str__(self): return str(self.pk)
class AllModel(models.Model):
    # A single model exercising many field types at once.
    boolean = models.BooleanField(default=True)
    char = models.CharField(max_length=1)
    decimal = models.DecimalField(max_digits=5, decimal_places=2)
    file = models.FileField()
    filepath = models.FilePathField(path=settings.BASE_DIR)
    float = models.FloatField(null=True)
    integer = models.IntegerField(null=True)
    big_integer = models.BigIntegerField(null=True)
    generic_ip_address = models.GenericIPAddressField()
    null_boolean = models.NullBooleanField()
    one_to_one = models.OneToOneField('app.Integer', models.CASCADE, related_name='all_model')
    # NOTE(review): named `fk` but declared as OneToOneField -- confirm whether
    # a ForeignKey was intended here.
    fk = models.OneToOneField('app.Integer', models.CASCADE, related_name='all_models')
    positive_integer = models.PositiveIntegerField()
    positive_small_integer = models.PositiveSmallIntegerField()
    slug = models.SlugField()
    small_integer = models.SmallIntegerField()
    text = models.TextField(null=True)
    time = models.TimeField()
    uuid = models.UUIDField(default=uuid.uuid4)
|
11501133
|
import tensorflow as tf
# Name templates for every variable the layer builders create. "%d" slots are
# filled with the layer index (and, where present, the activation-set index).
# The exe_* functions below index into this list with hard-coded p_index
# offsets, so the order of entries must not change.
PARAMETERS_NAME = ["conv_%d_w", \
                   "conv_%d_b", \
                   "prelu_%d_%d_alpha", \
                   "bn_%d_%d_offset", \
                   "bn_%d_%d_scale", \
                   "bn_%d_%d_mv_mean", \
                   "bn_%d_%d_mv_var", \
                   "in_%d_%d_offset", \
                   "in_%d_%d_scale", \
                   "ln_%d_%d_offset", \
                   "ln_%d_%d_scale"]
# .#####...######..##......##..##.
# .##..##..##......##......##..##.
# .#####...####....##......##..##.
# .##..##..##......##......##..##.
# .##..##..######..######...####..
# ................................
def relu_layer():
    """Layer spec for a ReLU activation."""
    return {'name': 'relu'}


def exe_relu_layer(tensor):
    """Apply elementwise ReLU to `tensor`."""
    return tf.nn.relu(tensor)
# .#####...#####...######..##......##..##.
# .##..##..##..##..##......##......##..##.
# .#####...#####...####....##......##..##.
# .##......##..##..##......##......##..##.
# .##......##..##..######..######...####..
# ........................................
def prelu_layer():
    """Layer spec for a PReLU activation (learned per-channel negative slope)."""
    return dict(name='prelu')

def exe_prelu_layer(tensor, net_info, l_index, is_first, act_o):
    """Apply PReLU with one alpha vector per activation option.

    `act_o['size']` alpha variables are created (one per option); only the one
    at `act_o['index']` is applied. On the first build, all of them and their
    names are registered on `net_info` so they can be saved/restored.
    """
    p_index = 2  # offset of "prelu_%d_%d_alpha" in PARAMETERS_NAME
    parameter_count = 1
    alphas_l = []
    for i in range(act_o['size']):
        alphas = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index] % (l_index, i), \
                                 shape=tensor.get_shape()[-1], \
                                 initializer=tf.constant_initializer(0.0))
        alphas_l.append(alphas)
    alphas = alphas_l[act_o['index']]
    # PReLU written branch-free: relu(x) + alpha * min(x, 0).
    pos = tf.nn.relu(tensor)
    neg = alphas * (tensor - abs(tensor)) * 0.5
    tensor = pos + neg
    if is_first:
        net_info.weights.extend(alphas_l)
        for i in range(parameter_count):
            for j in range(act_o['size']):
                net_info.parameter_names.append(PARAMETERS_NAME[p_index + i] % (l_index, j))
    return tensor
# .##......#####...######..##......##..##.
# .##......##..##..##......##......##..##.
# .##......#####...####....##......##..##.
# .##......##..##..##......##......##..##.
# .######..##..##..######..######...####..
# ........................................
def lrelu_layer(leak):
    """Layer spec for a leaky ReLU with slope `leak` on the negative side."""
    return {'name': 'lrelu', 'leak': leak}


def exe_lrelu_layer(tensor, layer_o):
    """Leaky ReLU via the branch-free identity f1*x + f2*|x|."""
    slope = layer_o['leak']
    pos_w = 0.5 * (1 + slope)
    neg_w = 0.5 * (1 - slope)
    return pos_w * tensor + neg_w * abs(tensor)
# ..####...######..##......##..##.
# .##......##......##......##..##.
# ..####...####....##......##..##.
# .....##..##......##......##..##.
# ..####...######..######...####..
# ................................
def selu_layer():
    """Layer spec for a (re-tuned) SELU activation."""
    return {'name': 'selu'}


def exe_selu_layer(tensor):
    """SELU: scale * (x if x >= 0 else alpha * elu(x)).

    Uses custom alpha/scale constants; the canonical SELU values are kept in
    the comment below for reference.
    """
    # alpha = 1.6732632423543772848170429916717
    # scale = 1.0507009873554804934193349852946
    alpha, scale = (1.0198755295894968, 1.0026538655307724)
    return scale * tf.where(tensor >= 0.0, tensor, alpha * tf.nn.elu(tensor))
# .#####...##..##.
# .##..##..###.##.
# .#####...##.###.
# .##..##..##..##.
# .#####...##..##.
# ................
def bn_layer(use_offset=False, use_scale=False, epsilon=1e-5, decay=0.9):
    """Layer spec for batch normalization.

    use_offset/use_scale control whether beta/gamma are trainable; decay is
    the moving-average momentum for the stored statistics.
    """
    return dict(
        name='bn',
        use_offset=use_offset,
        use_scale=use_scale,
        epsilon=epsilon,
        decay=decay)

def exe_bn_layer(tensor, layer_o, net_info, l_index, is_first, is_training, trainable, act_o):
    """Batch normalization with one parameter set per activation option.

    Training: normalizes with batch statistics and updates the moving
    mean/variance with the spec's decay. Inference: uses the stored moving
    statistics. On the first build, all parameter sets are registered on
    `net_info`.
    """
    p_index = 3  # offset of the bn_* name templates in PARAMETERS_NAME
    parameter_count = 4  # offset, scale, moving mean, moving variance
    shape = [tensor.get_shape()[-1]]
    # beta/gamma are trainable only when both the spec and the caller allow it.
    offset_trainable = layer_o['use_offset'] if trainable else False
    scale_trainable = layer_o['use_scale'] if trainable else False
    pars = []
    for i in range(act_o['size']):
        offset  = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index  ] % (l_index, i), shape=shape, initializer=tf.constant_initializer(0.0), trainable=offset_trainable)
        scale   = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index+1] % (l_index, i), shape=shape, initializer=tf.constant_initializer(1.0), trainable=scale_trainable)
        mv_mean = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index+2] % (l_index, i), shape=shape, initializer=tf.constant_initializer(0.0), trainable=False)
        mv_var  = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index+3] % (l_index, i), shape=shape, initializer=tf.constant_initializer(1.0), trainable=False)
        pars.append([offset, scale, mv_mean, mv_var])
    offset, scale, mv_mean, mv_var = pars[act_o['index']]
    if is_first:
        for ps in pars:
            net_info.weights.extend(ps)
        for i in range(parameter_count):
            for j in range(act_o['size']):
                net_info.parameter_names.append(PARAMETERS_NAME[p_index + i] % (l_index, j))
    if is_training:
        # Per-batch statistics over N, H, W (NHWC layout implied by axes [0,1,2]).
        batch_mean, batch_var = tf.nn.moments(tensor, [0, 1, 2])
        # NOTE(review): tf.assign is the TF1 API (tf.compat.v1.assign under
        # TF2) while tf.compat.v1.get_variable is used above -- confirm the
        # intended TF version.
        train_mean = tf.assign(mv_mean,
                               mv_mean * layer_o['decay'] + batch_mean * (1 - layer_o['decay']))
        train_var = tf.assign(mv_var,
                              mv_var * layer_o['decay'] + batch_var * (1 - layer_o['decay']))
        # Ensure the moving averages are updated whenever the layer runs.
        with tf.control_dependencies([train_mean, train_var]):
            tensor = tf.nn.batch_normalization(tensor, batch_mean, batch_var, offset, scale, layer_o['epsilon'])
    else:
        tensor = tf.nn.batch_normalization(tensor, mv_mean, mv_var, offset, scale, layer_o['epsilon'])
    return tensor
# .######..##..##.
# ...##....###.##.
# ...##....##.###.
# ...##....##..##.
# .######..##..##.
# ................
def in_layer(use_offset=False, use_scale=False, epsilon=1e-5):
    """Layer spec for instance normalization."""
    return dict(
        name='in',
        use_offset=use_offset,
        use_scale=use_scale,
        epsilon=epsilon)

def exe_in_layer(tensor, layer_o, net_info, l_index, is_first, trainable, act_o):
    """Instance normalization: per-sample, per-channel stats over H and W.

    One (offset, scale) pair is created per activation option; the pair at
    `act_o['index']` is applied. First build registers all pairs on net_info.
    """
    p_index = 7  # offset of the in_* name templates in PARAMETERS_NAME
    shape = [tensor.get_shape()[-1]]
    offset_trainable = layer_o['use_offset'] if trainable else False
    scale_trainable = layer_o['use_scale'] if trainable else False
    pars = []
    for i in range(act_o['size']):
        offset = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index  ] % (l_index, i), shape=shape, initializer=tf.constant_initializer(0.0), trainable=offset_trainable)
        scale  = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index+1] % (l_index, i), shape=shape, initializer=tf.constant_initializer(1.0), trainable=scale_trainable)
        pars.append([offset, scale])
    offset, scale = pars[act_o['index']]
    if is_first:
        for ps in pars:
            net_info.weights.extend(ps)
        parameter_count = 2
        for i in range(parameter_count):
            for j in range(act_o['size']):
                net_info.parameter_names.append(PARAMETERS_NAME[p_index + i] % (l_index, j))
    # Normalize each sample independently (requires a static batch size for unstack).
    t_list = tf.unstack(tensor)
    result = []
    for t in t_list:
        batch_mean, batch_var = tf.nn.moments(t, [0, 1])
        t = tf.nn.batch_normalization(t, batch_mean, batch_var, offset, scale, layer_o['epsilon'])
        result.append(t)
    return tf.stack(result)
    # mean, var = tf.nn.moments(tensor, [1, 2], keep_dims=True)
    # normalized = tf.div(tf.sub(tensor, mean), tf.sqrt(tf.add(var, layer_o['epsilon'])))
    # return scale * normalized + offset
# .##......##..##.
# .##......###.##.
# .##......##.###.
# .##......##..##.
# .######..##..##.
# ................
def ln_layer(use_offset=False, use_scale=False, epsilon=1e-5):
    """Layer spec for layer normalization."""
    return dict(
        name='ln',
        use_offset=use_offset,
        use_scale=use_scale,
        epsilon=epsilon)

def exe_ln_layer(tensor, layer_o, net_info, l_index, is_first, trainable, act_o):
    """Layer normalization: per-sample stats over H, W and channels.

    One (offset, scale) pair per activation option; the pair at
    `act_o['index']` is applied. First build registers all pairs on net_info.
    """
    p_index = 9  # offset of the ln_* name templates in PARAMETERS_NAME
    shape = [1, 1, tensor.get_shape()[-1]]  # broadcastable over H and W
    offset_trainable = layer_o['use_offset'] if trainable else False
    scale_trainable = layer_o['use_scale'] if trainable else False
    pars = []
    for i in range(act_o['size']):
        offset = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index  ] % (l_index, i), shape=shape, initializer=tf.constant_initializer(0.0), trainable=offset_trainable)
        scale  = tf.compat.v1.get_variable(name=PARAMETERS_NAME[p_index+1] % (l_index, i), shape=shape, initializer=tf.constant_initializer(1.0), trainable=scale_trainable)
        pars.append([offset, scale])
    offset, scale = pars[act_o['index']]
    if is_first:
        for ps in pars:
            net_info.weights.extend(ps)
        parameter_count = 2
        for i in range(parameter_count):
            for j in range(act_o['size']):
                net_info.parameter_names.append(PARAMETERS_NAME[p_index + i] % (l_index, j))
    # NOTE(review): keep_dims is the TF1 argument name (keepdims in TF2).
    mean, var = tf.nn.moments(tensor, [1, 2, 3], keep_dims=True)
    result = tf.nn.batch_normalization(tensor, mean, var, offset, scale, layer_o['epsilon'])
    return result
# ..####....####...##..##..##..##.
# .##..##..##..##..###.##..##..##.
# .##......##..##..##.###..##..##.
# .##..##..##..##..##..##...####..
# ..####....####...##..##....##...
# ................................
def conv_layer(kernel, stride, rate, filter, pad_mode, initializer, dropout=1, padding='VALID'):
    """Layer spec for a 2-D convolution.

    kernel/stride/filter: conv geometry; rate: dilation factor; pad_mode:
    tf.pad mode (None disables explicit padding); dropout: keep probability
    applied to the input during training (1 = disabled).
    """
    return dict(
        name='conv',
        kernel=kernel,
        stride=stride,
        rate=rate,
        filter=filter,
        pad_mode=pad_mode,
        initializer=initializer,
        dropout=dropout,
        padding=padding)

def exe_conv_layer(tensor, layer_o, net_info, l_index, is_first, is_training, trainable, seed, dilation_rate):
    """Run the convolution described by `layer_o`.

    Two dilation strategies:
      * dilation_rate is None: dilation is emulated by area-resizing the
        kernel itself to the dilated size (weights rescaled to preserve the
        response magnitude), then applying a plain strided conv2d.
      * dilation_rate given: standard atrous convolution; atrous conv has no
        stride, so stride > 1 is applied afterwards by area-resizing the
        output.
    On the first build, weights and their names are registered on net_info.
    """
    p_index = 0  # offsets of conv_%d_w / conv_%d_b in PARAMETERS_NAME
    parameter_count = 2
    kernel = layer_o['kernel']
    stride = layer_o['stride']
    rate = layer_o['rate']
    filter = layer_o['filter']
    pad_mode = layer_o['pad_mode']
    dropout = layer_o['dropout']
    initializer = layer_o['initializer']
    padding = layer_o['padding']
    conv_w_tmp = tf.compat.v1.get_variable(PARAMETERS_NAME[p_index  ] % l_index, \
                                 [kernel, kernel, tensor.get_shape()[-1], filter], \
                                 initializer=initializer, \
                                 trainable=trainable)
    conv_b = tf.compat.v1.get_variable(PARAMETERS_NAME[p_index+1] % l_index, \
                             [filter], \
                             initializer=tf.constant_initializer(0), \
                             trainable=trainable)
    if dilation_rate is None:
        # Blow the kernel up to the dilated size via area interpolation, then
        # rescale so the expected response magnitude matches the original kernel.
        conv_w = tf.reshape(conv_w_tmp, [1, kernel, kernel, -1])
        conv_w = tf.image.resize(conv_w, [rate*(kernel-1)+1, rate*(kernel-1)+1], method=tf.image.ResizeMethod.AREA, align_corners=False)
        conv_w = tf.reshape(conv_w, [rate*(kernel-1)+1, rate*(kernel-1)+1, tf.shape(tensor)[-1], filter])
        conv_w = conv_w * kernel * kernel / tf.cast((rate*(kernel-1)+1) * (rate*(kernel-1)+1), tf.float32)
        pad_size = rate * (kernel - 1) // 2  # "same"-style padding for the dilated kernel
        if kernel > 1 and pad_mode is not None:
            tensor = tf.pad(tensor, [[0, 0], [pad_size, pad_size], [pad_size, pad_size], [0, 0]], pad_mode)
        if is_training and dropout < 1:
            tensor = tf.nn.dropout(tensor, dropout, seed=seed)
        tensor = tf.nn.bias_add(tf.nn.conv2d(tensor, conv_w, strides=[1,stride,stride,1], padding=padding), conv_b)
    else:
        pad_size = dilation_rate * (kernel - 1) // 2
        if kernel > 1 and pad_mode is not None:
            tensor = tf.pad(tensor, [[0, 0], [pad_size, pad_size], [pad_size, pad_size], [0, 0]], pad_mode)
        if is_training and dropout < 1:
            tensor = tf.nn.dropout(tensor, dropout, seed=seed)
        tensor = tf.nn.bias_add(tf.nn.atrous_conv2d(tensor, conv_w_tmp, rate=dilation_rate, padding=padding), conv_b)
        # Atrous conv cannot stride; downscale the result instead.
        if stride > 1:
            tensor = tf.image.resize(tensor, [tf.shape(tensor)[1]//stride, tf.shape(tensor)[2]//stride], method=tf.image.ResizeMethod.AREA, align_corners=False)
    if is_first:
        net_info.weights.extend((conv_w_tmp, conv_b))
        for i in range(parameter_count):
            net_info.parameter_names.append(PARAMETERS_NAME[p_index + i] % l_index)
    return tensor
# ..####....####...##..##..##..##..........#####...######...####...######..#####...##..##...####...##.....
# .##..##..##..##..###.##..##..##..........##..##..##......##........##....##..##..##..##..##..##..##.....
# .##......##..##..##.###..##..##..........#####...####.....####.....##....##..##..##..##..######..##.....
# .##..##..##..##..##..##...####...........##..##..##..........##....##....##..##..##..##..##..##..##.....
# ..####....####...##..##....##....######..##..##..######...####...######..#####....####...##..##..######.
# ........................................................................................................
def conv_res_layer(index, kernel, stride, initializer, dropout=1, padding='VALID'):
    """Layer spec for a residual branch: convolve tensor_list[index] and add
    the result onto the current tensor."""
    return dict(
        name='conv_res',
        index=index,
        kernel=kernel,
        stride=stride,
        dropout=dropout,
        initializer=initializer,
        padding=padding)

def exe_conv_res_layer(res_tensor, layer_o, tensor_list, net_info, l_index, is_first, is_training, trainable, seed):
    """Convolve an earlier tensor to match `res_tensor`'s channels, then add.

    The output channel count is taken from `res_tensor` so the elementwise
    add is shape-compatible. First build registers weights on net_info.
    """
    p_index = 0  # reuses the conv_%d_w / conv_%d_b name templates
    parameter_count = 2
    index = layer_o['index']
    kernel = layer_o['kernel']
    stride = layer_o['stride']
    dropout = layer_o['dropout']
    initializer = layer_o['initializer']
    padding = layer_o['padding']
    filter = res_tensor.get_shape()[-1]
    tensor = tensor_list[index]
    conv_w = tf.compat.v1.get_variable(PARAMETERS_NAME[p_index  ] % l_index, \
                             [kernel, kernel, tensor.get_shape()[-1], filter], \
                             initializer=initializer, \
                             trainable=trainable)
    conv_b = tf.compat.v1.get_variable(PARAMETERS_NAME[p_index+1] % l_index, \
                             [filter], \
                             initializer=tf.constant_initializer(0), \
                             trainable=trainable)
    if is_training and dropout < 1:
        tensor = tf.nn.dropout(tensor, dropout, seed=seed)
    tensor = tf.nn.bias_add(tf.nn.conv2d(tensor, conv_w, strides=[1,stride,stride,1], padding=padding), conv_b)
    if is_first:
        net_info.weights.extend((conv_w, conv_b))
        for i in range(parameter_count):
            net_info.parameter_names.append(PARAMETERS_NAME[p_index + i] % l_index)
    tensor = tf.add(res_tensor, tensor)
    return tensor
# .#####...######...####...######..#####...##..##...####...##.....
# .##..##..##......##........##....##..##..##..##..##..##..##.....
# .#####...####.....####.....##....##..##..##..##..######..##.....
# .##..##..##..........##....##....##..##..##..##..##..##..##.....
# .##..##..######...####...######..#####....####...##..##..######.
# ................................................................
def res_layer(index, axis):
    """Layer spec for a residual add from tensor_list[index], using only the
    channels listed in `axis` (in that order)."""
    return {'name': 'res', 'index': index, 'axis': axis}


def exe_res_layer(tensor, layer_o, tensor_list):
    """Add selected channels of a previous tensor onto `tensor`."""
    skip = tensor_list[layer_o['index']]
    # Select/reorder the skip connection's channels before the add.
    picked = tf.stack([skip[:, :, :, c] for c in layer_o['axis']], -1)
    return tf.add(tensor, picked)
# .##...##...####...##..##..........#####....####....####...##.....
# .###.###..##..##...####...........##..##..##..##..##..##..##.....
# .##.#.##..######....##............#####...##..##..##..##..##.....
# .##...##..##..##...####...........##......##..##..##..##..##.....
# .##...##..##..##..##..##..######..##.......####....####...######.
# .................................................................
def max_pool_layer(kernel, stride, padding='VALID'):
    """Layer spec for 2-D max pooling."""
    return {'name': 'max_pool', 'kernel': kernel, 'stride': stride, 'padding': padding}


def exe_max_pool_layer(tensor, layer_o):
    """Apply NHWC max pooling with the spec's kernel/stride/padding."""
    k = layer_o['kernel']
    s = layer_o['stride']
    return tf.nn.max_pool(tensor, [1, k, k, 1], [1, s, s, 1], padding=layer_o['padding'])
# ..####...##..##...####...........#####....####....####...##.....
# .##..##..##..##..##..............##..##..##..##..##..##..##.....
# .######..##..##..##.###..........#####...##..##..##..##..##.....
# .##..##...####...##..##..........##......##..##..##..##..##.....
# .##..##....##.....####...######..##.......####....####...######.
# ................................................................
def avg_pool_layer(kernel, stride, padding='VALID'):
    """Layer spec for 2-D average pooling."""
    return {'name': 'avg_pool', 'kernel': kernel, 'stride': stride, 'padding': padding}


def exe_avg_pool_layer(tensor, layer_o):
    """Apply NHWC average pooling with the spec's kernel/stride/padding."""
    k = layer_o['kernel']
    s = layer_o['stride']
    return tf.nn.avg_pool(tensor, [1, k, k, 1], [1, s, s, 1], padding=layer_o['padding'])
# .#####...######...####...######..######..######.
# .##..##..##......##........##.......##...##.....
# .#####...####.....####.....##......##....####...
# .##..##..##..........##....##.....##.....##.....
# .##..##..######...####...######..######..######.
# ................................................
def resize_layer(scale, method, align_corners=False):
    """Layer spec for spatial resizing by an integer factor."""
    return {'name': 'resize', 'scale': scale, 'method': method, 'align_corners': align_corners}


def exe_resize_layer(tensor, layer_o):
    """Scale H and W by layer_o['scale'] using the requested resize method."""
    shape = tensor.get_shape().as_list()
    if shape[1] == None or shape[2] == None:
        # Static shape unknown: fall back to the dynamic shape tensor.
        shape = tf.shape(tensor)
    new_size = [shape[1] * layer_o['scale'], shape[2] * layer_o['scale']]
    return tf.image.resize(tensor, new_size, method=layer_o['method'],
                           align_corners=layer_o['align_corners'])
# ..####....####...##..##...####....####...######.
# .##..##..##..##..###.##..##..##..##..##....##...
# .##......##..##..##.###..##......######....##...
# .##..##..##..##..##..##..##..##..##..##....##...
# ..####....####...##..##...####...##..##....##...
# ................................................
def concat_layer(index):
    """Layer spec for channel-wise concat with tensor_list[index]."""
    return {'name': 'concat', 'index': index}


def exe_concat_layer(tensor, layer_o, tensor_list):
    """Concatenate a previous tensor onto `tensor` along channels (axis 3)."""
    skip = tensor_list[layer_o['index']]
    return tf.concat([tensor, skip], 3)
# ..####...##.......####...#####....####...##...............####....####...##..##...####....####...######.
# .##......##......##..##..##..##..##..##..##..............##..##..##..##..###.##..##..##..##..##....##...
# .##.###..##......##..##..#####...######..##..............##......##..##..##.###..##......######....##...
# .##..##..##......##..##..##..##..##..##..##..............##..##..##..##..##..##..##..##..##..##....##...
# ..####...######...####...#####...##..##..######..######...####....####...##..##...####...##..##....##...
# ........................................................................................................
def global_concat_layer(index):
    """Layer spec: broadcast a 1x1 global feature tensor (tensor_list[index])
    over the current tensor's spatial grid and concat on channels."""
    return dict(
        name='g_concat',
        index=index)

def exe_global_concat_layer(tensor, layer_o, tensor_list):
    """Tile a [b,1,1,c] global feature to [b,h,w,c] and concat onto `tensor`."""
    index = layer_o['index']
    h = tf.shape(tensor)[1]
    w = tf.shape(tensor)[2]
    concat_t = tf.squeeze(tensor_list[index], [1, 2])  # -> [b, c]
    dims = concat_t.get_shape()[-1]
    # Tile each sample's feature vector across every spatial position
    # (requires a static batch size for unstack).
    batch_l = tf.unstack(concat_t, axis=0)
    bs = []
    for batch in batch_l:
        batch = tf.tile(batch, [h * w])
        batch = tf.reshape(batch, [h, w, -1])
        bs.append(batch)
    concat_t = tf.stack(bs)
    # Restore the static channel dimension lost by the dynamic reshape.
    concat_t.set_shape(concat_t.get_shape().as_list()[:3] + [dims])
    tensor = tf.concat([tensor, concat_t], 3)
    return tensor
# .#####...######...####...##..##...####...#####...######.
# .##..##..##......##......##..##..##..##..##..##..##.....
# .#####...####.....####...######..######..#####...####...
# .##..##..##..........##..##..##..##..##..##......##.....
# .##..##..######...####...##..##..##..##..##......######.
# ........................................................
def reshape_layer(shape):
    """Layer spec for reshaping; the batch dimension is preserved."""
    return dict(
        name='reshape',
        shape=shape)


def exe_reshape_layer(tensor, layer_o):
    """Reshape `tensor` to [batch] + layer_o['shape'].

    Bug fix: the target shape was read from the undefined name `reshape`
    instead of the `layer_o` argument, which raised NameError at runtime.
    """
    shape = layer_o['shape']
    # Keep the (static) batch dimension, reshape the remaining dims.
    shape = [tensor.get_shape().as_list()[0]] + shape
    return tf.reshape(tensor, shape)
# ..####...##......######..#####..
# .##..##..##........##....##..##.
# .##......##........##....#####..
# .##..##..##........##....##.....
# ..####...######..######..##.....
# ................................
def clip_layer(min_v=0, max_v=1):
    """Layer spec for clamping values into [min_v, max_v]."""
    return {'name': 'clip', 'min_v': min_v, 'max_v': max_v}


def exe_clip_layer(tensor, layer_o):
    """Clamp `tensor` into the configured range."""
    return tf.clip_by_value(tensor, layer_o['min_v'], layer_o['max_v'])
# ..####...######...####...##...##...####...######..#####..
# .##........##....##......###.###..##..##....##....##..##.
# ..####.....##....##.###..##.#.##..##..##....##....##..##.
# .....##....##....##..##..##...##..##..##....##....##..##.
# ..####...######...####...##...##...####...######..#####..
# .........................................................
def sigmoid_layer():
    """Layer spec for a sigmoid activation."""
    return {'name': 'sigmoid'}


def exe_sigmoid_layer(tensor):
    """Elementwise logistic sigmoid."""
    return tf.nn.sigmoid(tensor)
# ..####....####...######..######..##...##...####...##..##.
# .##......##..##..##........##....###.###..##..##...####..
# ..####...##..##..####......##....##.#.##..######....##...
# .....##..##..##..##........##....##...##..##..##...####..
# ..####....####...##........##....##...##..##..##..##..##.
# .........................................................
def softmax_layer():
    """Layer spec for a softmax activation."""
    return {'name': 'softmax'}


def exe_softmax_layer(tensor):
    """Softmax over the last dimension."""
    return tf.nn.softmax(tensor)
# ..####....####...##..##..######..######..######..######.
# .##......##..##..##..##..##......##.........##...##.....
# ..####...##.###..##..##..####....####......##....####...
# .....##..##..##..##..##..##......##.......##.....##.....
# ..####....#####...####...######..######..######..######.
# ........................................................
def squeeze_layer(axis):
    """Layer spec for dropping the size-1 dimensions listed in `axis`."""
    return {'name': 'squeeze', 'axis': axis}


def exe_squeeze_layer(tensor, layer_o):
    """Squeeze the configured axes out of `tensor`."""
    return tf.squeeze(tensor, layer_o['axis'])
# ..####...#####....####..
# .##..##..##..##..##.....
# .######..#####....####..
# .##..##..##..##......##.
# .##..##..#####....####..
# ........................
def abs_layer():
    """Layer spec for elementwise absolute value."""
    return {'name': 'abs'}


def exe_abs_layer(tensor):
    """Elementwise |x|."""
    return tf.abs(tensor)
# .######...####...##..##..##..##.
# ...##....##..##..###.##..##..##.
# ...##....######..##.###..######.
# ...##....##..##..##..##..##..##.
# ...##....##..##..##..##..##..##.
# ................................
def tanh_layer():
    """Build the config dict for a tanh activation layer."""
    return {'name': 'tanh'}
def exe_tanh_layer(tensor):
    """Apply element-wise tanh to `tensor`."""
    activated = tf.tanh(tensor)
    return activated
# .######..##..##..##..##..........######...####...##..##..##..##.
# ...##....###.##..##..##............##....##..##..###.##..##..##.
# ...##....##.###..##..##............##....######..##.###..######.
# ...##....##..##...####.............##....##..##..##..##..##..##.
# .######..##..##....##....######....##....##..##..##..##..##..##.
# ................................................................
def inv_tanh_layer():
    """Build the config dict for an inverse-tanh (atanh) layer."""
    return {'name': 'inv_tanh'}
def exe_inv_tanh_layer(tensor):
    """Element-wise atanh: 0.5*ln((1+x)/(1-x)), with a tiny epsilon
    added so the division does not blow up exactly at x == -1."""
    shifted = tensor + 1 + 1e-100
    return -tf.log((2.0 / shifted) - 1) * 0.5
# ..####...#####...#####..
# .##..##..##..##..##..##.
# .######..##..##..##..##.
# .##..##..##..##..##..##.
# .##..##..#####...#####..
# ........................
def add_layer(value):
    """Build the config dict for a layer adding the constant `value`."""
    return {'name': 'add', 'value': value}
def exe_add_layer(tensor, layer_o):
    """Element-wise add of the configured constant."""
    offset = layer_o['value']
    return tf.add(tensor, offset)
# .##...##..##..##..##.....
# .###.###..##..##..##.....
# .##.#.##..##..##..##.....
# .##...##..##..##..##.....
# .##...##...####...######.
# .........................
def mul_layer(value):
    """Build the config dict for a layer multiplying by the constant `value`."""
    return dict(name='mul', value=value)
def exe_mul_layer(tensor, layer_o):
    """Element-wise multiply of `tensor` by the configured constant.

    Note: `tf.mul` was removed in TensorFlow 1.0; the `*` operator maps
    to the element-wise multiply op in every TF version, so it is used
    here to keep the code working across versions.
    """
    value = layer_o['value']
    return tensor * value
# .##..##..##..##..##......##.....
# .###.##..##..##..##......##.....
# .##.###..##..##..##......##.....
# .##..##..##..##..##......##.....
# .##..##...####...######..######.
# ................................
def null_layer():
    """Build the config dict for an identity (no-op) layer."""
    return {'name': 'null'}
def exe_null_layer(tensor):
    """Identity: pass `tensor` through unchanged."""
    return tensor
# .#####...######..#####...##..##...####...######..........##...##..######...####...##..##.
# .##..##..##......##..##..##..##..##..##..##..............###.###..##......##..##..###.##.
# .#####...####....##..##..##..##..##......####............##.#.##..####....######..##.###.
# .##..##..##......##..##..##..##..##..##..##..............##...##..##......##..##..##..##.
# .##..##..######..#####....####....####...######..######..##...##..######..##..##..##..##.
# .........................................................................................
def reduce_mean_layer(axis=None, keep_dims=False):
    """Build the config dict for a reduce-mean layer over `axis`."""
    return {'name': 'reduce_mean', 'axis': axis, 'keep_dims': keep_dims}
def exe_reduce_mean_layer(tensor, layer_o):
    """Average `tensor` over the configured axis, optionally keeping dims."""
    reduce_axis = layer_o['axis']
    keep = layer_o['keep_dims']
    return tf.reduce_mean(tensor, reduce_axis, keep)
# .#####...######...####...######..#####...##..##...####...##..............#####...##.......####....####...##..##.
# .##..##..##......##........##....##..##..##..##..##..##..##..............##..##..##......##..##..##..##..##.##..
# .#####...####.....####.....##....##..##..##..##..######..##..............#####...##......##..##..##......####...
# .##..##..##..........##....##....##..##..##..##..##..##..##..............##..##..##......##..##..##..##..##.##..
# .##..##..######...####...######..#####....####...##..##..######..######..#####...######...####....####...##..##.
# ................................................................................................................
def residual_block(input_p, output_p, stride, initializer, index):
    """Assemble the layer-config list for one pre-activation bottleneck
    residual block (BN -> PReLU -> conv, three times, with the middle
    3x3 conv at a quarter of the output channels), followed by the
    shortcut: an identity residual when channel counts match, otherwise
    a strided 1x1 projection shortcut.
    """
    bottle_p = output_p // 4
    layers = [
        bn_layer(True, True),
        prelu_layer(),
        conv_layer(1, stride, bottle_p, None, initializer),
        bn_layer(True, True),
        prelu_layer(),
        conv_layer(3, 1, bottle_p, "CONSTANT", initializer),
        bn_layer(True, True),
        prelu_layer(),
        conv_layer(1, 1, output_p, None, initializer),
    ]
    if input_p == output_p:
        layers.append(res_layer(index))
    else:
        layers.append(conv_res_layer(index + 2, 1, stride, initializer))
    return layers
def residual_layer(count, input_p, output_p, stride, initializer, index):
    """Stack `count` residual blocks: the first may change channel count
    and stride; the remaining `count - 1` keep `output_p` channels at
    stride 1. `index` advances by 10 per block so layer ids stay unique.
    """
    layers = residual_block(input_p, output_p, stride, initializer, index)
    for _ in range(count - 1):
        index += 10
        layers = layers + residual_block(output_p, output_p, 1, initializer, index)
    return layers
|
11501153
|
import torch
import torch.nn as nn
import math
def ASoftmax(margin, input_dim, output_dim):
    """Factory for the A-Softmax (SphereFace) angular-margin loss.

    Reference implementation:
    https://github.com/clcarwin/sphereface_pytorch/blob/master/net_sphere.py
    """
    return ASoftmaxLoss(input_dim=input_dim, output_dim=output_dim, margin=margin)
class ASoftmaxLoss(nn.Module):
    """A-Softmax (SphereFace) angular-margin loss.

    Learns one weight vector per class and penalizes the angle between
    each feature and its target-class weight with a multiplicative
    margin `m`, annealed via a lambda schedule from LambdaMax down to
    LambdaMin as training iterations accumulate.

    Based on:
    https://github.com/clcarwin/sphereface_pytorch/blob/master/net_sphere.py

    :param input_dim: feature dimension F
    :param output_dim: number of classes
    :param margin: angular margin m in [0, 5] (indexes the cos(m*theta) table)
    :param phiflag: if True use the exact Chebyshev form of cos(m*theta),
                    otherwise the Taylor approximation `myphi`
    :param gamma: focal-loss focusing exponent (0 = plain NLL)
    """
    def __init__(self, input_dim, output_dim, margin=4, phiflag=True, gamma=0):
        super(ASoftmaxLoss, self).__init__()
        self.input_dim = input_dim
        self.output_dim = output_dim
        # BUGFIX: the original wrapped the Parameter in `.cuda()`, which
        # returns a plain non-leaf Tensor — the weight was then NOT
        # registered in `parameters()` and never trained. Keep it a
        # registered Parameter; move the whole module with `.cuda()`/`.to()`.
        self.weight = nn.Parameter(torch.Tensor(input_dim, output_dim))
        self.weight.data.uniform_(-1, 1).renorm_(2, 1, 1e-5).mul_(1e5)
        self.phiflag = phiflag
        self.margin = margin
        # mlambda[m](cos(theta)) == cos(m*theta) (Chebyshev polynomials)
        self.mlambda = [
            lambda x: x**0,
            lambda x: x**1,
            lambda x: 2*x**2-1,
            lambda x: 4*x**3-3*x,
            lambda x: 8*x**4-8*x**2+1,
            lambda x: 16*x**5-20*x**3+5*x
        ]
        self.gamma = gamma
        self.it = 0  # iteration counter driving the lambda annealing
        self.LambdaMin = 5.0
        self.LambdaMax = 1500.0
        self.lamb = 1500.0
        self.reset_parameters()
    def myphi(self, x, m):
        """Taylor approximation of cos(m*x) (used when phiflag=False).

        BUGFIX: the upstream repo's final term was x**9/9!, a typo —
        the cosine series has only even powers, so it is x**10/10!.
        """
        x = x * m
        return 1-x**2/math.factorial(2)+x**4/math.factorial(4)-x**6/math.factorial(6) + \
            x**8/math.factorial(8) - x**10/math.factorial(10)
    def reset_parameters(self):
        """Kaiming-initialize the (transposed view of the) class weights."""
        nn.init.kaiming_normal_(self.weight.data.t())
    def angle_linear_layer(self, input):
        """Return (cos_theta, phi_theta), each scaled by ||x||.

        cos_theta is the cosine between each feature and each
        (renormalized) class weight; phi_theta is the margin-adjusted
        monotonic surrogate of cos(m*theta).
        """
        x = input              # size=(B,F) F is feature len
        w = self.weight        # size=(F,Classnum) F=input_dim Classnum=output_dim
        ww = w.renorm(2, 1, 1e-5).mul(1e5)   # unit-norm columns
        xlen = x.pow(2).sum(1).pow(0.5)      # size=B
        wlen = ww.pow(2).sum(0).pow(0.5)     # size=Classnum
        cos_theta = x.mm(ww)                 # size=(B,Classnum)
        cos_theta = cos_theta / xlen.view(-1, 1) / wlen.view(1, -1)
        cos_theta = cos_theta.clamp(-1, 1)
        if self.phiflag:
            cos_m_theta = self.mlambda[self.margin](cos_theta)
            theta = cos_theta.data.acos()
            # k counts which monotonic segment of cos(m*theta) we are in
            k = (self.margin*theta/3.14159265).floor()
            n_one = k*0.0 - 1
            phi_theta = (n_one**k) * cos_m_theta - 2*k
        else:
            theta = cos_theta.acos()
            phi_theta = self.myphi(theta, self.margin)
            phi_theta = phi_theta.clamp(-1*self.margin, 1)
        cos_theta = cos_theta * xlen.view(-1, 1)
        phi_theta = phi_theta * xlen.view(-1, 1)
        output = (cos_theta, phi_theta)
        return output  # pair of size=(B,Classnum) tensors
    def forward(self, input, target):
        """Compute the annealed angular-margin (focal) NLL loss.

        :param input: features, size (B, input_dim)
        :param target: class indices, size (B,) or (B, 1), dtype long
        :returns: scalar loss tensor
        """
        self.it += 1
        cos_theta, phi_theta = self.angle_linear_layer(input)
        target = target.view(-1, 1)          # size=(B,1)
        index = cos_theta.data * 0.0         # size=(B,Classnum)
        index.scatter_(1, target.data.view(-1, 1), 1)
        # BUGFIX: uint8 (`.byte()`) mask indexing is deprecated/removed
        # in newer PyTorch; boolean masks are the supported form.
        index = index.bool()
        # Anneal lambda so phi gradually replaces cos on target entries
        self.lamb = max(self.LambdaMin, self.LambdaMax/(1+0.1*self.it))
        output = cos_theta * 1.0             # size=(B,Classnum)
        output[index] -= cos_theta[index]*(1.0+0)/(1+self.lamb)
        output[index] += phi_theta[index]*(1.0+0)/(1+self.lamb)
        logpt = torch.nn.functional.log_softmax(output, dim=1)
        logpt = logpt.gather(1, target)
        logpt = logpt.view(-1)
        pt = logpt.data.exp()
        # Focal-style reweighting; gamma=0 reduces to plain NLL
        loss = -1 * (1-pt)**self.gamma * logpt
        loss = loss.mean()
        return loss
|
11501158
|
from __future__ import annotations
import hashlib
import logging
import os
import pickle
import random
import shutil
import sys
import time
from collections import OrderedDict
from copy import deepcopy
import dill
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import yaml
from dask.callbacks import Callback
from torch.optim.optimizer import Optimizer
from tqdm import tqdm
from .types import *
def make_dirs(parent_dir_path: str, child_dirs: Optional[Union[str, List[str]]] = None) -> None:
    """
    Create the parent directory and, optionally, the given child
    directories inside it. `child_dirs` may be one name or a list.
    """
    def _ensure_dir(path: str) -> None:
        """Create `path` if it isn't already a directory."""
        if not os.path.isdir(path):
            # `exist_ok=True` guards against concurrent creation
            os.makedirs(path, exist_ok=True)

    _ensure_dir(parent_dir_path)
    if child_dirs is None:
        return
    child_list = [child_dirs] if isinstance(child_dirs, str) else child_dirs
    assert isinstance(child_list, list)
    for child_name in child_list:
        _ensure_dir(get_file_path(parent_dir_path, child_name))
def remove_dir(dir_path: str, force: Optional[bool] = False) -> None:
    """
    Remove the directory at `dir_path` (no-op if it doesn't exist).
    :param force: delete even if non-empty; when False a non-empty
                  directory raises `OSError` (from `os.rmdir`).
    """
    if not os.path.isdir(dir_path):
        return
    if force:
        # Recursive delete; races/permission errors are ignored
        shutil.rmtree(dir_path, ignore_errors=True)
    else:
        os.rmdir(dir_path)
def setup_logging(name: str, log_dir: Optional[str] = None) -> logging.Logger:
    """
    Configure logging to stdout and, when `log_dir` is given, to a
    daily file `YYYY-MM-DD.log` inside `log_dir`.
    """
    # Clear pre-existing root handlers, otherwise an earlier logging
    # call would have frozen the config before ours applies.
    # See: https://rcaguilar.wordpress.com/2012/02/07/when-python-logging-isnt/
    if logging.root:
        del logging.root.handlers[:]
    formatter = logging.Formatter(
        "%(asctime)s: %(levelname)s: %(filename)s: %(funcName)s: %(message)s"
    )
    logger = logging.getLogger(name)
    logger.setLevel("INFO")
    # Always stream to stdout
    handlers = [logging.StreamHandler()]
    if log_dir is not None:
        # One log file per day, named by date
        file_name = f"{time.strftime('%Y-%m-%d')}.log"
        handlers.append(logging.FileHandler(get_file_path(log_dir, file_name)))
    for handler in handlers:
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger
def human_time_interval(time_seconds: float) -> str:
    """
    Render a duration in seconds as a human-friendly string of hours,
    minutes, seconds and milliseconds.
    :param time_seconds: time in seconds (float)
    >>> human_time_interval(13301.1)
    "3h 41m 41s 100ms"
    """
    hours, remainder = divmod(time_seconds, 3600)
    minutes, remainder = divmod(remainder, 60)
    whole_seconds, fraction = divmod(remainder, 1)
    hours, minutes, whole_seconds = int(hours), int(minutes), int(whole_seconds)
    # Integer ms for the composed forms; float ms for sub-second durations
    ms_int, ms_float = int(fraction * 1000), fraction * 1000
    if hours > 0:
        return f"{hours}h {minutes:02}m {whole_seconds:02}s {ms_int:03}ms"
    if minutes > 0:
        return f"{minutes}m {whole_seconds:02}s {ms_int:03}ms"
    if whole_seconds > 0:
        return f"{whole_seconds}s {ms_int:03}ms"
    return f"{ms_float:.2f}ms"
def set_seed(seed: Optional[int] = 0) -> None:
    """
    Fix all random seeds (python, numpy, torch CPU and CUDA).
    """
    for seeder in (random.seed, np.random.seed, torch.manual_seed,
                   torch.cuda.manual_seed_all):  # cuda call is safe without a GPU
        seeder(seed)
def print_dataframe(data: pd.DataFrame) -> None:
    """
    Log useful summary statistics of a dataframe (head, shape,
    columns, and describe() where computable).
    """
    logger.info(f"\nHead of data:\n{data.head(10)}\n")
    logger.info(f"\nShape of data: {data.shape}\n")
    logger.info(f"\nColumns:\n{data.columns}\n")
    # `describe()` can raise on exotic dtypes; warn instead of failing
    try:
        logger.info(f"\nSummary statistics:\n{data.describe()}\n")
    except TypeError:
        logger.warning("TypeError: Could not compute `.describe()` successfully.")
def save_plot(
    config: _Config,
    fig: Figure,
    plot_name: str,
    model_name: str,
    config_info_dict: Optional[_StringDict] = None,
    ext: Optional[str] = "png",
) -> None:
    """
    Save a matplotlib figure under `config.plot_dir`, named from
    `plot_name` plus the unique config name.
    :param plot_name: Plot name, e.g. "accuracy-vs-epochs"
    :param ext: file extension
    """
    assert ext in ["png", "jpeg", "eps", "pdf"]
    unique_name = get_unique_config_name(model_name, config_info_dict)
    file_name = f"{plot_name}-{unique_name}"
    # 300 dpi for publication-quality output
    fig.savefig(get_file_path(config.plot_dir, f"{file_name}.{ext}"), dpi=300)
def save_object(obj: Any, primary_path: str, file_name: Optional[str] = None, module: Optional[str] = "pickle") -> None:
    """
    Generic save of `obj` using the given `module` (pickle, dill, yaml).
    Note: see `get_file_path()` for how `primary_path` and `file_name`
    combine into the destination path.
    """
    file_path = get_file_path(primary_path, file_name)
    logger.info(f"Saving '{file_path}'...")
    if module == "yaml":
        save_yaml(obj, file_path)
    else:
        # pickle/dill share the chunked writer
        save_pickle(obj, file_path, module)
    logger.info("Done.")
def save_pickle(obj: Any, file_path: str, module: Optional[str] = "pickle") -> None:
    """
    Defensive (pickle/dill).dump: writes the payload in <2GiB chunks so
    very large objects serialize on all platforms.
    """
    MAX_BYTES = 2 ** 31 - 1  # per-write OS limit workaround
    pickler = get_pickle_module(module)
    payload = pickler.dumps(obj, protocol=pickler.HIGHEST_PROTOCOL)
    total_bytes = sys.getsizeof(payload)
    with open(file_path, "wb") as f_out:
        for offset in range(0, total_bytes, MAX_BYTES):
            f_out.write(payload[offset : offset + MAX_BYTES])
def save_yaml(obj: Dict, file_path: str) -> None:
    """Serialize the dict `obj` to a YAML file at `file_path`."""
    assert isinstance(obj, dict), "Only `dict` objects can be stored as YAML files."
    with open(file_path, "w") as stream:
        yaml.dump(obj, stream)
def load_object(primary_path: str, file_name: Optional[str] = None, module: Optional[str] = "pickle") -> Any:
    """
    Generic load using the given `module` (pickle, dill, yaml).
    Note: see `get_file_path()` for how `primary_path` and `file_name`
    combine into the source path.
    :raises FileNotFoundError: if the file does not exist.
    """
    file_path = get_file_path(primary_path, file_name)
    logger.info(f"Loading '{file_path}'...")
    # Guard clause instead of if/else nesting
    if not os.path.isfile(file_path):
        raise FileNotFoundError(f"Could not find '{file_path}'.")
    obj = load_yaml(file_path) if module == "yaml" else load_pickle(file_path, module)
    logger.info(f"Successfully loaded '{file_path}'.")
    return obj
def load_pickle(file_path: str, module: Optional[str] = "pickle") -> Any:
    """
    Defensive (pickle/dill).load: reads the file in <2GiB chunks so
    very large files load on all platforms. Intended to be called from
    `load_object()`, which has already checked the file exists.
    """
    MAX_BYTES = 2 ** 31 - 1
    pickler = get_pickle_module(module)
    total_bytes = os.path.getsize(file_path)
    buffer = bytearray(0)
    with open(file_path, "rb") as f:
        for _ in range(0, total_bytes, MAX_BYTES):
            buffer += f.read(MAX_BYTES)
    return pickler.loads(buffer)
def load_yaml(file_path: str) -> Dict:
    """
    Load a YAML file; an empty file yields an empty dict. Intended to
    be called from `load_object()`, which has already checked existence.
    """
    with open(file_path, "r") as stream:
        parsed = yaml.safe_load(stream)
    return {} if parsed is None else parsed
def remove_object(primary_path: str, file_name: Optional[str] = None) -> None:
    """
    Remove the given file if it exists (no-op otherwise).
    Note: see `get_file_path()` for how `primary_path` and `file_name`
    combine into the target path.
    """
    file_path = get_file_path(primary_path, file_name)
    if not os.path.isfile(file_path):
        return
    logger.info(f"Removing '{file_path}'...")
    os.remove(file_path)
    logger.info("Done.")
def get_file_path(primary_path: str, file_name: Optional[str] = None) -> str:
    """
    Resolve a full file path:
    - `file_name` None: `primary_path` already is the full path.
    - otherwise: `primary_path` is a folder containing `file_name`.
    """
    if file_name is None:
        return primary_path
    return os.path.join(primary_path, file_name)
def get_pickle_module(pickle_module: Optional[str] = "pickle") -> Union[pickle, dill]:
    """
    Return the module object used for pickling.
    :param pickle_module: must be one of ["pickle", "dill"]
    :raises ValueError: for any other name.
    """
    # Explicit dispatch instead of `eval` on the argument: identical
    # results for the two supported values, without evaluating
    # arbitrary strings as code.
    if pickle_module == "pickle":
        return pickle
    if pickle_module == "dill":
        return dill
    raise ValueError(f"Param 'pickle_module' ('{pickle_module}') must be one of ['pickle', 'dill'].")
def delete_model(model: nn.Module) -> None:
    """
    Delete model and free GPU memory.

    NOTE(review): rebinding the local name `model` to None does not drop
    the caller's reference — the model is only collectible once the
    caller releases its own reference; `empty_cache()` then returns the
    cached CUDA blocks to the driver. Confirm whether callers `del`
    their reference before relying on this.
    """
    model = None
    torch.cuda.empty_cache()
def get_string_from_dict(config_info_dict: Optional[_StringDict] = None) -> str:
    """
    Generate a (unique) string from a configuration dictionary.
    Keys are sorted first, so two dicts with the same items in a
    different order produce the same string. Non-dict input yields "".
    E.g.:
    >>> get_string_from_dict({"size": 100, "lr": 1e-3})
    "lr_0.001-size_100"
    >>> get_string_from_dict({"lr": 1e-3, "size": 100})  # Same
    "lr_0.001-size_100"
    """
    if not isinstance(config_info_dict, dict):
        return ""
    def _clean(token) -> str:
        # Hyphens would collide with the field separator below
        return str(token).replace("-", "_").lower()
    sorted_items = sorted(config_info_dict.items())  # order-agnostic
    return "-".join(f"{_clean(k)}_{_clean(v)}" for k, v in sorted_items)
def get_unique_config_name(primary_name: str, config_info_dict: Optional[_StringDict] = None) -> str:
    """
    Return a unique name for the current configuration: `primary_name`
    plus an MD5 hash of the canonical config string, e.g.
    `subcategory_classifier-3d02e8616cbeab37bc1bb972ecf02882`.
    When `config_info_dict` is empty/None, just `primary_name`.

    :param primary_name: primary name of the object being stored.
    :param config_info_dict: optional dict describing the config; it is
        canonicalized by `get_string_from_dict()` (sorted keys,
        "{name}_{value}" fields, hyphens mapped to underscores)
        before hashing, so logically-equal configs collide on purpose.
    """
    config_info = get_string_from_dict(config_info_dict)
    if config_info == "":
        return primary_name
    digest = hashlib.md5(config_info.encode("utf-8")).hexdigest()
    return f"{primary_name}-{digest}"
def get_checkpoint_name(
    checkpoint_type: str,
    model_name: str,
    epoch: int,
    config_info_dict: Optional[_StringDict] = None,
) -> str:
    """
    Build the checkpoint file name from the config's unique ID, e.g.
    `checkpoint-model-subcategory_classifier-3d02e8...-epoch_1.pt`.
    :param checkpoint_type: Type of checkpoint ("state" | "model")
    :param config_info_dict: optional dict describing the current config.
    """
    assert checkpoint_type in ["state", "model"]
    unique_name = get_unique_config_name(model_name, config_info_dict)
    return f"checkpoint-{checkpoint_type}-{unique_name}-epoch_{epoch}.pt"
def get_trainable_params(model: nn.Module) -> Dict[str, int]:
    """
    Log and return the number of trainable and total parameters.
    :returns: {"trainable": ..., "total": ...}
    """
    num_params = 0
    num_trainable_params = 0
    for param in model.parameters():
        n = param.numel()
        num_params += n
        if param.requires_grad:
            num_trainable_params += n
    model_name = getattr(model, "__name__", model.__class__.__name__)
    logger.info(f"Number of trainable/total parameters in {model_name}: {num_trainable_params}/{num_params}")
    return {"trainable": num_trainable_params, "total": num_params}
def get_model_outputs_only(outputs: _TensorOrTensors) -> _TensorOrTensors:
    """
    Extract just the raw model outputs. Libraries such as
    `transformers` and `allennlp` return tuples (outputs, loss,
    attentions, ...); this keeps element 0 only. Non-tuples pass
    through unchanged.
    """
    return outputs[0] if isinstance(outputs, tuple) else outputs
def copy_model(model: nn.Module) -> nn.Module:
    """Return a deep copy of `model`, leaving the original untouched."""
    return deepcopy(model)
def send_model_to_device(model: nn.Module, device: _Device, device_ids: Optional[List[int]] = None) -> nn.Module:
    """
    Move a model to `device`, wrapping it for data parallelism when
    more than one device id is given.

    Note: `model.to()` is in-place, so the original model moves too.
    Pass a copy (`model.copy()` / `copy.deepcopy(model)`) if the
    original must stay on its current device.
    """
    logger.info(f"Setting default device for model to {device}...")
    # A parallelized model is wrapped in `.module`; `.to()` must be
    # applied to the inner module in that case.
    inner = model.module if hasattr(model, "module") else model
    model = inner.to(device)
    logger.info("Done.")
    # Default resolved here rather than in the signature (mutable default)
    # See: http://www.omahapython.org/IdiomaticPython.html#default-parameter-values
    if device_ids is None:
        device_ids = []
    n_gpu = len(device_ids)
    if n_gpu > 1:
        logger.info(f"Using {n_gpu} GPUs: {device_ids}...")
        # DataParallel presumably re-exported via `from .types import *` — confirm
        model = DataParallel(model, device_ids=device_ids)
        logger.info("Done.")
    return model
def send_batch_to_device(batch: _Batch, device: _Device, non_blocking: Optional[bool] = True) -> _Batch:
    """
    Recursively send a batch — a tensor, or arbitrarily nested
    lists/tuples of tensors — to `device`, preserving the original
    container structure and types.

    :param non_blocking: allow an asynchronous copy between CPU and
                         GPU; no effect for other device pairs.
                         See: https://stackoverflow.com/a/55564072
    E.g.:
    >>> a = torch.tensor([1,2,3], device="cpu")
    >>> b = torch.tensor([4,5,6], device="cpu")
    >>> c = torch.tensor([7,8,9], device="cpu")
    >>> cuda_batch = send_batch_to_device(((a, b), c), "cuda:0")
    >>> is_batch_on_gpu(cuda_batch)
    True
    """
    if torch.is_tensor(batch):
        # Skip the copy/transfer when already on the requested device
        if compare_devices(batch.device, device):
            return batch
        return batch.to(device=device, non_blocking=non_blocking)
    if isinstance(batch, (list, tuple)):
        # Rebuild with the same container type
        return type(batch)(send_batch_to_device(item, device, non_blocking) for item in batch)
    # Structure/type of batch unknown
    logger.warning(f"Type '{type(batch)}' not understood. Returning variable as-is.")
    return batch
def send_optimizer_to_device(optimizer: Optimizer, device: _Device) -> Optimizer:
    """
    Move all tensor state of an optimizer (e.g. Adam moment buffers)
    to the given device; returns the same optimizer.
    """
    for state in optimizer.state.values():
        for key, value in state.items():
            if torch.is_tensor(value):
                state[key] = send_batch_to_device(value, device)
    return optimizer
def convert_tensor_to_numpy(batch: _Batch) -> _Batch:
    """
    Convert torch tensor(s) on any device to numpy array(s),
    preserving list/tuple nesting (like `send_batch_to_device()`).
    """
    if torch.is_tensor(batch):
        # Detach after moving to CPU so gradients are dropped
        return send_batch_to_device(batch, "cpu").detach().numpy()
    if isinstance(batch, (list, tuple)):
        # Rebuild with the same container type
        return type(batch)(convert_tensor_to_numpy(item) for item in batch)
    # Structure/type of batch unknown
    logger.warning(f"Type '{type(batch)}' not understood. Returning variable as-is.")
    return batch
def convert_numpy_to_tensor(
    batch: _Batch, device: Optional[_Device] = None, non_blocking: Optional[bool] = True
) -> _Batch:
    """
    Inverse of `convert_tensor_to_numpy()`: turn numpy array(s) into
    torch tensor(s), preserving nesting, optionally moving them to
    `device`.
    """
    if isinstance(batch, np.ndarray):
        tensor = torch.as_tensor(batch)
        # Only transfer when a different target device was requested
        if device is None or compare_devices(tensor.device, device):
            return tensor
        return send_batch_to_device(tensor, device, non_blocking)
    if isinstance(batch, (list, tuple)):
        # Rebuild with the same container type
        return type(batch)(convert_numpy_to_tensor(item, device, non_blocking) for item in batch)
    # Structure/type of batch unknown
    logger.warning(f"Type '{type(batch)}' not understood. Returning variable as-is.")
    return batch
def compare_tensors_or_arrays(batch_a: _Batch, batch_b: _Batch) -> bool:
    """
    Compare the contents of two batches, each an `np.ndarray`,
    `torch.Tensor`, or a list/tuple of them. Contents are compared, so
    batches of different container/tensor types can still be equal.
    :raises TypeError: for unsupported batch types.
    """
    # Normalize tensors to numpy so a single comparison path suffices
    if torch.is_tensor(batch_a):
        batch_a = convert_tensor_to_numpy(batch_a)
    if torch.is_tensor(batch_b):
        batch_b = convert_tensor_to_numpy(batch_b)
    if isinstance(batch_a, np.ndarray) and isinstance(batch_b, np.ndarray):
        return np.all(batch_a == batch_b)
    if isinstance(batch_a, (list, tuple)) and isinstance(batch_b, (list, tuple)):
        return all(compare_tensors_or_arrays(a, b) for a, b in zip(batch_a, batch_b))
    raise TypeError(
        f"Types of each batch '({type(batch_a)}, {type(batch_b)})' must "
        f"be `np.ndarray`, `torch.Tensor` or a list/tuple of them."
    )
def compare_model_parameters(parameters1: Iterable[torch.Tensor], parameters2: Iterable[torch.Tensor]) -> bool:
    """
    True iff every corresponding pair of parameters matches exactly
    (pairs are zipped in order). Useful in unit tests for checking
    save/load round-trips.
    """
    for first, second in zip(parameters1, parameters2):
        mismatches = first.data.ne(second.data).sum()
        if mismatches > 0:
            return False
    return True
def compare_model_state_dicts(
    state_dict1: OrderedDict[str, _TensorOrTensors], state_dict2: OrderedDict[str, _TensorOrTensors]
) -> bool:
    """
    True iff the tensors of the two state dicts match pairwise (keys
    are zipped in order). Useful in unit tests for checking save/load
    round-trips.
    """
    for key1, key2 in zip(state_dict1, state_dict2):
        difference = state_dict1[key1].ne(state_dict2[key2]).sum()
        if difference > 0:
            return False
    return True
def is_batch_on_gpu(batch: _Batch) -> bool:
    """
    True iff every tensor in `batch` (a tensor or a nested list/tuple
    of them) lives on a GPU.
    :raises TypeError: for unsupported batch types.
    """
    if torch.is_tensor(batch):
        return batch.is_cuda
    if isinstance(batch, (list, tuple)):
        return all(is_batch_on_gpu(item) for item in batch)
    raise TypeError(f"Type '{type(batch)}' not understood.")
def is_model_on_gpu(model: nn.Module) -> bool:
    """True iff the model's parameters live on a non-CPU device."""
    device_type = get_model_device(model).type
    return device_type != "cpu"
def is_model_parallelized(model: nn.Module) -> bool:
    """
    True iff `model` is wrapped for multi-GPU data parallelism,
    detected via the wrapper's `device_ids` attribute.
    """
    device_ids = getattr(model, "device_ids", None)
    return isinstance(device_ids, (list, tuple))
def get_model_device(model: nn.Module) -> torch.device:
    """
    The device the `model` is on (assuming that all
    the model parameters are on the same device).
    Delegates to `get_next_parameter()` and reads its `.device`.
    """
    return get_next_parameter(model).device
def get_model_dtype(model: nn.Module) -> torch.dtype:
    """
    The dtype of the `model` (assuming that all
    the model parameters have the same dtype).
    Delegates to `get_next_parameter()` and reads its `.dtype`.
    """
    return get_next_parameter(model).dtype
def get_next_parameter(model: nn.Module) -> _TensorOrTensors:
    """
    Return the model's first parameter — handy for reading off the
    model's device and dtype.
    """
    try:
        return next(model.parameters())
    except StopIteration:
        # nn.DataParallel on PyTorch 1.5 can hide parameters; fall back
        # to scanning modules for plain tensor attributes.
        def find_tensor_attributes(module: nn.Module) -> List[Tuple[str, Tensor]]:
            return [(name, value) for name, value in module.__dict__.items() if torch.is_tensor(value)]
        members = model._named_members(get_members_fn=find_tensor_attributes)
        first_name_and_tensor = next(members)
        return first_name_and_tensor[1]
def compare_devices(device1, device2):
    """
    True iff the two devices (each a `torch.device` or device string)
    refer to the same device; False when either is None.
    :raises ValueError: for unsupported device types.
    """
    if device1 is None or device2 is None:
        return False
    def _as_torch_device(device):
        if isinstance(device, torch.device):
            return device
        if isinstance(device, str):
            return torch.device(device)
        raise ValueError(f"Device '{device}' not understood.")
    return _as_torch_device(device1) == _as_torch_device(device2)
def get_total_grad_norm(parameters: Iterable[torch.Tensor], norm_type: Optional[float] = 2) -> torch.Tensor:
    """
    Total `norm_type` norm over all parameter gradients. Implemented
    via `clip_grad_norm_` with an infinite max norm, which computes
    and returns the norm without actually clipping anything.
    """
    total_norm = nn.utils.clip_grad_norm_(parameters, max_norm=np.inf, norm_type=norm_type)
    return total_norm
def get_model_performance_trackers(config: _Config) -> Tuple[ModelTracker, ModelTracker]:
    """
    Build the (train, validation) pair of `ModelTracker`s for logging
    losses and eval criteria.
    """
    return ModelTracker(config, is_train=True), ModelTracker(config, is_train=False)
class ModelTracker:
    """
    Track a model's training progress across epochs.

    Stores per-epoch lists of batch losses and per-criterion evaluation
    metrics (accuracy, f1, etc.), and provides accessors to fetch, flatten,
    and log them. Create one tracker per dataset split (train / val).
    """
def __init__(self, config: _Config, is_train: Optional[bool] = True):
self.eval_criteria = config.eval_criteria
self.is_train = is_train
if not is_train:
self.early_stopping_criterion = config.early_stopping_criterion
self._init_progress_trackers()
def _init_progress_trackers(self):
"""
Initialize the loss/eval_criteria tracking dictionaries.
"""
self.loss_hist, self.eval_metrics_hist = OrderedDict(), OrderedDict()
for eval_criterion in self.eval_criteria:
self.eval_metrics_hist[eval_criterion] = OrderedDict()
def add_losses(self, losses: List[float], epoch: Optional[int] = -1) -> None:
"""
Store the losses at a given epoch.
:param epoch: If not provided, will store
at the next epoch.
"""
epoch = self._get_next_epoch(epoch, "loss")
if not isinstance(losses, list):
losses = [losses]
self.loss_hist[epoch] = losses
def get_losses(
self, epoch: Optional[int] = None, flatten: Optional[bool] = False
) -> Union[List[float], OrderedDict[str, List[float]]]:
"""
Get the loss history.
:param epoch: If provided, returns the list
of losses at that epoch,
otherwise the whole dictionary.
If epoch=-1, returns list of
losses at last epoch.
:param flatten: If true, a single list of all
flattened values is returned.
"""
epoch = self._get_correct_epoch(epoch, "loss")
if epoch is not None:
return self.loss_hist[epoch]
if flatten: # Flatten across all epochs
return self.get_all_losses()
return self.loss_hist
def get_all_losses(self) -> List[float]:
"""
Get the entire loss history across all
epochs flattened into one list.
"""
return np.concatenate(list(self.loss_hist.values())).tolist()
def add_eval_metrics(self, eval_metrics: Dict[str, float], epoch: Optional[int] = -1) -> None:
"""
Store the eval_metrics at a given epoch.
:param epoch: If not provided, will store
at the next epoch.
"""
epoch = self._get_next_epoch(epoch, "eval_metrics")
for eval_criterion in self.eval_criteria:
self.eval_metrics_hist[eval_criterion][epoch] = eval_metrics[eval_criterion]
def get_eval_metrics(
self,
eval_criterion: Optional[str] = None,
epoch: Optional[int] = None,
flatten: Optional[bool] = False,
) -> Union[float, List[float], OrderedDict[str, Union[float, List[float]]]]:
"""
Get the evaluation metrics history.
:param eval_criterion: The criterion whose history
is to be returned.
:param epoch: The epoch for which the history
is to be returned.
- If both params are provided, the value at that epoch
is returned.
- If only eval_criterion is provided:
- If `flatten=False`, a dictionary of values
at each epoch is returned
- If `flatten=True`, the values across all
epochs are flattened into a single list
- If only `epoch` is provided, a dictionary of values
for each criterion at that epoch is returned.
If `epoch=-1`, returns list of losses at last epoch.
"""
epoch = self._get_correct_epoch(epoch, "eval_metrics")
if eval_criterion is not None:
if epoch is not None: # Both params provided
return self.eval_metrics_hist[eval_criterion][epoch]
elif flatten: # Flatten across all epochs
return self.get_all_eval_metrics(eval_criterion)
return self.eval_metrics_hist[eval_criterion] # Return ordered dict
elif epoch is not None:
return OrderedDict(
{eval_criterion: self.eval_metrics_hist[eval_criterion][epoch] for eval_criterion in self.eval_criteria}
)
return self.eval_metrics_hist
def get_all_eval_metrics(self, eval_criterion: Optional[str] = None) -> Union[List[float], Dict[str, List[float]]]:
"""
Get the entire eval_metrics history across all
epochs flattened into one list for each eval_criterion.
:param eval_criterion: If provided, only the list of
history for that eval_criterion
is returned.
"""
def get_eval_criterion_metrics(eval_criterion):
return list(self.eval_metrics_hist[eval_criterion].values())
if eval_criterion is not None:
return get_eval_criterion_metrics(eval_criterion)
eval_metrics_dict = OrderedDict(
{eval_criterion: get_eval_criterion_metrics(eval_criterion) for eval_criterion in self.eval_criteria}
)
return eval_metrics_dict
    def log_epoch_metrics(self, epoch: Optional[int] = -1) -> str:
        """
        Log loss and evaluation metrics for a
        given epoch in the following format:
        "TRAIN Epoch: 1 Average loss: 0.5, ACCURACY: 0.8, PRECISION: 0.7"

        :param epoch: Epoch to report; -1 means the latest recorded epoch.
        :returns: The formatted string that was logged.
        """
        # Resolve -1 against each history separately; both must agree.
        epoch_loss = self._get_correct_epoch(epoch, "loss")
        epoch_eval_metrics = self._get_correct_epoch(epoch, "eval_metrics")
        assert epoch_loss == epoch_eval_metrics
        dataset_type = "TRAIN" if self.is_train else "VAL "
        mean_loss_epoch = np.mean(self.get_losses(epoch=epoch_loss))
        # "\033[1m" ... "\033[0m" render the whole line bold on ANSI terminals.
        result_str = f"\n\033[1m{dataset_type} Epoch: {epoch_loss}\tAverage loss: {mean_loss_epoch:.4f}, "
        result_str += ", ".join(
            [
                f"{eval_criterion}: {self.get_eval_metrics(eval_criterion, epoch_loss):.4f}"
                for eval_criterion in self.eval_criteria
            ]
        )
        result_str += "\033[0m\n"
        logger.info(result_str)
        return result_str
    def add_metrics(self, losses: List[float], eval_metrics: Dict[str, float], epoch: Optional[int] = -1) -> None:
        """
        Shorthand to record both the loss history and the eval-metric
        history for `epoch` (-1 = next epoch) in one call.

        :param losses: Per-batch losses for the epoch.
        :param eval_metrics: Mapping of criterion name -> value for the epoch.
        :param epoch: Target epoch; -1 appends after the latest stored epoch.
        """
        self.add_losses(losses, epoch)
        self.add_eval_metrics(eval_metrics, epoch)
def add_and_log_metrics(
self, losses: List[float], eval_metrics: Dict[str, float], epoch: Optional[int] = -1
) -> str:
"""
Shorthand function to add losses
and eval metrics at the end of a
given epoch, and then print the
results for that epoch.
"""
self.add_metrics(losses, eval_metrics, epoch)
return self.log_epoch_metrics(epoch)
def get_early_stopping_metric(self) -> float:
"""
For validation loggers, returns the
`early_stopping_criterion` for the
last epoch for which history is stored.
"""
if self.is_train:
raise ValueError("Early stopping must be applied on validation set.")
return self.get_eval_metrics(self.early_stopping_criterion, -1)
def get_eval_metrics_df(self, epoch: Optional[int] = None) -> pd.DataFrame:
"""
Get a DataFrame object of all eval metrics
for all (or optionally a specific) epoch(s).
"""
metrics_df = pd.DataFrame.from_dict(self.get_eval_metrics())
metrics_df.insert(loc=0, column="epoch", value=metrics_df.index)
metrics_df.reset_index(drop=True, inplace=True)
if epoch is not None:
epoch = self._get_correct_epoch(epoch, "loss")
return metrics_df.query("epoch == @epoch")
return metrics_df
def set_best_epoch(self, best_epoch: Optional[int] = None) -> None:
"""
Add the `best_epoch` attribute to validation
logger for future evaluation purposes.
"""
if self.is_train:
raise ValueError("Best epoch can only be stored into validation logger.")
if best_epoch is None:
self.best_epoch = self.get_overall_best_epoch()
else:
if best_epoch not in self.epochs:
raise ValueError(f"Best epoch provided ({best_epoch}) must be one of {self.epochs}.")
self.best_epoch = best_epoch
def get_overall_best_epoch(self) -> int:
"""
Get the overall best epoch if early stopping is not used.
Returns the maximum value across all epochs based
on the (early) stopping criterion, which defaults
to accuracy / mse if it isn't defined.
"""
eval_metrics_dict = self.get_eval_metrics(self.early_stopping_criterion)
best_epoch = max(eval_metrics_dict, key=eval_metrics_dict.get)
return best_epoch
@property
def _epochs_loss(self) -> List[int]:
"""
List of epochs for which loss history is stored.
"""
return list(self.loss_hist.keys())
@property
def _epochs_eval_metrics(self) -> List[int]:
"""
List of epochs for which eval metrics history is stored.
"""
k = list(self.eval_metrics_hist.keys())[0] # Any random metric
return list(self.eval_metrics_hist[k].keys())
@property
def epochs(self) -> List[int]:
"""
Returns the total list of epochs for which history is stored.
Assumes that history is stored for the same number of epochs
for both loss and eval_metrics.
"""
assert self._epochs_loss == self._epochs_eval_metrics
return self._epochs_loss
def _get_correct_epoch(self, epoch: int, hist_type: str) -> int:
"""
If `epoch=-1`, returns the last epoch for
which history is currently stored, otherwise
the epoch itself.
"""
if epoch == -1:
total_epochs = self._epochs_loss if hist_type == "loss" else self._epochs_eval_metrics
return max(total_epochs) if len(total_epochs) else 0
return epoch
def _get_next_epoch(self, epoch: int, hist_type: str) -> int:
"""
If `epoch=-1`, returns the next epoch for
which history is to be stored, otherwise
the epoch itself.
"""
if epoch == -1:
total_epochs = self._epochs_loss if hist_type == "loss" else self._epochs_eval_metrics
epoch = max(total_epochs) if len(total_epochs) else 0
return epoch + 1
class SequencePooler(nn.Module):
    """
    Pool the sequence output of transformer-based models.

    Implemented as a class rather than a lambda so instances remain
    compatible with `torch.save()` and `torch.load()`.
    """
    DEFAULT_POOLER_TYPE = "default"

    def __init__(self, model_type: Optional[str] = "bert"):
        """
        :param model_type: Type of `transformers` model.
                           Can be manually specified or extracted
                           from the model class like this:
                           >>> from transformers import AutoModel
                           >>> model = AutoModel.from_pretrained("roberta-base")
                           >>> model.config.model_type
                           "roberta"
        """
        super().__init__()
        self._set_pooler(model_type)

    def __repr__(self):
        return f"{self.__class__.__name__}(model_type={self.model_type})"

    def forward(self, x):
        return self.pooler(x)

    def _set_pooler(self, model_type: str) -> None:
        """Select the pooling function for `model_type`, defaulting to identity."""
        self.POOLER_MAPPING = {
            "bert": self._bert_pooler,
            "distilbert": self._distilbert_pooler,
            "albert": self._albert_pooler,
            "roberta": self._roberta_pooler,
            "electra": self._electra_pooler,
        }
        pooler = self.POOLER_MAPPING.get(model_type)
        if pooler is None:
            logger.warning(
                f"No supported sequence pooler was found for model of type '{model_type}'. Using the default one."
            )
            self.model_type = self.DEFAULT_POOLER_TYPE
            self.pooler = self._default_pooler
        else:
            self.model_type = model_type
            self.pooler = pooler

    def _default_pooler(self, x):
        return x

    def _bert_pooler(self, x):
        """
        **NOTE**: The sentence/sequence vector obtained
        from BERT does NOT correspond to the [CLS] vector.
        It takes as input this vector and then runs a small
        network on top of it to give the "pooled" sequence output.
        See:
          1. https://github.com/huggingface/transformers/blob/1cdd2ad2afb73f6af185aafecb7dd7941a90c4d1
             /src/transformers/modeling_bert.py#L426-L438
          2. https://github.com/huggingface/transformers/blob/1cdd2ad2afb73f6af185aafecb7dd7941a90c4d1
             /src/transformers/modeling_bert.py#L738-L739
          3. https://www.kaggle.com/questions-and-answers/86510
        """
        return x[1]  # Pooled seq vector

    def _distilbert_pooler(self, x):
        return x[0][:, 0]  # [CLS] vector

    def _albert_pooler(self, x):
        return self._bert_pooler(x)  # Same as BERT (see above)

    def _roberta_pooler(self, x):
        return x[0][:, 0]  # <s> vector (equiv. to [CLS])

    def _electra_pooler(self, x):
        return x[0][:, 0]  # [CLS] vector
class DataParallel(nn.DataParallel):
    """
    Custom DataParallel class inherited from `nn.DataParallel`.
    Purpose is to allow direct access to model attributes and
    methods when it is wrapped in a `module` attribute because
    of `nn.DataParallel`.
    """
    def __init__(self, model: nn.Module, **kwargs):
        super().__init__(model, **kwargs)

    def __getattr__(self, name):
        """
        Return model's own attribute if available, otherwise
        fallback to attribute of parent class.
        Solves the issue that when `nn.DataParallel` is applied,
        methods and attributes defined in `BasePyTorchModel`
        like `predict()` can only be accessed with
        `self.module.predict()` instead of `self.predict()`.
        """
        try:
            # `nn.Module.__getattr__` resolves parameters, buffers and
            # submodules (including the wrapped `module` itself).
            return super().__getattr__(name)
        except AttributeError:
            # Fall back to plain attributes/methods of the wrapped model.
            return getattr(self.module, name)

    @property
    def is_parallelized(self) -> bool:
        """
        Check if the model is parallelized
        across multiple GPUs.
        """
        return is_model_parallelized(self)
class DaskProgressBar(Callback):
    """
    Real-time tqdm progress bar adapted to dask dataframes (for `apply`).
    Code reference: https://github.com/tqdm/tqdm/issues/278#issue-180452055
    """
    def _start_state(self, dsk, state):
        # Total = every task known to the scheduler at start, in any state.
        self._tqdm = tqdm(total=sum(len(state[k]) for k in ["ready", "waiting", "running", "finished"]))

    def _posttask(self, key, result, dsk, state, worker_id):
        # One tick per completed task.
        self._tqdm.update(1)

    def _finish(self, dsk, state, errored):
        # Close the bar so the terminal line is released and output is
        # flushed; the original left the tqdm instance open (display leak).
        self._tqdm.close()
class GELU(nn.Module):
    """
    GELU activation (tanh approximation) as used in the Google BERT repo
    (identical to OpenAI GPT). Also see: https://arxiv.org/abs/1606.08415
    Code reference:
    https://github.com/huggingface/transformers/blob/1cdd2ad2afb73f6af185aafecb7dd7941a90c4d1
    /src/transformers/activations.py#L25-L29
    """
    def __init__(self):
        super().__init__()

    def forward(self, x: torch.Tensor):
        # 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
        inner = np.sqrt(2 / np.pi) * (x + 0.044715 * torch.pow(x, 3.0))
        return 0.5 * x * (1 + torch.tanh(inner))
logger = setup_logging(__name__)
|
11501160
|
from django import forms
class AutocompleteWidget(forms.widgets.TextInput):
    """Text input rendered with an autocomplete-enabled template."""
    # Template override used instead of Django's default text-input template.
    template_name = "register/includes/autocomplete_input.html"
|
11501180
|
from __future__ import print_function
import os, time
import torch
import torch.nn.functional as F
from datasets import get_img_loader
from nets import ConvODENet
from trainer import TrainerBase
import util, options
import easydict
from torch.optim import SGD, Adam
from torchdiffeq import odeint_adjoint as odesolve
from snopt import SNOpt, ODEFuncBase, ODEBlock
import colored_traceback.always
from ipdb import set_trace as debug
def build_optim_and_precond(opt, network):
    """
    Build the optimizer (and optional SNOpt preconditioner) for `network`.

    :param opt: options namespace; reads `optimizer`, `lr`, `l2_norm`,
                `momentum`, and for SNOpt also `snopt_eps` / `snopt_freq`.
    :param network: the `torch.nn.Module` whose parameters are optimized.
    :returns: tuple (optim, precond) where `precond` is None unless
              `opt.optimizer == 'SNOpt'`.
    :raises ValueError: if `opt.optimizer` is not SGD / Adam / SNOpt.
    """
    # build optimizer
    optim_dict = {"lr": opt.lr, 'weight_decay': opt.l2_norm, 'momentum': opt.momentum}
    if opt.optimizer == 'Adam':
        optim_dict.pop('momentum', None)  # Adam takes no momentum kwarg
    optim_classes = {
        'SGD': SGD,
        'Adam': Adam,
        'SNOpt': SGD,  # SNOpt preconditions a plain SGD step
    }
    if opt.optimizer not in optim_classes:
        # Fail loudly instead of the original's opaque
        # "'NoneType' object is not callable" from dict.get() returning None.
        raise ValueError(f"Unsupported optimizer '{opt.optimizer}'; expected one of {sorted(optim_classes)}.")
    optim = optim_classes[opt.optimizer](network.parameters(), **optim_dict)

    # build precond
    precond = None
    if opt.optimizer == 'SNOpt':
        kwargs = dict(eps=opt.snopt_eps, update_freq=opt.snopt_freq, full_precond=True)
        precond = SNOpt(network, **kwargs)
    return optim, precond
class ConcatConv2d(torch.nn.Module):
    """
    2D (de)convolution applied to the input concatenated with a constant
    time channel, as used inside neural-ODE dynamics functions.
    """
    def __init__(self, dim_in, dim_out, ksize=3, stride=1, padding=0, dilation=1, groups=1, bias=True, transpose=False):
        super(ConcatConv2d, self).__init__()
        conv_cls = torch.nn.ConvTranspose2d if transpose else torch.nn.Conv2d
        # +1 input channel for the broadcast time value.
        self._layer = conv_cls(
            dim_in + 1, dim_out, kernel_size=ksize, stride=stride,
            padding=padding, dilation=dilation, groups=groups, bias=bias,
        )

    def forward(self, t, x):
        # Broadcast scalar t into an (N, 1, H, W) channel and prepend it.
        time_channel = torch.ones_like(x[:, :1, :, :]) * t
        return self._layer(torch.cat([time_channel, x], 1))
class ConvODEfunc(ODEFuncBase):
    """Convolutional ODE dynamics: two time-conditioned 3x3 convs with ReLU between."""
    def __init__(self, opt, hidden):
        super(ConvODEfunc, self).__init__(opt)
        self.relu = torch.nn.ReLU(inplace=True)
        self.conv1 = ConcatConv2d(hidden, hidden, 3, 1, 1)
        self.conv2 = ConcatConv2d(hidden, hidden, 3, 1, 1)

    def F(self, t, x):
        # `nfe` (number of function evaluations) is maintained on the base class.
        self.nfe += 1
        return self.conv2(t, self.relu(self.conv1(t, x)))
class Trainer(TrainerBase):
    """Image-classification trainer; only customizes how a batch is unpacked."""
    def __init__(self, train_loader, test_loader, network, optim, loss,
                 precond=None, sched=None):
        super(Trainer, self).__init__(
            train_loader, test_loader, network, optim, loss, precond, sched
        )

    def prepare_var(self, opt, batch):
        """Move the (data, target) pair onto `opt.device` and expose as attributes."""
        data, target = [v.to(opt.device) for v in batch]
        var = easydict.EasyDict()
        var.data = data
        var.target = target
        return var
def build_clf_neural_ode(opt, hidden=64, t0=0.0, t1=1.0):
    """Assemble the ODE-block classifier network and print a parameter summary."""
    dynamics = ConvODEfunc(opt, hidden)
    integration_time = torch.tensor([t0, t1]).float()
    ode = ODEBlock(opt, dynamics, odesolve, integration_time, is_clf_problem=True)
    network = ConvODENet(ode, hidden, opt.input_dim[0]).to(opt.device)
    print(network)
    n_params = util.count_parameters(network)
    print(util.magenta("Number of trainable parameters: {}".format(n_params)))
    return network
if __name__ == '__main__':
    # build opt and trainer
    opt = options.set()
    train_loader, test_loader = get_img_loader(opt)
    network = build_clf_neural_ode(opt, t1=opt.t1)
    optim, precond = build_optim_and_precond(opt, network)
    loss = F.cross_entropy
    trainer = Trainer(train_loader, test_loader, network, optim, loss, precond=precond)
    trainer.restore_checkpoint(opt, keys=["network","optim"])
    # save path
    os.makedirs(opt.result_dir, exist_ok=True)
    path = "{}/{}-{}_seed_{}_".format(opt.result_dir, opt.problem, opt.optimizer_config, opt.seed)
    # things we're going to collect over training
    losses = util.Collector(path + 'train')
    eval_losses = util.Collector(path + 'eval')
    accuracies = util.Collector(path + 'accuracy')
    train_clocks = util.Collector(path + 'train_clock')
    eval_clocks = util.Collector(path + 'eval_clock')
    if opt.use_adaptive_t1: t1s = util.Collector(path + 't1s')
    # start training
    print(util.yellow("======= TRAINING START ======="))
    print(util.green(path))
    trainer.time_start()
    for ep in range(opt.epoch):
        for it, batch in enumerate(trainer.train_loader):
            train_it = ep*len(trainer.train_loader)+it  # global iteration index
            loss = trainer.train_step(opt, train_it, batch=batch)
            # util.print_train_progress(opt, trainer, train_it, loss)
            losses.append(loss)
            train_clocks.append(trainer.clock)
            if opt.use_adaptive_t1: t1s.append(trainer.get_ode_t1())
            # periodic held-out evaluation
            if (train_it+1)%opt.eval_itr==0:
                eval_loss, accuracy=trainer.evaluate(opt, ep, train_it)
                util.print_eval_progress(opt, trainer, train_it, eval_loss, accuracy=accuracy)
                eval_losses.append(eval_loss)
                accuracies.append(accuracy)
                eval_clocks.append(trainer.clock)
    # persist everything collected over the run
    losses.save()
    eval_losses.save()
    accuracies.save()
    train_clocks.save()
    eval_clocks.save()
    if opt.use_adaptive_t1: t1s.save()
    time.sleep(1)  # give async writes a moment to flush before the final banner
    print(util.yellow("======= TRAINING DONE ======="))
|
11501191
|
from cx_Freeze import setup, Executable
# Files bundled alongside the frozen executable.
include_files = [ "PyiiASMH.ico",
                  "__includes.s",
                  "lib/" ]
# Stdlib packages excluded to shrink the build.
excludes = [ "tkinter" ]
options = {
    "build_exe": {
        "optimize": 4,  # highest bytecode optimization level
        "excludes": excludes,
        "include_files": include_files
    }
}
setup(name = "PyiiASMH 3",
      version = "4.1.5",
      description = "A cross platform gecko code compiler for PowerPC assembly",
      executables = [Executable("pyiiasmh.py", icon="PyiiASMH.ico")],
      author = "JoshuaMK",
      author_email = "<EMAIL>",
      options = options
)
|
11501243
|
from test import cassette
from test.resources.documents import *
def test_should_create_group_document():
    """A fully-populated group document has core/bib/client/tag fields set
    and is attached to the expected group (replayed from an HTTP cassette)."""
    session = get_user_session()
    delete_all_group_documents()
    with cassette('fixtures/resources/documents/create_group_document/create_group_document.yaml'):
        doc = create_group_document(session)
        assert_core_document(doc)
        assert_bib_document(doc)
        assert_client_document(doc)
        assert_tags_document(doc)
        assert doc.group.id == '164d48fb-2343-332d-b566-1a4884a992e4'
def test_should_create_minimal_group_document():
    """Creating a document with only title and type inside a group keeps
    those fields and the group id (replayed from an HTTP cassette)."""
    session = get_user_session()
    delete_all_documents()
    with cassette('fixtures/resources/documents/create_group_document/create_minimal_group_document.yaml'):
        doc = session.groups.get('164d48fb-2343-332d-b566-1a4884a992e4').documents\
            .create('Underwater basket weaving', 'journal')
        assert doc.title == 'Underwater basket weaving'
        assert doc.type == 'journal'
        assert doc.group.id == '164d48fb-2343-332d-b566-1a4884a992e4'
def test_should_get_group_details():
    """The group reference on a created document resolves to the group's
    details, e.g. its name (replayed from an HTTP cassette)."""
    session = get_user_session()
    delete_all_documents()
    with cassette('fixtures/resources/documents/create_group_document/get_group_details.yaml'):
        doc = create_group_document(session)
        assert doc.group.name == 'Basket weaving'
|
11501249
|
import nltk
from nltk.tag import tnt
from nltk.corpus import treebank
# Use a disjoint train/test split. The original used training=[:7000] with
# testing=[2000:], so most test sentences also appeared in the training set
# (data leakage), inflating the reported accuracy.
sents = treebank.tagged_sents()
split = int(len(sents) * 0.8)  # 80/20 split, robust to the corpus size
training = sents[:split]
testing = sents[split:]
tnt_tagger = tnt.TnT()
tnt_tagger.train(training)
# Tagging accuracy of the trained TnT tagger on held-out sentences.
print(tnt_tagger.evaluate(testing))
|
11501257
|
from .base import BaseModel
class RationaleResult(BaseModel):
    """Model class for handling rationale result object."""

    @classmethod
    def from_json(cls, json):
        """
        Construct a RationaleResult from a plain dict.

        Implemented as a classmethod (alternate constructor) rather than a
        staticmethod so subclasses built from the same payload shape produce
        instances of their own type; `RationaleResult.from_json(...)` keeps
        working exactly as before.

        :param json: Dict with a rationale result value.
        :type json: dict

        :returns: RationaleResult object
        :rtype: :class:`infermedica_api.models.RationaleResult`
        """
        return cls(**json)
|
11501317
|
from typing import Dict
from tabulate import tabulate
from python import DOCUMENT_ID, TOPIC_ID, SUBTOPIC, TOKEN, SENTENCE_IDX, EVENT, MENTION_ID, TOKEN_IDX_FROM, TOKEN_IDX_TO
from python.handwritten_baseline import MENTION_TYPE_COARSE
from python.handwritten_baseline.pipeline.data.base import Dataset, BaselineDataProcessorStage
class StatisticsStage(BaselineDataProcessorStage):
    """Pipeline stage that computes corpus statistics for a dataset, writes
    them to `statistics.txt` in the stage's working directory and, optionally,
    dumps example sentences/documents. Returns the dataset unchanged (apart
    from the token markers added when `print_examples` is enabled)."""

    def __init__(self, pos, config, config_global, logger):
        super(StatisticsStage, self).__init__(pos, config, config_global, logger)
        # When True, also write example sentences/documents to disk/stdout.
        self.print_examples = config.get("print_examples", False)

    def _process_dataset(self,
                         dataset: Dataset,
                         live_objects: Dict) -> Dataset:
        # --- corpus-level counts ---
        num_topics = len(dataset.documents.index.get_level_values(TOPIC_ID).unique())
        num_subtopics = len(dataset.documents.index.to_frame()[[TOPIC_ID, SUBTOPIC]].drop_duplicates())
        num_documents = len(dataset.documents)
        avg_subtopics_per_topic = num_subtopics / num_topics
        avg_documents_per_subtopic = num_documents / num_subtopics
        avg_documents_per_topic = num_documents / num_topics

        # --- token / sentence counts ---
        num_tokens = len(dataset.tokens)
        num_types = len(dataset.tokens[TOKEN].unique())
        num_sentences = len(dataset.tokens.index.to_frame()[[DOCUMENT_ID, SENTENCE_IDX]].drop_duplicates())

        # --- action mentions and coreference clusters ---
        num_action_mentions = len(dataset.mentions_action)
        # "stacked" = several mentions annotated on the identical token span
        num_action_mentions_stacked = dataset.mentions_action.reset_index().duplicated([DOCUMENT_ID, SENTENCE_IDX, TOKEN_IDX_FROM, TOKEN_IDX_TO], keep=False).sum()
        num_action_clusters = len(dataset.mentions_action[EVENT].unique())
        num_action_singleton_clusters = (dataset.mentions_action[EVENT].value_counts() == 1).sum()
        num_participant_mentions = len(dataset.mentions_participants) if dataset.mentions_participants is not None else 0
        num_location_mentions = len(dataset.mentions_location) if dataset.mentions_location is not None else 0
        num_time_mentions = len(dataset.mentions_time) if dataset.mentions_time is not None else 0

        clusters_by_size = dataset.mentions_action[EVENT].value_counts()
        cluster_size_distribution = clusters_by_size.value_counts().sort_index()
        cluster_size_distribution.name = "num-occurrences"
        cluster_size_distribution.index.name = "cluster-size"

        # how many mentions have participants, time, location linked
        if dataset.semantic_roles is not None:
            # count number of participant, time and location mention for each action mention
            num_mentions_of_coarse_type_per_mention = dataset.semantic_roles.groupby([DOCUMENT_ID, MENTION_ID])[MENTION_TYPE_COARSE].value_counts()
            num_mentions_of_coarse_type_per_mention.name = "num-mentions"
            # pivot so that we have [doc-id, mention-id] as the index and the number of location/participant/time mentions for each mention as the columns
            num_mentions_of_coarse_type_per_mention = num_mentions_of_coarse_type_per_mention.reset_index().pivot([DOCUMENT_ID, MENTION_ID], MENTION_TYPE_COARSE, "num-mentions")
            # number of mentions which have at least one argument (location/participant/time) linked, regardless of type
            num_mentions_with_linked_args = len(num_mentions_of_coarse_type_per_mention)
            # number of mentions which have at least one argument (location/participant/time) linked, by type - absolute and relative percentage
            num_mentions_with_args_by_type = {type_: len(num_mentions_of_coarse_type_per_mention.loc[num_mentions_of_coarse_type_per_mention[type_] > 0]) for type_ in ["location", "participants", "time"]}
            num_mentions_with_args_by_type_relative = {type_: num / num_mentions_with_linked_args for type_, num in num_mentions_with_args_by_type.items()}
        else:
            num_mentions_with_linked_args = 0
            num_mentions_with_args_by_type = {}
            num_mentions_with_args_by_type_relative = {}

        # NOTE: we can get the number of coreference link by type (within-doc, within-subtopic, cross-subtopic, cross-topic from the mention pair generator during training), so we don't need to repeat that here

        # --- write the report ---
        with (self.stage_disk_location / "statistics.txt").open("w") as f:
            f.write(f"""
Number of topics: {num_topics}
Number of subtopics: {num_subtopics}
Number of documents: {num_documents}
Avg. subtopics per topic: {avg_subtopics_per_topic}
Avg. documents per subtopic: {avg_documents_per_subtopic}
Avg. documents per topic: {avg_documents_per_topic}
Number of tokens: {num_tokens}
Number of types: {num_types}
Number of sentences: {num_sentences}
Number of participant mentions: {num_participant_mentions}
Number of time mentions: {num_time_mentions}
Number of location mentions: {num_location_mentions}
Number of action mentions: {num_action_mentions}
Number of stacked action mentions (identical span): {num_action_mentions_stacked}
Number of event clusters: {num_action_clusters}
Number of singleton event clusters: {num_action_singleton_clusters}
Largest clusters:
{tabulate(clusters_by_size.head(10).to_frame("num-mentions"), headers="keys")}
Cluster size distribution:
{tabulate(cluster_size_distribution.to_frame(), headers="keys")}
""")
            if dataset.semantic_roles is not None:
                f.write(f"""
Number of action mentions with linked arguments: {num_mentions_with_linked_args}
Number of action mentions with linked arguments of type:
{num_mentions_with_args_by_type}
Number of action mentions with linked arguments of type (relative):
{num_mentions_with_args_by_type_relative}
""")

        # print some examples if that's desired
        if self.print_examples:
            # print sentences with stacked annotations
            mentions_action = dataset.mentions_action.reset_index()
            mentions_action_stacked = mentions_action.loc[mentions_action.duplicated([DOCUMENT_ID, SENTENCE_IDX, TOKEN_IDX_FROM, TOKEN_IDX_TO], keep=False)]
            sentences_with_stacked_actions = mentions_action_stacked[[DOCUMENT_ID, SENTENCE_IDX]].drop_duplicates()
            for _, row in sentences_with_stacked_actions.iterrows():
                sentence = " ".join(dataset.tokens.loc[tuple(row), TOKEN])
                print(f"{row.values} - {sentence}")

            # for each event cluster, collect all sentences referencing that event
            sentences_with_event_mentions = dataset.mentions_action.reset_index()[[DOCUMENT_ID, SENTENCE_IDX]].drop_duplicates()
            sentences_with_event_mentions = sentences_with_event_mentions.merge(dataset.tokens.reset_index(), on=[DOCUMENT_ID, SENTENCE_IDX])
            sentences_with_event_mentions = sentences_with_event_mentions.groupby([DOCUMENT_ID, SENTENCE_IDX])[TOKEN].apply(list)
            mentions_action_with_sentences = dataset.mentions_action.reset_index().merge(sentences_with_event_mentions, on=[DOCUMENT_ID, SENTENCE_IDX])
            mentions_action_with_sentences.to_json(self.stage_disk_location / "event_mentions_with_sentences.json", orient="records")

            # print full documents with event actions highlighted
            # NOTE(review): this mutates `dataset.tokens` in place by wrapping
            # mention spans in ">>> <<<" markers — later stages see the markers.
            for idx, mention in dataset.mentions_action.iterrows():
                doc_id, mention_id = idx
                sent_idx = mention[SENTENCE_IDX]
                token_idx_from = mention[TOKEN_IDX_FROM]
                token_idx_to = mention[TOKEN_IDX_TO]
                dataset.tokens.at[(doc_id, sent_idx, token_idx_from), TOKEN] = ">>>" + dataset.tokens.at[(doc_id, sent_idx, token_idx_from), TOKEN]
                dataset.tokens.at[(doc_id, sent_idx, token_idx_to-1), TOKEN] = dataset.tokens.at[(doc_id, sent_idx, token_idx_to-1), TOKEN] + "<<<"
            sentences = dataset.tokens[TOKEN].groupby([DOCUMENT_ID, SENTENCE_IDX]).apply(lambda l: " ".join(l.values))
            documents = sentences.groupby(DOCUMENT_ID).apply(lambda l: "\n".join(l.values))
            documents.to_csv(self.stage_disk_location / "documents_with_event_actions_marked.csv")
        return dataset


# Entry point looked up by the pipeline loader.
component = StatisticsStage
|
11501380
|
import torch.nn as nn
from models.styleganv2.modules import EqualConv2d, EqualConv2dSame
from models.styleganv2.op import FusedLeakyReLU
from . import _utils as utils
class EncoderMixin:
    """Add encoder functionality such as:
    - output channels specification of feature tensors (produced by encoder)
    - patching first convolution for arbitrary input channels
    """

    @property
    def out_channels(self):
        """Channel dimensions of each feature tensor produced by the encoder forward."""
        return self._out_channels[: self._depth + 1]

    def set_in_channels(self, in_channels):
        """Re-wire the first convolution for a non-RGB number of input channels."""
        if in_channels == 3:
            return  # encoders already expect RGB input
        self._in_channels = in_channels
        if self._out_channels[0] == 3:
            self._out_channels = (in_channels, *list(self._out_channels)[1:])
        utils.patch_first_conv(model=self, in_channels=in_channels)

    def get_stages(self):
        """Method should be overridden in encoder"""
        raise NotImplementedError

    def make_dilated(self, stage_list, dilation_list):
        """Replace strides with dilation in the given stages (one rate per stage)."""
        stages = self.get_stages()
        for stage_idx, rate in zip(stage_list, dilation_list):
            utils.replace_strides_with_dilation(
                module=stages[stage_idx],
                dilation_rate=rate,
            )
class BasicBlockv2(nn.Module):
    """ResNet basic block built from equalized-LR convolutions and a fused
    leaky-ReLU activation (StyleGANv2-style layers)."""
    def __init__(self, inplanes, planes, stride=1, downsample=False, norm_layer=None, same=False):
        super().__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        # `same=True` selects the shape-preserving conv variant.
        ConvLayer = EqualConv2d if not same else EqualConv2dSame
        # Both self.conv1 and self.downsample layers downsample the input when stride != 1
        self.conv1 = ConvLayer(inplanes, planes, 3, stride=stride, padding=1, bias=False)
        self.bn1 = norm_layer(planes, affine=True)
        self.relu = FusedLeakyReLU(planes)
        self.conv2 = ConvLayer(planes, planes, 3, padding=1, bias=False)
        self.bn2 = norm_layer(planes, affine=True)
        self.stride = stride
        if downsample:
            # 1x1 stride-2 projection so the residual matches the main path's shape.
            conv_down = ConvLayer(inplanes, planes, 1, stride=2, bias=False)
            norm_down = norm_layer(planes, affine=True)
            self.downsample = nn.Sequential(conv_down, norm_down)
        else:
            self.downsample = None

    def forward(self, x):
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity  # residual connection
        out = self.relu(out)
        return out
class ResNetv2Encoder(nn.Module, EncoderMixin):
    """ResNet-18-shaped encoder built from `BasicBlockv2` blocks; `forward`
    returns one feature tensor per stage (depth + 1 tensors total)."""
    def __init__(self, in_channels, ngf=64, norm_layer=None, same=False):
        super().__init__()
        # Per-stage output channels: input, stem, then ngf * {1, 2, 4, 8}.
        self._out_channels = [in_channels, ngf]
        self._out_channels += [ngf*2**i for i in range(4)]
        self._depth = 5
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        ConvLayer = EqualConv2d if not same else EqualConv2dSame
        # Stem: 7x7 stride-2 conv + norm + activation.
        self.conv1 = ConvLayer(in_channels, ngf, kernel_size=7, stride=2, padding=3, bias=False)
        self.norm1 = norm_layer(ngf, affine=True)
        self.relu = FusedLeakyReLU(ngf)
        maxpool_layers = []
        # `same=True` pads by replication instead of zeros before the pool.
        if same:
            maxpool_layers.append(nn.ReplicationPad2d(1))
        else:
            maxpool_layers.append(nn.ZeroPad2d(1))
        maxpool_layers.append(nn.MaxPool2d(kernel_size=3, stride=2))
        self.maxpool = nn.Sequential(*maxpool_layers)
        # Four residual stages, each of two blocks; stages 2-4 halve resolution.
        block1_1 = BasicBlockv2(ngf, ngf, stride=1, norm_layer=norm_layer, same=same)
        block1_2 = BasicBlockv2(ngf, ngf, stride=1, norm_layer=norm_layer, same=same)
        self.layer1 = nn.Sequential(block1_1, block1_2)
        block2_1 = BasicBlockv2(ngf, ngf*2, stride=2, norm_layer=norm_layer, downsample=True, same=same)
        block2_2 = BasicBlockv2(ngf*2, ngf*2, stride=1, norm_layer=norm_layer, same=same)
        self.layer2 = nn.Sequential(block2_1, block2_2)
        block3_1 = BasicBlockv2(ngf*2, ngf*4, stride=2, norm_layer=norm_layer, downsample=True, same=same)
        block3_2 = BasicBlockv2(ngf*4, ngf*4, stride=1, norm_layer=norm_layer, same=same)
        self.layer3 = nn.Sequential(block3_1, block3_2)
        block4_1 = BasicBlockv2(ngf*4, ngf*8, stride=2, norm_layer=norm_layer, downsample=True, same=same)
        block4_2 = BasicBlockv2(ngf*8, ngf*8, stride=1, norm_layer=norm_layer, same=same)
        self.layer4 = nn.Sequential(block4_1, block4_2)

    def get_stages(self):
        """Encoder stages in forward order; stage 0 is the identity (raw input)."""
        return [
            nn.Identity(),
            nn.Sequential(self.conv1, self.norm1, self.relu),
            nn.Sequential(self.maxpool, self.layer1),
            self.layer2,
            self.layer3,
            self.layer4,
        ]

    def forward(self, x):
        # Run each stage in turn, collecting every intermediate feature map.
        stages = self.get_stages()
        features = []
        for i in range(self._depth + 1):
            x = stages[i](x)
            features.append(x)
        return features
class ResNetv2EncoderCont(nn.Module, EncoderMixin):
    """Variant of `ResNetv2Encoder` whose input carries `add_channels` extra
    channels processed by a separate stem conv; their contribution is blended
    into the main stem output with weight `alfa` in `forward`."""
    def __init__(self, in_channels, add_channels=1, ngf=64, norm_layer=None, same=False):
        super().__init__()
        self._out_channels = [in_channels, ngf]
        self._out_channels += [ngf*2**i for i in range(4)]
        self._depth = 5
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        ConvLayer = EqualConv2d if not same else EqualConv2dSame
        self.in_channels = in_channels
        self.add_channels = add_channels
        # Two parallel stems: one for the main channels, one for the extras.
        self.conv1 = ConvLayer(in_channels, ngf, kernel_size=7, stride=2, padding=3, bias=False)
        self.conv1_add = ConvLayer(add_channels, ngf, kernel_size=7, stride=2, padding=3, bias=False)
        self.norm1 = norm_layer(ngf, affine=True)
        self.relu = FusedLeakyReLU(ngf)
        maxpool_layers = []
        if same:
            maxpool_layers.append(nn.ReplicationPad2d(1))
        else:
            maxpool_layers.append(nn.ZeroPad2d(1))
        maxpool_layers.append(nn.MaxPool2d(kernel_size=3, stride=2))
        self.maxpool = nn.Sequential(*maxpool_layers)
        block1_1 = BasicBlockv2(ngf, ngf, stride=1, norm_layer=norm_layer, same=same)
        block1_2 = BasicBlockv2(ngf, ngf, stride=1, norm_layer=norm_layer, same=same)
        self.layer1 = nn.Sequential(block1_1, block1_2)
        block2_1 = BasicBlockv2(ngf, ngf*2, stride=2, norm_layer=norm_layer, downsample=True, same=same)
        block2_2 = BasicBlockv2(ngf*2, ngf*2, stride=1, norm_layer=norm_layer, same=same)
        self.layer2 = nn.Sequential(block2_1, block2_2)
        block3_1 = BasicBlockv2(ngf*2, ngf*4, stride=2, norm_layer=norm_layer, downsample=True, same=same)
        block3_2 = BasicBlockv2(ngf*4, ngf*4, stride=1, norm_layer=norm_layer, same=same)
        self.layer3 = nn.Sequential(block3_1, block3_2)
        block4_1 = BasicBlockv2(ngf*4, ngf*8, stride=2, norm_layer=norm_layer, downsample=True, same=same)
        block4_2 = BasicBlockv2(ngf*8, ngf*8, stride=1, norm_layer=norm_layer, same=same)
        self.layer4 = nn.Sequential(block4_1, block4_2)

    def get_stages(self):
        # NOTE(review): stage 1 here only covers the main stem; `forward`
        # below does not use this list (it blends both stems manually).
        return [
            nn.Identity(),
            nn.Sequential(self.conv1, self.norm1, self.relu),
            nn.Sequential(self.maxpool, self.layer1),
            self.layer2,
            self.layer3,
            self.layer4,
        ]

    def forward(self, x, alfa=1.):
        """Split x into main/extra channels, blend the two stem outputs with
        weight `alfa` (0 disables the extra channels), and collect per-stage
        features like the base encoder."""
        stages = self.get_stages()
        features = []
        x_main = x[:, :self.in_channels]
        x_add = x[:, self.in_channels:]
        assert(x_add.shape[1] == self.add_channels)
        features.append(x_main)
        out_main = self.conv1(x_main)
        out_add = self.conv1_add(x_add)
        out = out_main + alfa*out_add
        out = self.norm1(out)
        out = self.relu(out)
        features.append(out)
        out = self.maxpool(out)
        out = self.layer1(out)
        features.append(out)
        out = self.layer2(out)
        features.append(out)
        out = self.layer3(out)
        features.append(out)
        out = self.layer4(out)
        features.append(out)
        return features
|
11501388
|
import pytest
import uvicore
import sqlalchemy as sa
from uvicore.support.dumper import dump
# DB SQLAlchemy
@pytest.fixture(scope="module")
def Posts():
    """Module-scoped fixture yielding the Posts table class."""
    # Imported inside the fixture so the app1 test app is bootstrapped first.
    from app1.database.tables.posts import Posts
    yield Posts
@pytest.fixture(scope="module")
def post(Posts):
    """Module-scoped fixture yielding the underlying SQLAlchemy table."""
    yield Posts.table
@pytest.mark.asyncio
async def test_single(app1, Posts, post):
    # Single NOT where
    # NOTE(review): only this test passes connection='app1' explicitly; the
    # sibling tests rely on the default connection — verify they match.
    query = post.select().where(post.c.creator_id != 2)
    posts = await uvicore.db.fetchall(query, connection='app1')
    assert [1, 2, 6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and(app1, Posts, post):
    # Multiple where NOT AND (chained .where() calls combine with AND)
    query = post.select().where(post.c.creator_id != 2).where(post.c.owner_id != 2)
    posts = await uvicore.db.fetchall(query)
    assert [6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and2(app1, Posts, post):
    # Multiple where NOT AND using multiple parameters on and_
    # (equivalent result set to the chained .where() version above)
    query = post.select().where(sa.and_(post.c.creator_id != 2, post.c.owner_id != 2))
    posts = await uvicore.db.fetchall(query)
    assert [6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_or(app1, Posts, post):
    # Where NOT OR
    query = post.select().where(sa.or_(post.c.creator_id != 1, post.c.owner_id != 2))
    posts = await uvicore.db.fetchall(query)
    assert [3, 4, 5, 6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and_or(app1, Posts, post):
    # Where NOT AND combined with a where NOT OR group
    query = post.select().where(post.c.unique_slug != 'test-post5').where(sa.or_(post.c.creator_id != 1, post.c.owner_id != 2))
    posts = await uvicore.db.fetchall(query)
    assert [3, 4, 6, 7] == [x.id for x in posts]
|
11501391
|
from .img_service import ImgService
from ..config import Config
from ..user_exception import UserException
from ..qiniu_client import QiniuClient
class QiniuImgService(ImgService):
    def upload(self, localImg: str) -> str:
        """Upload a local image to Qiniu cloud storage.

        :param localImg: path of the local image file to upload.
        :returns: public URL of the uploaded image on success, or ``False`` on
                  failure. NOTE(review): the ``False`` return contradicts the
                  ``-> str`` annotation; callers must handle the falsy case.
        """
        clientInfo = self.getConfigInfo()
        client = QiniuClient()
        dnsDomain = clientInfo[Config.QINIU_INFO_DNS_DOMAIN]
        accessKey = clientInfo[Config.QINIU_INFO_ACCESS_KEY]
        secretKey = clientInfo[Config.QINIU_INFO_SECRET_KEY]
        bucketName = clientInfo[Config.QINIU_INFO_BUCKET_NAME]
        sysConfig = Config.getInstance()
        try:
            url = client.upload(localImg, dnsDomain, accessKey,
                                secretKey, bucketName)
        except UserException as e:
            # Best-effort: log the failure and signal it via a falsy return.
            sysConfig.writeErrorLog(e.getErrorMsg())
            return False
        return url
def getConfigInfo(self) -> dict:
"""获取七牛云的详细配置"""
sysConfig = Config.getInstance()
info = sysConfig.getConfigParam(Config.PARAM_QINIU_INFO)
if info == '':
raise UserException(UserException.CODE_NO_IMG_SERVICE_CONFIG)
return info
def inputConfig(self) -> None:
qiniuInfo = {}
print("{}{}".format(self.globalization.getText("qiniu_info_required"),
self.globalization.getText("colon")))
qiniuInfo[Config.QINIU_INFO_ACCESS_KEY] = input("{}{}".format(self.globalization.getText("qiniu_access_key_input"),
self.globalization.getText("colon")))
qiniuInfo[Config.QINIU_INFO_SECRET_KEY] = input("{}{}".format(self.globalization.getText("qiniu_secret_key_input"),
self.globalization.getText("colon")))
qiniuInfo[Config.QINIU_INFO_DNS_DOMAIN] = input("{}{}".format(self.globalization.getText("qiniu_dns_domain_input"),
self.globalization.getText("colon")))
qiniuInfo[Config.QINIU_INFO_BUCKET_NAME] = input("{}{}".format(self.globalization.getText("qiniu_bucket_name_input"),
self.globalization.getText("colon")))
sysConfig = Config.getInstance()
sysConfig.setConfigParam(Config.PARAM_QINIU_INFO, qiniuInfo)
sysConfig.writeMainConfig()
print(self.globalization.getText("qiniu_info_saved"))
def getConfigInfoText(self) -> tuple:
lines = list()
qiniuInfo = self.getConfigInfo()
lines.append("\t{}{}{}".format(self.globalization.getText("qiniu_access_key"),
self.globalization.getText("colon"), qiniuInfo[Config.QINIU_INFO_ACCESS_KEY]))
lines.append("\t{}{}{}".format(self.globalization.getText("qiniu_secret_key"),
self.globalization.getText("colon"), qiniuInfo[Config.QINIU_INFO_SECRET_KEY]))
lines.append("\t{}{}{}".format(self.globalization.getText("qiniu_dns_domain"),
self.globalization.getText("colon"), qiniuInfo[Config.QINIU_INFO_DNS_DOMAIN]))
lines.append("\t{}{}{}".format(self.globalization.getText("qiniu_bucket_name"),
self.globalization.getText("colon"), qiniuInfo[Config.QINIU_INFO_BUCKET_NAME]))
return tuple(lines)
def inputNewConfig(self) -> None:
sysConfig = Config.getInstance()
qiniuInfo = {}
qiniuInfo[Config.QINIU_INFO_ACCESS_KEY] = input("{}{}".format(self.globalization.getText(
"qiniu_new_access_key_input"), self.globalization.getText("colon")))
qiniuInfo[Config.QINIU_INFO_SECRET_KEY] = input("{}{}".format(self.globalization.getText(
"qiniu_new_secret_key_input"), self.globalization.getText("colon")))
qiniuInfo[Config.QINIU_INFO_DNS_DOMAIN] = input("{}{}".format(self.globalization.getText(
"qiniu_new_dns_domain_input"), self.globalization.getText("colon")))
qiniuInfo[Config.QINIU_INFO_BUCKET_NAME] = input("{}{}".format(self.globalization.getText(
"qiniu_new_bucket_name_input"), self.globalization.getText("colon")))
sysConfig.setConfigParam(Config.PARAM_QINIU_INFO, qiniuInfo)
sysConfig.writeMainConfig()
print(self.globalization.getText("token_changed_successfully"))
|
11501392
|
from decimal import Decimal
from django.db import models
from django.utils import timezone
from consistency_model import consistency_validator, consistency_error
class Order(models.Model):
    """
    total - the amount that initially was charged
    refund - the amount was later refunded
    revenue - the total revenue from that order
    """

    created_on = models.DateTimeField(default=timezone.now)
    total = models.DecimalField(
        default=Decimal("0.00"), decimal_places=2, max_digits=10
    )
    refund = models.DecimalField(
        default=Decimal("0.00"), decimal_places=2, max_digits=10
    )
    revenue = models.DecimalField(
        default=Decimal("0.00"), decimal_places=2, max_digits=10
    )

    @consistency_validator
    def validate_total(self):
        # A plain assert reports a single, unnamed consistency failure.
        assert self.total >= 0, "can't be negative"

    @consistency_validator
    def validate_revenue(self):
        # consistency_error() lets one validator report several named checks.
        if self.revenue < 0:
            consistency_error("can't be negative", "negative")
        if self.revenue != self.total - self.refund:
            consistency_error("revenue = total - refund", "formula")
class OrderItem(models.Model):
    """A single line item belonging to an Order."""

    order = models.ForeignKey(Order, on_delete=models.CASCADE)
    name = models.CharField(max_length=10)
    price = models.DecimalField(decimal_places=2, max_digits=10)

    @consistency_validator
    def validate_price(self):
        # Fix: the original asserted ``self.total``, which does not exist on
        # OrderItem (it is an Order field); the intended check is the price.
        assert self.price >= 0, "can't be negative"
|
11501482
|
import cauldron as cd
from cauldron import plotting
import plotly.graph_objs as go
df = cd.shared.df
cd.display.markdown(
"""
## Plot Data
Now we're going to plot the loaded data to get an
idea for what it looks like. For this notebook,
we're going to use the Plotly plotting library.
"""
)
cd.display.plotly(
data=go.Scatter(
x=df['Year'],
y=100.0 * df['Female'] / df['Total'],
mode='lines+markers'
),
layout=plotting.create_layout(
title='Female Time Covers',
y_label='Percentage each Year (%)',
x_label='Year'
)
)
cd.display.markdown(
"""
Immediately apparent from this plot is that the
data has high-frequency variations. We want to get
a better sense of the trend. To do that we'll use
a running-window smoothing operator that looks like:
$$$
X_i = @frac{1}{2N + 1} @sum_{@delta=-N}^N x_{@delta}
$$$
where $$N$$ is the window size.
"""
)
|
11501509
|
from django.db import migrations, models
from corehq.apps.domain.models import Domain
from corehq.util.django_migrations import skip_on_fresh_install
@skip_on_fresh_install
def _disable_ga(apps, schema_editor):
    """Opt every HIPAA-compliant domain out of Google Analytics."""
    for domain in Domain.get_all():
        if not domain.hipaa_compliant:
            continue
        domain.ga_opt_out = True
        domain.save()
class Migration(migrations.Migration):

    dependencies = [
        ('domain', '0004_domainauditrecordentry'),
    ]

    # Reverse is a no-op: we cannot know which domains this migration opted out.
    operations = [
        migrations.RunPython(_disable_ga, migrations.RunPython.noop)
    ]
|
11501514
|
import logging
import time
from typing import Any, Dict, List
import httpx
from django.contrib.auth import get_user_model
from django.http import HttpRequest
from django.urls import resolve
from . import settings as app_settings
from .utils import get_client_ip, get_user_agent
try:
from django.contrib.gis.geoip2 import GeoIP2 # type: ignore
except ImportError:
CAN_GEOIP = False
else: # pragma: no cover
CAN_GEOIP = True
log = logging.getLogger(__name__)
class AmplitudeException(Exception):
    """Raised when a request to the Amplitude HTTP API fails."""
class Amplitude():
    """Client for the Amplitude HTTP API v2 that builds events from Django requests.

    https://developers.amplitude.com/docs/http-api-v2
    """

    def __init__(
        self,
        api_key: str = None,
        include_user_data: bool = None,
        include_group_data: bool = None,
        min_id_length: int = None,
    ):
        """Store configuration; any argument left unset falls back to app settings."""
        if not api_key:
            api_key = app_settings.API_KEY
        if include_user_data is None:
            include_user_data = app_settings.INCLUDE_USER_DATA
        if include_group_data is None:
            include_group_data = app_settings.INCLUDE_GROUP_DATA
        if not min_id_length:
            min_id_length = app_settings.MIN_ID_LENGTH

        self.url = 'https://api.amplitude.com/2/httpapi'
        self.api_key = api_key
        self.include_user_data = include_user_data
        self.include_group_data = include_group_data
        self.min_id_length = min_id_length

    def send_events(self, events: List[Dict[str, Any]]) -> dict:
        """Send a batch of events to Amplitude and return the decoded response.

        :raises AmplitudeException: on any HTTP error from the API.
        https://developers.amplitude.com/docs/http-api-v2
        """
        events = [self.clean_event(event) for event in events]
        kwargs: Dict[str, Any] = {
            'url': self.url,
            'method': 'POST',
            'json': {
                'events': events,
                'api_key': self.api_key
            }
        }
        if self.min_id_length is not None:
            kwargs['json']['options'] = {
                'min_id_length': self.min_id_length
            }
        response = httpx.request(**kwargs)
        try:
            response.raise_for_status()
        except httpx.HTTPError as e:
            # Fix: chain the underlying HTTP error for debuggability.
            raise AmplitudeException(e) from e
        return response.json()

    def clean_event(self, event: dict) -> dict:
        """Strip empty values (None, [], '', {}) from the event, one level deep."""
        for key, value in event.items():
            if isinstance(value, dict):
                event[key] = {k: v for k, v in value.items() if v not in [None, [], '', {}]}  # NOQA: E501
        event = {k: v for k, v in event.items() if v not in [None, [], '', {}]}
        return event

    def build_event_data(
        self, event_type: str,
        request: HttpRequest,
        event_properties: dict = None,
        **kwargs
    ) -> dict:
        """
        Build event data using a Django request object.

        Fix: the default used to be a mutable ``{}`` shared between calls
        (B006); ``None`` behaves identically in the truthiness check below.
        """
        event: Dict[str, Any] = {
            'device_id': request.session.get('amplitude_device_id'),
            'session_id': request.session.get('amplitude_session_id'),
            'event_type': event_type,
            'time': int(round(time.time() * 1000)),
            'ip': get_client_ip(request),
            'language': getattr(request, 'LANGUAGE_CODE', ''),
            'app_version': kwargs.get('app_version'),
            'carrier': kwargs.get('carrier'),
            'dma': kwargs.get('dma'),
            'price': kwargs.get('price'),
            'quantity': kwargs.get('quantity'),
            'revenue': kwargs.get('revenue'),
            'productId': kwargs.get('productId'),
            'revenueType': kwargs.get('revenueType'),
            'idfa': kwargs.get('idfa'),
            'idfv': kwargs.get('idfv'),
            'adid': kwargs.get('adid'),
            'android_id': kwargs.get('android_id'),
            'event_id': kwargs.get('event_id'),
            'insert_id': kwargs.get('insert_id'),
        }
        if event_properties:
            event['event_properties'] = event_properties
        else:
            event['event_properties'] = self.event_properties_from_request(request)  # NOQA: E501
        try:
            # Zero-padded user id, e.g. 42 -> '00042'.
            event['user_id'] = f'{request.user.pk:05}'
        except (AttributeError, TypeError):
            pass
        event['user_properties'] = self.user_properties_from_request(request)
        event['groups'] = self.group_from_request(request)
        device_data = self.device_data_from_request(request)
        event.update(device_data)
        location_data = self.location_data_from_ip_address(event['ip'])
        event.update(location_data)
        return event

    def event_properties_from_request(self, request: HttpRequest) -> dict:
        """Derive default event properties (URL, method, headers) from the request."""
        url_name = resolve(request.path_info).url_name
        event_properties = {
            'url': request.path,
            'url_name': url_name,
            'method': request.method,
            'params': dict(request.GET),
            'scheme': request.scheme,
            'content_type': request.content_type,
            'content_params': request.content_params,
            'content_length': request.META.get('CONTENT_LENGTH'),
            'http_accept': request.META.get('HTTP_ACCEPT'),
            'http_accept_encoding': request.META.get('HTTP_ACCEPT_ENCODING'),
            'http_accept_language': request.META.get('HTTP_ACCEPT_LANGUAGE'),
            'http_host': request.META.get('HTTP_HOST'),
            'referer': request.META.get('HTTP_REFERER'),
            'server_name': request.META.get('SERVER_NAME'),
            'server_port': request.META.get('SERVER_PORT'),
        }
        if request.resolver_match:
            event_properties['kwargs'] = request.resolver_match.kwargs
        return event_properties

    def user_properties_from_request(self, request: HttpRequest) -> dict:
        """Return profile data for an authenticated user, or {} when disabled."""
        try:
            request.user.is_authenticated
        except AttributeError:
            return {}
        if not self.include_user_data or not request.user.is_authenticated:
            return {}

        User = get_user_model()
        user = User.objects.get(pk=request.user.pk)
        user_data = {
            'username': user.get_username(),
            'email': user.email,
            'full_name': user.get_full_name(),
            'is_staff': user.is_staff,
            'is_superuser': user.is_superuser,
        }
        if user.last_login:
            user_data['last_login'] = user.last_login.isoformat()
        if user.date_joined:
            user_data['date_joined'] = user.date_joined.isoformat()
        return user_data

    def group_from_request(self, request: HttpRequest) -> list:
        """Return the user's group names, or [] when disabled/anonymous."""
        try:
            request.user.is_authenticated
        except AttributeError:
            return []
        if not self.include_group_data or not request.user.is_authenticated:
            return []

        User = get_user_model()
        user = User.objects.get(pk=request.user.pk)
        groups = user.groups.all().values_list('name', flat=True)
        return list(groups)

    def location_data_from_ip_address(self, ip_address: str) -> dict:
        """Geo-locate the client IP via GeoIP2, when the library is available."""
        location_data: dict = {}
        if not ip_address or not CAN_GEOIP:
            return location_data

        # pip install geoip2
        # https://pypi.org/project/geoip2/
        # from django.contrib.gis.geoip2 import GeoIP2
        g = GeoIP2()
        location = g.city(ip_address)
        location_data['country'] = location['country_name']
        location_data['city'] = location['city']
        lat_lon = g.lat_lon(ip_address)
        location_data['location_lat'] = lat_lon[0]
        location_data['location_lng'] = lat_lon[1]
        return location_data

    def device_data_from_request(self, request: HttpRequest) -> dict:
        """Extract OS/device fields from the request's parsed user agent."""
        device_data: dict = {}
        user_agent = get_user_agent(request)
        if not user_agent:
            return device_data

        device_data['os_name'] = user_agent.os.family
        device_data['os_version'] = user_agent.os.version_string
        device_data['platform'] = user_agent.device.family
        # device_data['device_brand']
        device_data['device_manufacturer'] = user_agent.device.brand
        device_data['device_model'] = user_agent.device.model
        return device_data
|
11501674
|
from sevenbridges.meta.fields import HrefField, StringField
from sevenbridges.meta.resource import Resource
class Division(Resource):
    """
    Central resource for managing divisions.
    """
    _URL = {
        'query': '/divisions',
        'get': '/divisions/{id}',
    }

    # Server-assigned fields; all read-only.
    href = HrefField(read_only=True)
    id = StringField(read_only=True)
    name = StringField(read_only=True)

    def __str__(self):
        return f'<Division: id={self.id}>'

    def __eq__(self, other):
        # Divisions are equal when they are the same object or share an id.
        # NOTE(review): defining __eq__ without __hash__ normally makes
        # instances unhashable -- confirm the Resource base restores __hash__.
        if type(other) is not type(self):
            return False
        return self is other or self.id == other.id

    @classmethod
    def query(cls, offset=None, limit=None, api=None):
        """
        Query (List) divisions.
        :param offset: Pagination offset.
        :param limit: Pagination limit.
        :param api: Api instance.
        :return: Collection object.
        """
        api = api if api else cls._API
        return super()._query(
            url=cls._URL['query'], offset=offset, limit=limit,
            fields='_all', api=api
        )

    def get_teams(self, offset=None, limit=None):
        """Return the teams belonging to this division."""
        return self._api.teams.query(
            division=self.id, offset=offset, limit=limit
        )

    def get_members(self, role=None, offset=None, limit=None):
        """Return this division's users, optionally filtered by role."""
        return self._api.users.query(self, role=role, offset=offset,
                                     limit=limit)
|
11501684
|
import os
import PureCloudPlatformClientV2
from PureCloudPlatformClientV2.rest import ApiException
print('-----------------------------------------------------------------')
print('- Python3 Get Number of On-Queue Agents using Genesys Cloud SDK -')
print('-----------------------------------------------------------------')

# Credentials (read from the environment; raises KeyError when missing).
CLIENT_ID = os.environ['GENESYS_CLOUD_CLIENT_ID']
CLIENT_SECRET = os.environ['GENESYS_CLOUD_CLIENT_SECRET']
ORG_REGION = os.environ['GENESYS_CLOUD_REGION']  # eg. us_east_1

# Set environment: pick the API host for the org's region.
region = PureCloudPlatformClientV2.PureCloudRegionHosts[ORG_REGION]
PureCloudPlatformClientV2.configuration.host = region.get_api_host()

# OAuth when using Client Credentials
api_client = PureCloudPlatformClientV2.api_client.ApiClient() \
    .get_client_credentials_token(CLIENT_ID, CLIENT_SECRET)

# Create an instance of the Routing API and Analytics API
routing_api = PureCloudPlatformClientV2.RoutingApi(api_client)
analytics_api = PureCloudPlatformClientV2.AnalyticsApi(api_client)
def get_on_queue_agents(queue_name):
    """ Get number of agents active on a queue given the name of the queue.

    Args:
        queue_name (str): Name of the Queue.

    Returns:
        int: Number of agents 'on-queue', or -1 when the queue cannot be resolved.
    """
    on_queue_agents = 0

    # Search for the routing id of the queue.
    try:
        api_response = routing_api.get_routing_queues(name=queue_name)
    except ApiException as e:
        print("Error on RoutingAPI -> " + str(e))
        # Fix: the original fell through and queried analytics with queue_id=0.
        return -1

    if not api_response.entities:
        print("Queue not found.")
        return -1
    if len(api_response.entities) > 1:
        print("Found more than one queue with the name. Getting the first one.")
    # Fix: the original never assigned queue_id in the multi-match branch,
    # leaving it at 0; always take the first returned entity.
    queue_id = api_response.entities[0].id

    # Count the 'on-queue' agents on the queue.
    try:
        # Build analytics query
        query = PureCloudPlatformClientV2.QueueObservationQuery()
        query.metrics = ['oOnQueueUsers']
        query.filter = PureCloudPlatformClientV2.ConversationAggregateQueryFilter()
        query.filter.type = 'or'
        query.filter.clauses = [PureCloudPlatformClientV2.ConversationAggregateQueryClause()]
        query.filter.clauses[0].type = 'or'
        query.filter.clauses[0].predicates = [PureCloudPlatformClientV2.ConversationAggregateQueryPredicate()]
        query.filter.clauses[0].predicates[0].dimension = 'queueId'
        query.filter.clauses[0].predicates[0].value = queue_id

        # Execute analytics query
        query_result = analytics_api.post_analytics_queues_observations_query(query)
        result_data = query_result.results[0].data
        on_queue_agents = result_data[0].stats.count if result_data else 0
    except ApiException as e:
        # Fix: this branch guards the Analytics API, not the Routing API, and
        # str(e) is always safe where e.body may be unset.
        print("Error on AnalyticsAPI -> " + str(e))

    return on_queue_agents
if __name__ == "__main__":
    # Simple CLI driver: ask for a queue and report its on-queue agent count.
    name = input("Enter queue name: ")
    count = get_on_queue_agents(name)
    print(f'Number of agents in "{name}": {count}')
|
11501698
|
import os
import requests
import pickle
from git import Repo
from datetime import datetime
from jinja2 import Template,Environment, FileSystemLoader
dir_path = os.path.dirname(os.path.realpath(__file__))
def fetchjson(urlstr):
    """Issue a GET request to *urlstr* and return the decoded JSON body."""
    return requests.get(url=urlstr).json()
# Load the repo cache from disk; start fresh when it is missing or corrupt.
try:
    with open(os.path.join(dir_path, 'cache.pickle'), "rb") as cache_file:
        cache = pickle.load(cache_file)
except (EnvironmentError, EOFError):
    cache = {'last_run': datetime(2000, 1, 1).strftime('%Y-%m-%dT%H:%M:%SZ')}

last_run = datetime.strptime(cache['last_run'], '%Y-%m-%dT%H:%M:%SZ')
# Clone or refresh every GitHub repo matching "scoop buckets", then record
# the .json manifest names each repo contains.
i = 0  # number of repos cloned or pulled this run
for repo in fetchjson('https://api.github.com/search/repositories?q=scoop+buckets&per_page=500')['items']:
    name = repo['name']
    # Flatten "owner/repo" into a filesystem-safe folder name.
    repofoldername = repo['full_name'].replace('/','+')
    git_clone_url = repo['git_url']
    html_url = repo['html_url']
    repo_score = repo['score']
    last_updated = datetime.strptime(repo['updated_at'],'%Y-%m-%dT%H:%M:%SZ')
    if(not repofoldername in cache):
        #Delete folder if exists
        #clone repo to cache folder
        i += 1
        Repo.clone_from(git_clone_url, os.path.join(dir_path,'cache',repofoldername))
        cache[repofoldername] = {'name':name,'url':html_url,'score':float(repo_score),'entries':[]}
    elif repofoldername in cache and (last_updated > last_run ):
        # Repo changed since the last run: pull the latest commits.
        # NOTE: rebinds the loop variable `repo`; harmless, it is reassigned
        # on the next iteration.
        i += 1
        repo = Repo(os.path.join(dir_path, 'cache', repofoldername))
        o = repo.remotes.origin
        o.pull()
    if(not os.path.isdir(os.path.join(dir_path,'cache',repofoldername))):
        continue
    # Rebuild the manifest list from the top-level .json files in the clone.
    cache[repofoldername]['entries'] = []
    for f in os.listdir(os.path.join(dir_path,'cache',repofoldername)):
        file_path = os.path.join(dir_path,'cache',repofoldername,f)
        if(os.path.isfile(file_path) and os.path.splitext(file_path)[1] == '.json'):
            cache[repofoldername]['entries'].append(os.path.splitext(f)[0])

#update last run (truncated to midnight)
cache['last_run'] = datetime.strftime(datetime.now().replace(hour=0, minute=0, second=0),'%Y-%m-%dT%H:%M:%SZ')
# Persist the cache; best-effort, failures are ignored.
try:
    with open(os.path.join(dir_path,'cache.pickle'), "wb") as input_file:
        pickle.dump(cache,input_file)
except EnvironmentError:
    pass
print(i,' repos updated')
#Sort Repos by github score, keeping only repos that have entries
repos = [repo for repo in cache.keys()]
actual_repos = [ repo for repo in repos if (repo != 'last_run' and len(cache[repo]['entries']) > 0) ]
# Fix: sorted() returns a new list and its result was discarded, so the
# ordering never applied; sort in place instead.
actual_repos.sort(key=lambda repo: cache[repo]['score'])
# Fix: missing space between the count and the message.
print(str(len(actual_repos)) + ' valid repositories found.')
#Update Readme file by rendering the Jinja2 template with the sorted repos.
TEMPLATE_ENVIRONMENT = Environment(
    autoescape=False,
    loader=FileSystemLoader(os.path.join(dir_path, 'template')),
    trim_blocks=False)
context = {
    'sortedrepos':actual_repos,
    'cache': cache
}
markdown_content = TEMPLATE_ENVIRONMENT.get_template('ReadmeTemplate.tpl').render(context)
# README.md lives one directory above this script.
with open(os.path.join(dir_path,'..','README.md'), "w") as readme_file:
    readme_file.write(markdown_content)
print('[INFO] Script Finished...')
|
11501710
|
class Solution:
    def kthGrammar(self, N: int, K: int) -> int:
        """Return the K-th symbol (1-indexed) in row N of the 0/1 replacement grammar."""
        if N <= 1:
            return 0
        # Each symbol is its parent when K is odd, the parent flipped when K is even.
        parent = self.kthGrammar(N - 1, (K + 1) // 2)
        return parent ^ ((K - 1) % 2)
|
11501734
|
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from pathlib import Path
from typing import Dict, Optional
import pandas
import pyarrow
import pyarrow.parquet as pq
from nuplan.planning.metrics.metric_dataframe import MetricStatisticsDataFrame
class AbstractMetricAggregator(metaclass=ABCMeta):
"""Interface for metric aggregator"""
@property
@abstractmethod
def name(self) -> str:
"""
Returns the metric aggregator name
:return the metric aggregator name.
"""
pass
@property
@abstractmethod
def final_metric_score(self) -> Optional[float]:
"""Returns the final metric score."""
pass
@abstractmethod
def __call__(self, metric_dataframes: Dict[str, MetricStatisticsDataFrame]) -> None:
"""
Run an aggregator to generate an aggregated parquet file
:param metric_dataframes: A dictionary of metric name and dataframe.
"""
pass
@staticmethod
def _save_with_metadata(dataframe: pandas.DataFrame, save_path: Path, metadata: Dict[str, str]) -> None:
"""
Save to a parquet file with additional metadata using pyarrow
:param dataframe: Pandas dataframe
:param save_path: Path to save the dataframe.
"""
pyarrow_table = pyarrow.Table.from_pandas(df=dataframe)
schema_metadata = pyarrow_table.schema.metadata
schema_metadata.update(metadata)
updated_schema = pyarrow_table.schema.with_metadata(schema_metadata)
pyarrow_table = pyarrow_table.cast(updated_schema)
pq.write_table(pyarrow_table, str(save_path))
@staticmethod
def _save_parquet(dataframe: pandas.DataFrame, save_path: Path) -> None:
"""
Save dataframe to a parquet file
:param dataframe: Pandas dataframe
:param save_path: Path to save the dataframe.
"""
dataframe.to_parquet(str(save_path))
@abstractmethod
def read_parquet(self) -> None:
"""Read a parquet file, and update the dataframe."""
pass
|
11501744
|
from __future__ import print_function
"""
.. module:: grammaregex
:platform: Unix, Windows, Linux
:synopsis: A useful module for processing sentences(in tree form) by grammar patterns.
.. moduleauthor:: <NAME> <<EMAIL>>
"""
import re
class PatternSyntaxException(Exception):
    """Raised when a grammar pattern does not follow the expected syntax."""

    def __init__(self, pattern):
        self.pattern = pattern

    def __str__(self):
        return repr("Error in syntax of provided pattern ({})".format(self.pattern))
def _match_token(t, p, isEdge):
p = p.strip()
if p[0] == "!":
return not _match_token(t, p[1:], isEdge)
elif p[0] == "[":
return any(_match_token(t, _p, isEdge) for _p in p[1:-1].split(","))
elif p == "*" or p == "**":
return True
elif isEdge:
return p == t.dep_
else:
return p == t.tag_ or p == t.pos_ or p == t.ent_type_ or p == t.lemma_
def verify_pattern(pattern):
    """Verifies if pattern for matching and finding fulfill expected structure.

    :param pattern: string pattern to verify
    :return: True if pattern has proper syntax, False otherwise
    """
    token_re = re.compile("^!?[a-zA-Z]+$|[*]{1,2}$")

    def _check(part):
        # Empty elements (e.g. from "a//b") are invalid.
        if not part:
            return False
        if part[0] == "!":
            return _check(part[1:])
        if part[0] == "[" and part[-1] == "]":
            return all(_check(alt) for alt in part[1:-1].split(","))
        return token_re.match(part)

    return all(_check(element) for element in pattern.split("/"))
def print_tree(sent, token_attr):
    """Prints sentences tree as string using token_attr from token(like pos_, tag_ etc.)

    :param sent: sentence to print
    :param token_attr: choosen attr to present for tokens(e.g. dep_, pos_, tag_, ...)
    """
    def __print_sent__(token, attr):
        # Depth-first walk: left children, then the token itself, then right
        # children, each subtree wrapped in "{ ... }".
        print("{", end=" ")
        [__print_sent__(t, attr) for t in token.lefts]
        # Falls back to tag_ when no attribute name was supplied.
        print(u"%s->%s(%s)" % (token,token.dep_,token.tag_ if not attr else getattr(token, attr)), end="")
        [__print_sent__(t, attr) for t in token.rights]
        print("}", end=" ")
    return __print_sent__(sent.root, token_attr)
def match_tree(sentence, pattern):
    """Matches given sentence with provided pattern.

    :param sentence: sentence from Spacy(see: http://spacy.io/docs/#doc-spans-sents) representing complete statement
    :param pattern: pattern to which sentence will be compared
    :return: True if sentence match to pattern, False otherwise
    :raises: PatternSyntaxException: if pattern has wrong syntax
    """
    if not verify_pattern(pattern):
        raise PatternSyntaxException(pattern)

    def _match_node(t, p):
        # Consume one node element; an exhausted pattern always matches.
        pat_node = p.pop(0) if p else ""
        return not pat_node or (_match_token(t, pat_node, False) and _match_edge(t.children,p))

    def _match_edge(edges,p):
        # Consume one edge element and try it against every child edge.
        pat_edge = p.pop(0) if p else ""
        if not pat_edge:
            return True
        elif not edges:
            return False
        else:
            for (t) in edges:
                # Copy the remaining pattern so each child is tried independently.
                if (_match_token(t, pat_edge, True)) and _match_node(t, list(p)):
                    return True
                # "**" may skip a level and keep matching deeper.
                elif pat_edge == "**" and _match_edge(t.children, ["**"] + p):
                    return True
            return False
    return _match_node(sentence.root, pattern.split("/"))
def find_tokens(sentence, pattern):
    """Find all tokens from parts of sentence fitted to pattern, being on the end of matched sub-tree(of sentence)

    :param sentence: sentence from Spacy(see: http://spacy.io/docs/#doc-spans-sents) representing complete statement
    :param pattern: pattern to which sentence will be compared
    :return: Spacy tokens(see: http://spacy.io/docs/#token) found at the end of pattern if whole pattern match
    :raises: PatternSyntaxException: if pattern has wrong syntax
    """
    if not verify_pattern(pattern):
        raise PatternSyntaxException(pattern)

    def _match_node(t, p, tokens):
        # Consume one node element; collect the token when the pattern ends here.
        pat_node = p.pop(0) if p else ""
        res = not pat_node or (_match_token(t, pat_node, False) and (not p or _match_edge(t.children, p, tokens)))
        if res and not p:
            tokens.append(t)
        return res

    def _match_edge(edges,p, tokens):
        # Try the edge element against every child; unlike match_tree this
        # explores all branches so every terminal match is collected.
        pat_edge = p.pop(0) if p else ""
        if pat_edge:
            for (t) in edges:
                if _match_token(t, pat_edge, True):
                    _match_node(t, list(p), tokens)
                if pat_edge == "**":
                    # "**" may also skip this level and match deeper.
                    _match_edge(t.children, ["**"] + p, tokens)

    result_tokens = []
    _match_node(sentence.root, pattern.split("/"), result_tokens)
    return result_tokens
|
11501768
|
import unittest
from index_power import index_power
class Tests(unittest.TestCase):
    """Table-driven tests for index_power.

    Each case maps an ``input`` tuple ``(array, n)`` to the expected value of
    ``array[n] ** n``, or -1 when ``n`` is outside the array's bounds.
    """

    TESTS = {
        "Basics": [
            {"input": ([1, 2, 3, 4], 2), "answer": 9},
            {"input": ([1, 3, 10, 100], 3), "answer": 1_000_000},
            {"input": ([0, 1], 0), "answer": 1},
            {"input": ([1, 2], 3), "answer": -1},
        ],
        "Extra": [
            {"input": ([0], 0), "answer": 1},
            {"input": ([1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 9), "answer": 1},
            {
                "input": ([1, 1, 1, 1, 1, 1, 1, 1, 1, 100], 9),
                "answer": 1_000_000_000_000_000_000,
            },
            {"input": ([29, 82, 45, 10], 3), "answer": 1000},
            {"input": ([6, 31], 3), "answer": -1},
            {"input": ([75, 68, 35, 61, 9, 36, 89, 0, 30], 10), "answer": -1},
            {"input": ([29, 44, 50, 92, 56, 86], 2), "answer": 2500},
            {"input": ([86, 41, 89, 53, 16, 15, 31, 63, 40], 6), "answer": 887_503_681},
            {
                "input": ([73, 26, 11, 3, 74, 94, 10, 10, 81, 63], 4),
                "answer": 29_986_576,
            },
            {"input": ([96, 92, 94], 3), "answer": -1},
            {"input": ([42, 69, 86, 55, 30, 35, 28, 84, 61, 40], 17), "answer": -1},
            {"input": ([7, 36, 82, 38, 50, 47, 62, 44], 6), "answer": 56_800_235_584},
            {
                "input": ([68, 81, 3, 10, 96, 67, 55, 83, 63, 11], 9),
                "answer": 2_357_947_691,
            },
            {"input": ([47, 77, 80, 48, 40, 21, 65], 1), "answer": 77},
            {"input": ([28, 30, 48, 89, 31, 66], 4), "answer": 923_521},
            {"input": ([71, 53, 51, 75, 16, 33, 88, 5], 3), "answer": 421_875},
            {"input": ([74, 40, 3, 90, 17, 62, 14], 0), "answer": 1},
            {"input": ([23, 61, 56, 93], 0), "answer": 1},
            {"input": ([31, 53, 11, 79, 3, 95, 40, 2], 4), "answer": 81},
            {"input": ([43, 61, 8, 12, 31, 10, 34, 52], 5), "answer": 100_000},
            {"input": ([32, 25, 93, 1], 2), "answer": 8649},
            {"input": ([2, 56, 73, 54, 88], 4), "answer": 59_969_536},
            {
                "input": ([65, 18, 93, 94, 36, 21, 65, 95, 30, 43], 6),
                "answer": 75_418_890_625,
            },
            {
                "input": ([79, 70, 88, 19, 12, 92, 27, 52, 48], 5),
                "answer": 6_590_815_232,
            },
            {"input": ([72, 3, 8, 25, 15, 16], 1), "answer": 3},
        ],
    }

    def test_Basics(self):
        """Run every 'Basics' table case; the failing input is shown on assert."""
        for i in self.TESTS['Basics']:
            assert index_power(*i['input']) == i['answer'], i['input']

    def test_Extra(self):
        """Run every 'Extra' table case; the failing input is shown on assert."""
        for i in self.TESTS['Extra']:
            assert index_power(*i['input']) == i['answer'], i['input']
|
11501770
|
import discord
from discord.ext import commands
import os
# Bot token comes from the environment; raises KeyError when missing.
TOKEN = os.environ['TOKEN']

intents = discord.Intents.all()  # need to enable (privileged intents in the dev portal)
bot = commands.Bot(command_prefix='~', intents=intents)

# Load every cog module found under ./cogs/<folder>/, skipping helpers.
for foldername in os.listdir('./cogs'):
    # Fix: the original listed `foldername` relative to the CWD, not the
    # folder inside ./cogs, so cogs were never found (or the wrong dir was read).
    for filename in os.listdir(f'./cogs/{foldername}'):
        if filename.endswith('.py') and filename not in ('util.py', 'error.py'):
            bot.load_extension(f'cogs.{foldername}.{filename[:-3]}')

bot.run(TOKEN)
|
11501778
|
import os
from contentful_management import Client
# CMA token for the test space; 'foobar' is a harmless placeholder when unset.
PLAYGROUND_KEY = os.environ.get('CF_TEST_CMA_TOKEN', 'foobar')
PLAYGROUND_SPACE = 'facgnwwgj5fe'
PLAYGROUND_ORG = 'some_org'
# gzip disabled -- presumably so recorded HTTP fixtures stay readable; confirm.
CLIENT = Client(PLAYGROUND_KEY, gzip_encoded=False)
|
11501785
|
import os
# Workaround for PyTorch spawning too many threads
os.environ['OMP_NUM_THREADS'] = '4'
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
import argparse
import pathlib
import math
from rendering.parallel_rasterizer import ParallelKaolinRasterizer
from rendering.mesh_template import MeshTemplate
from utils.losses import loss_flat, LaplacianLoss
from rendering.utils import qrot, qmul
from data.definitions import class_names
from skimage.segmentation import flood_fill
# Command-line configuration for the remeshing script.
parser = argparse.ArgumentParser()
parser.add_argument('--mesh_path', type=str, default='autodetect', help='path to initial mesh template')
parser.add_argument('--image_resolution', type=int, default=256)
parser.add_argument('--symmetric', type=bool, default=True)
parser.add_argument('--gpu_ids', type=str, default='0', help='comma-separated')
parser.add_argument('--mode', type=str, required=True, help='single or multiple templates (singletpl|multitpl)')
parser.add_argument('--classes', type=str, default='all', help='all (default), or comma-separated list')
args = parser.parse_args()

# Pin the current CUDA device to the lowest requested GPU id.
gpu_ids = [int(x) for x in args.gpu_ids.split(',')]
print('Using {} GPUs: {}'.format(len(gpu_ids), gpu_ids))
torch.cuda.set_device(min(gpu_ids))

assert args.mode in ['multitpl', 'singletpl']
multi_template = args.mode == 'multitpl'

if args.mesh_path == 'autodetect':
    args.mesh_path = 'mesh_templates/uvsphere_31rings.obj'
print('Using initial mesh topology', args.mesh_path)
def render_views(mesh, raw_vtx, rot, hardmask=False, closure=None, **kwargs):
    """Render alpha masks of `raw_vtx` from multiple viewpoints.

    :param mesh: MeshTemplate whose forward_renderer does the rasterization
    :param raw_vtx: (batch, num_vertices, 3) vertex positions
    :param rot: (batch or 1, num_views, 4) rotation quaternions
    :param hardmask: request hard (binary) masks from the renderer
    :param closure: optional callback forwarded to the renderer; when given,
        its result is returned unchanged
    :return: (batch, num_views, ...) alpha masks, or the closure's result
    """
    assert raw_vtx.shape[0] == rot.shape[0]
    assert len(raw_vtx.shape) == 3
    assert len(rot.shape) == 3
    assert raw_vtx.shape[-1] == 3
    assert rot.shape[-1] == 4

    bs = rot.shape[0]
    num_views = rot.shape[1]
    # Pair every mesh with every view: both tensors become (bs*num_views, ...).
    rot = rot.expand(raw_vtx.shape[0], -1, -1).flatten(0, 1)
    raw_vtx = raw_vtx.unsqueeze(1).expand(-1, num_views, -1, -1).flatten(0, 1)
    vtx = qrot(rot, raw_vtx) / math.sqrt(2)
    # Axis flip -- presumably the rasterizer's camera convention; confirm.
    vtx = vtx * torch.Tensor([1, -1, -1]).to(vtx.device)
    tex = None  # silhouettes only; no texture needed
    ret = mesh.forward_renderer(renderer, vtx, tex, return_hardmask=hardmask, num_gpus=len(gpu_ids),
                                closure=closure, **kwargs)
    if closure is None:
        pred_rgb, pred_alpha = ret
        # Restore the (batch, views) layout before returning.
        pred_alpha = pred_alpha.view(bs, num_views, *pred_alpha.shape[1:])
        return pred_alpha
    else:
        return ret
# Alpha-only rasterizer, replicated across the requested GPUs.
renderer = nn.DataParallel(ParallelKaolinRasterizer(args.image_resolution, mode='alpha'), gpu_ids)

if args.classes == 'all':
    selected_classes = class_names
else:
    selected_classes = args.classes.split(',')
    for cl in selected_classes:
        assert cl in class_names, f'Invalid class {cl}'

classes = {}
class_is_aligned = {}
for cl in selected_classes:
    classes[cl] = []
    class_is_aligned[cl] = False

# The mesh templates of animals are already pre-aligned, there is no need to find optimal alignment
aligned_classes = ['bird', 'sheep', 'elephant', 'zebra', 'horse', 'cow', 'bear', 'giraffe']
for cl in aligned_classes:
    if cl in class_is_aligned:
        class_is_aligned[cl] = True

# Load mesh templates for each class: files are named <class>1.obj, <class>2.obj, ...
# and loading stops at the first missing suffix (or after one file in single-template mode).
for cl in classes.keys():
    for suf in range(1, 100):
        fname = f'mesh_templates/classes/{cl}{suf}.obj'
        if os.path.isfile(fname):
            classes[cl].append(fname)
            if not multi_template:
                # Load only first template
                break
        else:
            break

# Print summary
print('----------- Summary of selected classes -----------')
for k, v in classes.items():
    print(f'{k}: loaded {len(v)} template(s)')
print('---------------------------------------------------')

output_dir = f'cache/remeshed_templates/{args.mode}'
pathlib.Path(output_dir).mkdir(parents=True, exist_ok=True)
print('Multi-template setting:', multi_template)
print('Output dir:', output_dir)
for selected_class in classes.keys():
print('Processing', selected_class)
mesh_paths = classes[selected_class]
torch.manual_seed(1)
target = []
initial_scaling = None
num_views = 64
is_aligned = class_is_aligned[selected_class]
target_rots = F.normalize(torch.randn(len(mesh_paths), num_views, 4), dim=-1).cuda() # Random viewpoints
templates = []
bbox_min = None
bbox_max = None
# Load every template of this class and accumulate the joint bounding box.
for i, mesh_path in enumerate(mesh_paths):
    source_mesh = MeshTemplate(mesh_path, is_symmetric=False)
    templates.append(source_mesh)
    vertices = source_mesh.mesh.vertices
    with torch.no_grad():
        bbox_lower = vertices.min(dim=0, keepdim=True)[0]
        bbox_higher = vertices.max(dim=0, keepdim=True)[0]
        if i == 0:
            bbox_min = bbox_lower
            bbox_max = bbox_higher
        else:
            bbox_min = torch.min(bbox_lower, bbox_min)
            # Fix: the running maximum must track bbox_higher; the original
            # compared against bbox_lower, shrinking the joint bounding box.
            bbox_max = torch.max(bbox_higher, bbox_max)
for i, source_mesh in enumerate(templates):
# Add backfaces
source_mesh.mesh.faces = torch.cat((source_mesh.mesh.faces, source_mesh.mesh.faces[..., [2, 1, 0]]), dim=0)
# Dummy UVs
source_mesh.mesh.face_textures = torch.zeros_like(source_mesh.mesh.faces)
source_mesh.mesh.uvs = torch.zeros((1, 2), device=source_mesh.mesh.face_textures.device)
# Normalize source mesh
def normalize_vertices(vertices):
with torch.no_grad():
if is_aligned:
bbox_lower = bbox_min
bbox_higher = bbox_max
else:
bbox_lower = vertices.min(dim=0, keepdim=True)[0]
bbox_higher = vertices.max(dim=0, keepdim=True)[0]
center = (bbox_lower + bbox_higher)/2
center[..., 0] = 0 # No left-right shift
vertices -= center
vertices /= vertices.abs().max()
return vertices.abs().max(dim=0)[0]
scaling = normalize_vertices(source_mesh.mesh.vertices)
if initial_scaling is None:
initial_scaling = scaling
else:
initial_scaling = torch.max(initial_scaling, scaling)
with torch.no_grad():
mesh_targets = render_views(source_mesh, source_mesh.mesh.vertices.unsqueeze(0), target_rots[i:i+1], hardmask=True)
target.append(mesh_targets)
target = torch.cat(target, dim=0)
# Fill holes/gaps that might mess up the result
images = []
for im in target.cpu().flatten(0, 1).numpy():
images.append(torch.FloatTensor(flood_fill(im[0], (0, 0), 1)))
target = target + (1 - torch.stack(images, dim=0).view(target.shape).cuda())
mesh_template = MeshTemplate(args.mesh_path, is_symmetric=True)
def pdist(vertices):
# Sparse L2 mode
dists = (vertices.unsqueeze(0) - vertices.unsqueeze(1)).norm(dim=-1)
return dists.mean()
# Mesh to optimize
source = mesh_template.mesh.vertices.clone().unsqueeze(0).expand(target.shape[0], -1, -1).contiguous().requires_grad_()
print(source.shape, target.shape)
alignment_t = torch.zeros(source.shape[0], 1, 3).cuda().requires_grad_()
alignment_s1 = torch.ones(1, 1, 3).cuda().requires_grad_()
alignment_s2 = torch.ones(source.shape[0], 1, 1).cuda().requires_grad_()
alignment_s1.data *= initial_scaling
pdist_t = torch.zeros(source.shape[0], 1, 3).cuda().requires_grad_()
pdist_s = torch.ones(source.shape[0], 1, 1).cuda().requires_grad_()
# Find optimal rigid alignment between meshes before actually optimizing individual vertices
# (helps with local minima)
lr = 0.0001
optimizer = optim.SGD([alignment_t, alignment_s1, alignment_s2, pdist_t, pdist_s], lr=lr, momentum=0.9)
criterion = nn.L1Loss()
pdist_coeff = 0.001 if multi_template else 0
print('Computing alignment...')
for i in range(1000):
optimizer.zero_grad()
renderer.module.set_sigma_mul(1.0)
source_translated = alignment_s1*alignment_s2*source.detach() + alignment_t
pred = render_views(mesh_template, source_translated, target_rots)
recon_loss = criterion(pred, target)
pdist_loss = pdist(source_translated*pdist_s + pdist_t)
loss = recon_loss + pdist_coeff*pdist_loss
loss.backward()
alignment_t.grad /= alignment_t.grad.norm(dim=-1, keepdim=True) + 1e-6
alignment_s1.grad /= alignment_s1.grad.norm(dim=-1, keepdim=True) + 1e-6
alignment_s2.grad /= alignment_s2.grad.norm(dim=-1, keepdim=True) + 1e-6
if source.shape[0] > 1 and pdist_coeff > 0 and multi_template:
pdist_t.grad /= pdist_t.grad.norm(dim=-1, keepdim=True) + 1e-6
pdist_s.grad /= pdist_s.grad.norm() + 1e-6
optimizer.step()
# Reproject to enforce symmetry
with torch.no_grad():
alignment_t.data[..., 0] = 0
pdist_t.data[..., 0] = 0
if multi_template and not is_aligned:
pdist_s.data /= pdist_s.data.max()
pdist_s.data.clamp_(min=0.8) # Avoid extreme scales
pdist_t.data -= pdist_t.data.mean(dim=0, keepdim=True) # Re-center
else:
pdist_s.data[:] = 1
pdist_t.data[:] = 0
if is_aligned or not multi_template:
alignment_s2.data[:] = 1
if i % 100 == 0:
print('[{}] lr {:.5f} recon {:.5f} pdist {:.5f}'.format(i, lr,
recon_loss.item(), pdist_loss.item()))
print(loss.item())
# Perform alignment
with torch.no_grad():
source.data[:] = source.data * alignment_s1.data * alignment_s2.data + alignment_t.data
alignment_s1.data[:] = 1
alignment_s2.data[:] = 1
alignment_t.data[:] = 0
# Reset
alignment_t = torch.zeros(source.shape[0], 1, 3).cuda().requires_grad_()
alignment_s = torch.ones(source.shape[0], 1, 1).cuda().requires_grad_()
# Optimize vertices
lr = 0.0001
optimizer = optim.SGD([source, alignment_t, alignment_s, pdist_t, pdist_s], lr=lr, momentum=0.9)
criterion = nn.MSELoss()
grid_laplacian, uv_connectivity = mesh_template.compute_grid_laplacian()
lap_regularizer = LaplacianLoss(grid_laplacian).cuda()
def length_regularizer(faces, vertices):
grid_positions = source[:, uv_connectivity]
tv_y = (grid_positions[:, 1:, :] - grid_positions[:, :-1, :]).abs()
tv_x = (grid_positions[:, :, 1:] - grid_positions[:, :, :-1]).abs()
return tv_x.mean() + tv_y.mean()
inv_mask = torch.FloatTensor([-1, 1, 1]).to(source.device) # Symmetry mask
loss_curve = []
lap_coeff = 0.003
len_coeff = 0.01
pdist_coeff = 0.001 if multi_template else 0
sigma_mul = 1
lr_warmup = True
lr_warmup_stop = 0.0005
print('Optimizing vertices...')
for i in range(100000):
optimizer.zero_grad()
renderer.module.set_sigma_mul(sigma_mul)
source_translated = alignment_s*source + alignment_t
pred = render_views(mesh_template, source_translated, target_rots)
recon_loss = criterion(pred, target)
flat_loss = loss_flat(mesh_template.mesh, mesh_template.compute_normals(source))
length_loss = length_regularizer(mesh_template.mesh.faces, source_translated)
laplacian_loss = lap_regularizer(source_translated).mean()
pdist_loss = pdist(source_translated*pdist_s + pdist_t)
loss = recon_loss + 0.00001*flat_loss + len_coeff*length_loss + pdist_coeff*pdist_loss + lap_coeff*laplacian_loss
loss.backward()
source.grad /= source.grad.norm(dim=2, keepdim=True) + 1e-6
alignment_t.grad /= alignment_t.grad.norm(dim=-1, keepdim=True) + 1e-6
alignment_s.grad /= alignment_s.grad.norm(dim=-1, keepdim=True) + 1e-6
if source.shape[0] > 1 and pdist_coeff > 0 and multi_template:
pdist_t.grad /= pdist_t.grad.norm(dim=-1, keepdim=True) + 1e-6
pdist_s.grad /= pdist_s.grad.norm() + 1e-6
optimizer.step()
# Reproject to enforce symmetry
with torch.no_grad():
if args.symmetric:
avg_lr = (source[:, mesh_template.pos_indices] + source[:, mesh_template.neg_indices]*inv_mask)/2
avg_lr[avg_lr[..., 0] < 0] *= inv_mask # Avoid violations across symmetry axis
source.data[:, mesh_template.pos_indices] = avg_lr
source.data[:, mesh_template.neg_indices] = avg_lr*inv_mask
source.data *= mesh_template.symmetry_mask
alignment_t.data[..., 0] = 0
pdist_t.data[..., 0] = 0
if multi_template and not is_aligned:
pdist_s.data /= pdist_s.data.max()
pdist_s.data.clamp_(min=0.8) # Avoid extreme scales
pdist_t.data -= pdist_t.data.mean(dim=0, keepdim=True) # Re-center
else:
pdist_s.data[:] = 1
pdist_t.data[:] = 0
if i % 100 == 0:
print('[{}] lr {:.5f} recon {:.5f} flat {:.5f} lap {:.5f} len {:.5f} pdist {:.5f}'.format(i, lr,
recon_loss.item(), flat_loss.item(), laplacian_loss.item(), length_loss.item(), pdist_loss.item()))
if not lr_warmup:
decay_rate = 0.9999
for param_group in optimizer.param_groups:
param_group['lr'] *= decay_rate
lr *= decay_rate
lap_coeff *= decay_rate
sigma_mul *= decay_rate
else:
lr_delta = 0.000001
for param_group in optimizer.param_groups:
param_group['lr'] += lr_delta
lr += lr_delta
if lr >= lr_warmup_stop:
lr_warmup = False
if lr < 1e-4:
break
print(loss.item())
# Perform alignment
with torch.no_grad():
source.data[:] = source.data * alignment_s.data + alignment_t.data
alignment_s.data[:] = 1
alignment_t.data[:] = 0
# Align different templates (has an effect only in multi-template setting)
with torch.no_grad():
source.data[:] = source.data * pdist_s.data + pdist_t.data
pdist_s.data[:] = 1
pdist_t.data[:] = 0
# Post-normalization: ensure that longest side is 1 & re-center.
source_post = source.detach().clone()
bbox_lower = source_post.flatten(0, 1).min(dim=0, keepdim=True)[0]
bbox_higher = source_post.flatten(0, 1).max(dim=0, keepdim=True)[0]
center = (bbox_lower + bbox_higher)/2
source_post -= center
source_post /= source_post.abs().max()
# Save result as PyTorch tensor
torch.save(source_post.cpu(), f'{output_dir}/{selected_class}_templates.pth')
# Save result as .obj (not used in practice, but useful for debugging)
mesh_template.export_obj(f'{output_dir}/{selected_class}_templates', source_post.detach().cpu(), texture=None)
print('Saved.')
print('Done.')
|
11501811
|
import pytest
from tests.python.test_common import data as data
from tests.python.test_common import info as info
from tests.python.test_common import dataWithoutPresModelWithDictionary as dataWithoutPresModelWithDictionary
from tests.python.test_common import storyPointsInfo as storyPointsInfo
from tests.python.test_common import storyPointsCmdResponse as storyPointsCmdResponse
from tableauscraper import utils
from tableauscraper import dashboard
from tableauscraper.TableauWorksheet import TableauWorksheet
from tableauscraper.TableauWorkbook import TableauWorkbook
from tableauscraper import TableauScraper as TS
def test_getWorkbook(monkeypatch):
    """dashboard.get returns a TableauWorkbook; the prompted input selects
    either all worksheets (empty answer) or a single one (index answer).

    Fix: removed a duplicated assertion on the first worksheet's name.
    """
    ts = TS()
    # all worksheets: an empty answer keeps every worksheet
    monkeypatch.setattr("builtins.input", lambda _: "")
    workbook = dashboard.get(ts, data, info, ts.logger)
    assert type(workbook) is TableauWorkbook
    assert len(workbook.worksheets) == 2
    first = workbook.worksheets[0]
    assert first.name == "[WORKSHEET1]"
    assert first.data.shape == (4, 2)
    assert list(first.data.columns.values) == [
        "[FIELD1]-value",
        "[FIELD2]-alias",
    ]
    second = workbook.worksheets[1]
    assert second.name == "[WORKSHEET2]"
    assert second.data.shape == (0, 0)
    # single worksheet, selected by index "0"
    monkeypatch.setattr("builtins.input", lambda _: "0")
    workbook = dashboard.get(ts, data, info, ts.logger)
    assert len(workbook.worksheets) == 1
    only = workbook.worksheets[0]
    assert only.name == "[WORKSHEET1]"
    assert only.data.shape == (4, 2)
def test_getWorksheet():
    """Fetching a single worksheet by name yields a populated TableauWorksheet,
    both for a regular dashboard and for a story-point dashboard."""
    scraper = TS()
    # regular dashboard
    ws = dashboard.getWorksheet(scraper, data, info, "[WORKSHEET1]")
    assert type(ws) is TableauWorksheet
    assert ws.name == "[WORKSHEET1]"
    assert ws.data.shape == (4, 2)
    # story point
    ws = dashboard.getWorksheet(
        scraper, dataWithoutPresModelWithDictionary, storyPointsInfo, "[WORKSHEET1]")
    assert type(ws) is TableauWorksheet
    assert ws.name == "[WORKSHEET1]"
    assert ws.data.shape == (4, 2)
def test_getWorksheets():
    """getWorksheets collects every worksheet of a dashboard into a TableauWorkbook."""
    scraper = TS()
    # regular dashboard: two worksheets, only the first is populated
    wb = dashboard.getWorksheets(scraper, data, info)
    assert type(wb) is TableauWorkbook
    assert len(wb.worksheets) == 2
    first = wb.worksheets[0]
    assert first.name == "[WORKSHEET1]"
    assert first.data.shape == (4, 2)
    assert list(first.data.columns.values) == ["[FIELD1]-value", "[FIELD2]-alias"]
    second = wb.worksheets[1]
    assert second.name == "[WORKSHEET2]"
    assert second.data.shape == (0, 0)
    # story point dashboard: a single populated worksheet
    wb = dashboard.getWorksheets(
        scraper, dataWithoutPresModelWithDictionary, storyPointsInfo)
    assert type(wb) is TableauWorkbook
    assert len(wb.worksheets) == 1
    only = wb.worksheets[0]
    assert only.name == "[WORKSHEET1]"
    assert only.data.shape == (4, 2)
    assert list(only.data.columns.values) == ["[FIELD1]-value", "[FIELD2]-alias"]
def test_getWorksheetsCmdResponse():
    """Worksheets can be extracted from a vql cmd-response (story-point case)."""
    scraper = TS()
    scraper.zones = storyPointsCmdResponse["vqlCmdResponse"]["layoutStatus"][
        "applicationPresModel"]["workbookPresModel"]["dashboardPresModel"]["zones"]
    # story point
    workbook = dashboard.getWorksheetsCmdResponse(scraper, storyPointsCmdResponse)
    assert type(workbook) is TableauWorkbook
    assert len(workbook.worksheets) == 1
    sheet = workbook.worksheets[0]
    assert sheet.name == "[WORKSHEET1]"
    assert sheet.data.shape == (4, 2)
    assert list(sheet.data.columns.values) == ["[FIELD1]-value", "[FIELD2]-alias"]
|
11501837
|
from .falcon import Event
class TranslatorError(Exception):
    """Base class for all errors raised by the event translator."""
    pass
class EventDataError(TranslatorError):
    """Raised when an incoming event lacks data required for translation (e.g. SensorId)."""
    pass
class FalconAPIDataError(TranslatorError):
    """Raised when the Falcon API returns unexpected data (zero or multiple devices)."""
    pass
class FalconCache():
    """Memoizing cache in front of the Falcon API.

    Caches per-sensor device details and MDM identifiers so that repeated
    translations of events from the same sensor do not re-query the API.
    """

    def __init__(self, falcon_api):
        self.falcon_api = falcon_api
        self._host_detail = {}  # sensor_id -> device-detail dict from the API
        self._mdm_id = {}       # sensor_id -> MDM identifier string, or None

    def device_details(self, sensor_id):
        """Return (and cache) the device-detail record for *sensor_id*.

        Raises EventDataError when sensor_id is falsy and FalconAPIDataError
        when the API returns zero or more than one device.
        """
        if not sensor_id:
            # Fix: previously this *returned* the exception instead of raising it.
            raise EventDataError("Cannot process event. SensorId field is missing: ")
        if sensor_id not in self._host_detail:
            resources = self.falcon_api.device_details(sensor_id)
            if len(resources) > 1:
                raise FalconAPIDataError(
                    'Cannot process event for device: {}, multiple devices exists'.format(sensor_id))
            if len(resources) == 0:
                raise FalconAPIDataError('Cannot process event for device {}, device not known'.format(sensor_id))
            # Fix: reuse the response we already fetched instead of issuing a
            # second, redundant API call.
            self._host_detail[sensor_id] = resources[0]
        return self._host_detail[sensor_id]

    def mdm_identifier(self, sensor_id, event_platform):
        """Return (and cache) the MDM device identifier, queried via an RTR session.

        Returns None for unsupported platforms or when the RTR command reports
        an error. Failed (None) lookups are retried on the next call.
        """
        if not sensor_id:
            # Fix: previously this *returned* the exception instead of raising it.
            raise EventDataError("Cannot process event. SensorId field is missing: ")
        if sensor_id not in self._mdm_id or self._mdm_id[sensor_id] is None:
            session = self.falcon_api.init_rtr_session(sensor_id)
            if event_platform == 'Windows':
                command = self.falcon_api.execute_rtr_command(
                    'RTR_ExecuteCommand',
                    session[0]['session_id'],
                    'reg query',
                    'reg query "HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Provisioning\\OMADM\\MDMDeviceID" DeviceClientId'
                )
                # Registry output looks like "DeviceClientId = <value>"; keep the value.
                self._mdm_id[sensor_id] = self._await_rtr_stdout(
                    command, lambda out: out.split(' = ')[1].split('\n')[0])
            elif event_platform == 'Mac':
                command = self.falcon_api.execute_rtr_command(
                    'RTR_ExecuteAdminCommand',
                    session[0]['session_id'],
                    'runscript',
                    "runscript -Raw=```system_profiler SPHardwareDataType | awk '/UUID/ { print $3; }'```"
                )
                # First line of stdout is the hardware UUID.
                self._mdm_id[sensor_id] = self._await_rtr_stdout(
                    command, lambda out: out.split('\n')[0])
            else:
                self._mdm_id[sensor_id] = None
        return self._mdm_id[sensor_id]

    def _await_rtr_stdout(self, command, parse):
        """Poll an RTR command until completion; return parse(stdout), or None on stderr.

        NOTE(review): this busy-polls with no delay between status requests —
        consider inserting a short sleep; kept as-is to preserve behavior.
        """
        response = self.falcon_api.check_rtr_command_status(command[0]['cloud_request_id'], 0)[0]
        while not response['complete']:
            response = self.falcon_api.check_rtr_command_status(command[0]['cloud_request_id'], 0)[0]
        if response['stderr']:
            return None
        return parse(response['stdout'])
class FalconEvent():
    """Convenience accessor over a raw Falcon Event, enriched with cached API data."""

    def __init__(self, original_event: Event, cache: FalconCache):
        self.original_event = original_event
        self.cache = cache

    def _event(self, key):
        # Shortcut into the 'event' sub-document of the raw event.
        return self.original_event['event'][key]

    @property
    def device_details(self):
        return self.cache.device_details(self.original_event.sensor_id)

    @property
    def mdm_identifier(self):
        details = self.cache.device_details(self.original_event.sensor_id)
        return self.cache.mdm_identifier(self.original_event.sensor_id, details['platform_name'])

    @property
    def cloud_provider(self):
        return self.device_details.get('service_provider', None)

    @property
    def cloud_provider_account_id(self):
        return self.device_details.get('service_provider_account_id')

    @property
    def instance_id(self):
        return self.device_details['instance_id']

    @property
    def falcon_link(self):
        return self._event('FalconHostLink')

    @property
    def event_id(self):
        return self._event('DetectId')

    @property
    def time(self):
        return self.original_event.creation_time

    @property
    def event_create_time(self):
        return self.original_event['metadata']['eventCreationTime']

    @property
    def severity(self):
        return self._event('SeverityName')

    @property
    def severity_value(self):
        return self._event('Severity')

    @property
    def detect_description(self):
        return self._event('DetectDescription')

    @property
    def detect_name(self):
        return self._event('DetectName')
|
11501888
|
from bayesian_benchmarks.data import regression_datasets, classification_datasets
from bayesian_benchmarks.database_utils import Database
import itertools
import os
from subprocess import call
def make_experiment_combinations(combinations: list):
    """
    The product of all combinations of arguments.
    :param combinations: A list of dictionaries, each with a list of args
    :return: A list of dictionaries for all combinations
    """
    fields = [key for spec in combinations for key in spec]
    value_lists = [spec[key] for spec in combinations for key in spec]
    return [dict(zip(fields, choice)) for choice in itertools.product(*value_lists)]
def make_local_jobs(script: str, experiments: list, overwrite=False):
    """
    Append one command per experiment to 'local_run', to be executed serially
    on a single machine, e.g.

    #!/usr/bin/env bash
    python run_regression --split=0
    python run_regression --split=1

    If overwrite=True the file is recreated with a shebang first; otherwise
    commands are appended to the existing file.
    :param script: name of python script to run
    :param experiments: list of dictionaries of args
    :return: None
    """
    if overwrite:
        with open('local_run', 'w') as f:
            f.write('#!/usr/bin/env bash\n\n')
    commands = []
    for args in experiments:
        flags = ''.join('--{}={} '.format(key, value) for key, value in args.items())
        commands.append('python {}.py {}\n'.format(script, flags))
    with open('local_run', 'a') as f:
        f.writelines(commands)
def make_condor_jobs(script: str, experiments: list, overwrite=False):
    """
    Append condor job entries to 'condor_jobs', creating the 'condor_run'
    executable first if it does not exist. Environment-setup preamble for the
    executable is read from 'preamble.txt' (if present); preamble for the
    condor submission is read from 'condor_preamble.txt.txt' when
    overwrite=True, otherwise entries are appended to the existing file.
    :param script: name of python script to run
    :param experiments: list of dictionaries of args
    :return: None
    """
    condor_run_file = 'condor_run'
    if not os.path.isfile(condor_run_file):
        with open(condor_run_file, 'w') as f:
            f.write("#!/usr/bin/env bash\n")
            preamble = 'preamble.txt'
            if os.path.isfile(preamble):
                for line in open(preamble, 'r'):
                    f.writelines(line)
            # Forward up to 19 positional arguments to python.
            tokens = ['$%d' % i for i in range(1, 10)]
            tokens += ['${%d}' % i for i in range(10, 20)]
            f.write('python ' + ' '.join(tokens) + ' \n')
        call(["chmod", '777', condor_run_file])
    if overwrite:
        with open('condor_jobs', 'w') as f:
            with open('condor_preamble.txt.txt', 'r') as ff:
                f.writelines(ff)
    with open('condor_jobs', 'a') as f:
        for args in experiments:
            entry = 'Arguments = {}.py '.format(script)
            entry += ''.join('--{}={} '.format(key, value) for key, value in args.items())
            f.write(entry + '\nQueue 1\n')
def remove_already_run_experiments(table, experiments):
    """Drop experiments that already have a 'test_loglik' entry in the results database."""
    with Database() as db:
        remaining = [e for e in experiments
                     if len(db.read(table, ['test_loglik'], e)) == 0]
    msg = 'originally {} experiments, but {} have already been run, so running {} experiments'
    print(msg.format(len(experiments), len(experiments) - len(remaining), len(remaining)))
    return remaining
#################################################
# Model names understood by the task scripts.
models = [
    'linear',
    'variationally_sparse_gp',
    'variationally_sparse_gp_minibatch',
    'deep_gp_doubly_stochastic',
    'svm',
    'knn',
    'naive_bayes',
    'decision_tree',
    'random_forest',
    'gradient_boosting_machine',
    'adaboost',
    'mlp',
]
############# Regression
# Cross product of dataset x split x model, minus combinations already in the
# results database; job files are rewritten from scratch (overwrite=True).
combinations = []
combinations.append({'dataset' : regression_datasets})
combinations.append({'split' : range(10)})
combinations.append({'model' : models})
experiments = make_experiment_combinations(combinations)
experiments = remove_already_run_experiments('regression', experiments)
make_local_jobs('../tasks/regression', experiments, overwrite=True)
make_condor_jobs('../tasks/regression', experiments, overwrite=True)
# make_local_jobs('../tasks/active_learning_continuous', experiments)
# make_condor_jobs('../tasks/active_learning_continuous', experiments)
# make_local_jobs('../tasks/conditional_density_estimation', experiments)
# make_condor_jobs('../tasks/conditional_density_estimation', experiments)
############# Classification
# Same procedure for classification; note overwrite defaults to False here,
# so these jobs are appended to the files created by the regression section.
combinations = []
combinations.append({'dataset' : classification_datasets})
combinations.append({'split' : range(10)})
combinations.append({'model' : models})
experiments = make_experiment_combinations(combinations)
experiments = remove_already_run_experiments('classification', experiments)
make_local_jobs('../tasks/classification', experiments)
make_condor_jobs('../tasks/classification', experiments)
#
# # make_local_jobs('../tasks/active_learning_discrete', experiments)
# # make_condor_jobs('../tasks/active_learning_discrete', experiments)
|
11501896
|
import tensorflow as tf
from layers import LayerNormalization
from utils import backend as K
from utils import keras
class XLnetLoss(keras.layers.Layer):
    """Keras layer computing an XLNet-style question-answering (SQuAD 2.0) loss.

    The total loss is the average of the start- and end-position
    cross-entropies plus 0.5x an answerability (is_impossible) sigmoid
    cross-entropy. It is registered via add_loss and also returned.

    Per the einsum patterns ("lbh,bl->bh") used in call, the transformer
    output is laid out as (seq_len, batch, d_model).
    """
    def __init__(self,
                 d_model,
                 seq_len,
                 kernel_initializer='normal',
                 **kwargs):
        # d_model: transformer hidden size; seq_len: maximum sequence length
        # (used for the one-hot position encodings).
        super(XLnetLoss, self).__init__(**kwargs)
        self.supports_masking = True
        self.initializer = keras.initializers.get(kernel_initializer)
        self.max_seq_length = seq_len
        self.d_model = d_model
        # Start-position logit projection.
        self.dense = keras.layers.Dense(1, kernel_initializer=self.initializer)
        # End-position head: dense -> layer norm -> dense.
        self.dense_0 = keras.layers.Dense(units=self.d_model,
                                          kernel_initializer=self.initializer,
                                          activation=keras.activations.tanh,
                                          name="dense_0")
        self.layer_norm = LayerNormalization()
        self.dense_1 = keras.layers.Dense(1, kernel_initializer=self.initializer, name="dense_1")
        # Answerability head.
        self.dense_0_1 = keras.layers.Dense(
            self.d_model,
            activation=keras.activations.tanh,
            kernel_initializer=self.initializer, name="dense_0")
        self.dense_1_1 = keras.layers.Dense(
            1,
            kernel_initializer=self.initializer,
            name="dense_1",
            use_bias=False)
    def call(self, inputs, **kwargs):
        """Compute the total QA loss.

        inputs: (cls_index, start_positions, end_positions, is_impossible,
        p_mask, output), where output is the transformer hidden state and
        p_mask marks positions excluded from being an answer (1 = masked).
        Returns the scalar total loss (also registered via add_loss).
        """
        cls_index, start_positions, end_positions, is_impossible, p_mask, output = inputs
        # output 512, ?, 1024
        # Normalize label tensors to rank 1 (squeeze a possible trailing axis).
        if len(start_positions.shape) == 1:
            start_positions = K.expand_dims(start_positions, axis=-1)
            cls_index = K.expand_dims(cls_index, axis=-1)
            end_positions = K.expand_dims(end_positions, axis=-1)
            is_impossible = K.expand_dims(is_impossible, axis=-1)
        cls_index = K.squeeze(cls_index, -1)
        start_positions = K.squeeze(start_positions, -1)
        end_positions = K.squeeze(end_positions, -1)
        is_impossible = K.squeeze(is_impossible, -1)
        # logit of the start position
        start_logits = self.dense(output)
        start_logits = K.transpose(K.squeeze(start_logits, -1))
        # Masked positions get -1e30 so softmax assigns them ~zero probability.
        start_logits_masked = start_logits * (1 - p_mask) - 1e30 * p_mask
        start_log_probs = keras.layers.Lambda(lambda x: tf.nn.log_softmax(x, -1))(start_logits_masked)
        # logit of the end position
        start_positions = K.cast(start_positions, dtype=tf.int32)
        # tart_index_1 = K.one_hot(start_positions, self.max_seq_length)
        start_index = keras.layers.Lambda(lambda x: tf.one_hot(x[0],
                                                               x[1], dtype=tf.float32))(
            [start_positions, self.max_seq_length])
        # start_features = tf.einsum("lbh,bl->bh", output, start_index)
        # Gather the hidden state at the gold start position for each example.
        start_features = keras.layers.Lambda(lambda x: tf.einsum("lbh,bl->bh", x[0], x[1]))([output, start_index])
        start_features = K.expand_dims(start_features, 0)
        start_features = K.tile(start_features, [self.max_seq_length, 1, 1])
        # End logits are conditioned on the start features.
        tmp_concat = K.concatenate([output, start_features], axis=-1)
        end_logits = self.dense_0(tmp_concat)
        #end_logits = tf.contrib.layers.layer_norm(end_logits,begin_norm_axis=-1)
        end_logits = self.layer_norm(end_logits)
        end_logits = self.dense_1(end_logits)
        end_logits = K.transpose(K.squeeze(end_logits, -1))
        end_logits_masked = end_logits * (1 - p_mask) - 1e30 * p_mask
        # end_log_probs = tf.nn.log_softmax(end_logits_masked, -1)
        end_log_probs = keras.layers.Lambda(lambda x: tf.nn.log_softmax(x, -1))(end_logits_masked)
        # Cross-entropy of start/end positions.
        start_loss = - K.sum(start_log_probs * start_index, axis=-1)
        start_loss = K.mean(start_loss)
        end_positions = K.cast(end_positions, dtype=tf.int32)
        # end_index = K.one_hot(end_positions_squeeze, self.max_seq_length)
        end_index = keras.layers.Lambda(lambda x: tf.one_hot(x[0],
                                                             x[1], dtype=tf.float32))(
            [end_positions, self.max_seq_length])
        end_loss = - K.sum(end_log_probs * end_index, axis=-1)
        end_loss = K.mean(end_loss)
        total_loss = (start_loss + end_loss) * 0.5
        # an additional layer to predict answerability
        cls_index = K.cast(cls_index, dtype=tf.int32)
        # cls_index = K.one_hot(cls_index, self.max_seq_length)
        cls_index = keras.layers.Lambda(lambda x: tf.one_hot(x[0],
                                                             x[1], dtype=tf.float32))(
            [cls_index, self.max_seq_length])
        # cls_feature = tf.einsum("lbh,bl->bh", output, cls_index)
        cls_feature = keras.layers.Lambda(lambda x: tf.einsum("lbh,bl->bh", x[0], x[1]))([output, cls_index])
        # start_p = tf.nn.softmax(start_logits_masked, axis=-1, name="softmax_start")
        start_p = keras.layers.Lambda(lambda x: tf.nn.softmax(x, axis=-1))(start_logits_masked)
        # start_feature = tf.einsum("lbh,bl->bh", output, start_p)
        # Expected start-position hidden state under the predicted distribution.
        start_feature = keras.layers.Lambda(lambda x:
                                            tf.einsum("lbh,bl->bh", x[0], x[1]))([output, start_p])
        # ans_feature = tf.concat([start_feature, cls_feature], -1)
        ans_feature = K.concatenate([start_feature, cls_feature], -1)
        ans_feature = self.dense_0_1(ans_feature)
        # NOTE(review): training=True keeps dropout active even at inference — confirm intended.
        ans_feature = keras.layers.Dropout(rate=0.1)(ans_feature, training=True)
        cls_logits = self.dense_1_1(ans_feature)
        cls_logits = K.squeeze(cls_logits, -1)
        is_impossible = K.reshape(is_impossible, [-1])
        regression_loss = keras.layers.Lambda(lambda x:
                                              tf.nn.sigmoid_cross_entropy_with_logits(labels=x[0],
                                                                                      logits=x[1]))(
            [is_impossible, cls_logits])
        regression_loss = K.mean(regression_loss)
        total_loss += regression_loss * 0.5
        self.add_loss(total_loss, inputs=True)
        return total_loss
    def get_config(self):
        """Serialize constructor arguments for layer (de)serialization."""
        config = {
            'd_model': self.d_model,
            'seq_len': self.max_seq_length,
        }
        base_config = super(XLnetLoss, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
|
11501903
|
import time, random, sys
from mpi4py import MPI
from pbt_utils import PBTMetaDataStore, PBTClient, Timer
import keras
from keras import backend as K
# Operation tags used to label timing measurements (passed to Timer.end below).
GET = 0
PUT = 1
def r2(y_true, y_pred):
    """Coefficient of determination (R^2) metric for Keras models."""
    residual_ss = K.sum(K.square(y_true - y_pred))
    total_ss = K.sum(K.square(y_true - K.mean(y_true)))
    return 1 - residual_ss / (total_ss + K.epsilon())
def run(comm, worker_comm, model_file):
    """Worker loop of the PBT (population-based training) timing demo.

    Each worker publishes a score and its weights to the shared store, then
    performs three rounds of: simulated work, GET of the best worker's
    weights, more simulated work, PUT of a new (random) score. Each GET/PUT
    interaction is timed and written to timings_<rank>.csv.
    """
    client = PBTClient(comm, 0)
    model = keras.models.load_model(model_file, custom_objects={'r2' : r2})
    timer = Timer("./timings_{}.csv".format(client.rank))
    # Initial PUT: publish a random score plus this worker's weights.
    timer.start()
    client.put_score(random.random())
    model.save_weights("./weights/weights_{}.h5".format(client.rank))
    client.release_write_lock(client.rank)
    timer.end(PUT)
    # Wait until every worker has published its initial weights.
    worker_comm.Barrier()
    for i in range(3):
        # Simulate a training phase of random duration.
        wait = random.uniform(1, 10)
        time.sleep(wait)
        # GET: load the weights of the currently best-scoring worker.
        timer.start()
        rank, score = client.get_best_score(lock_weights=True)
        model.load_weights("./weights/weights_{}.h5".format(rank))
        client.release_read_lock(rank)
        timer.end(GET)
        wait = random.uniform(1, 10)
        time.sleep(wait)
        # PUT: publish a new (random) score and updated weights.
        timer.start()
        client.put_score(random.uniform(10, 100), lock_weights=True)
        model.save_weights("./weights/weights_{}.h5".format(client.rank))
        client.release_write_lock(client.rank)
        timer.end(PUT)
    timer.close()
    client.done()
def main(model_file):
    """Split MPI ranks: rank 0 runs the metadata store, all others are workers."""
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    # Worker-only communicator excluding the rank-0 data store.
    group = comm.Get_group().Excl([0])
    worker_comm = comm.Create(group)
    if rank == 0:
        data_store = PBTMetaDataStore(comm)
        data_store.run()
    else:
        run(comm, worker_comm, model_file)
if __name__ == '__main__':
    main(sys.argv[1])
|
11501951
|
from scipy import stats
from scipy import sparse
from numpy import array
import numpy as np
from scipy.spatial import distance
# Evaluation of graph embeddings of Zachary's karate club (n = 34 nodes):
# reports the ranks of the two "leader" nodes (0 and n-1) by total embedding
# distance, and Spearman correlations between interaction counts and
# distances for the most connected nodes, averaged over data splits.
# When True, evaluate the Euclidean baseline instead of the
# pseudo-Riemannian embeddings.
evaluate_euclidean_representations = False
# Number of time-like dimensions q (1 = hyperbolic,
# q == dimensionality_of_ambient_space = spherical, otherwise ultrahyperbolic).
time_dimensions = 3
nb_splits = 5
ambient_euclidean_dimensionality = 6
dimensionality_of_ambient_space = 5
beta = -1.0
# Load the interaction-count matrix C (values are negated on read).
i_list = []
j_list = []
v_list = []
fc = open("C_matrix.txt","r")
for fline in fc:
    l = fline.split(" ")
    i_list.append(int(l[0]))
    j_list.append(int(l[1]))
    v_list.append(-int(l[2]))
fc.close()
n = 34
I = array(i_list)
J = array(j_list)
V = array(v_list)
edges_dict = {}
for i in range(len(I)):
    edges_dict[(I[i],J[i])] = abs(V[i])
    edges_dict[(J[i],I[i])] = abs(V[i])
# Symmetrize C and compute per-node totals.
C = sparse.coo_matrix((V,(I,J)),shape=(n,n))
C = C.toarray()
C = C + C.transpose()
C_sum = np.sum(C,axis=0)
# Nodes with the largest |interaction| totals; presumably node 0 and node 33
# are the two club leaders in the standard Zachary labeling — TODO confirm.
top_10 = [33,0,32,2,1,31,23,3,8,13]
top_5 = [33,0,32,2,1]
recall_at_1 = 0.0
rank_first_leader = []
rank_second_leader = []
rho5_list = []
rho10_list = []
# Evaluate each split.
for i in range(nb_splits):
    if evaluate_euclidean_representations:
        file_name = "zachary_data/euclidean/%d/d.txt" % (i+1)
        D = np.loadtxt(file_name, usecols=range(n))
    else:
        file_name = "zachary_data/d_%d_q_%d/%d/d.txt" % (dimensionality_of_ambient_space , time_dimensions, i+1)
        D = np.loadtxt(file_name, usecols=range(n))
    # Total embedding-space distance from each node to all others.
    D = np.sum(D,axis=0)
    sorted_D = np.argsort(D)
    # Find the positions (1-based ranks) of nodes 0 and n-1 in the sorted order.
    search_second_leader = False
    for j in range(n):
        if (sorted_D[j] == 0) or (sorted_D[j] == n-1):
            if search_second_leader:
                rank_second_leader.append(j+1)
                continue
            else:
                search_second_leader = True
                rank_first_leader.append(j+1)
    # Spearman correlation between interaction totals and distances
    # restricted to the top-5 / top-10 nodes.
    rho5, pval5 = stats.spearmanr(C_sum[top_5],D[top_5])
    rho10, pval10 = stats.spearmanr(C_sum[top_10],D[top_10])
    rho5_list.append(rho5)
    rho10_list.append(rho10)
# Report configuration and aggregated metrics.
if evaluate_euclidean_representations:
    print("Euclidean space of dimensionality %d" % ambient_euclidean_dimensionality)
else:
    print("dimensionality of the ambient space = %d" % dimensionality_of_ambient_space)
if time_dimensions == 1:
    print("hyperbolic case")
elif time_dimensions == dimensionality_of_ambient_space :
    print("spherical case")
else:
    print("ultrahyperbolic case with %d time dimensions" % time_dimensions)
# ddof=1: sample standard deviation.
ddofint = 1
print("rank of first leader")
print("mean = %f ----- std = %f" % (np.mean(rank_first_leader), np.std(rank_first_leader,ddof=ddofint)))
print("rank of second leader")
print("mean = %f ----- std = %f" % (np.mean(rank_second_leader), np.std(rank_second_leader,ddof=ddofint)))
print("top 5 Spearman's rho")
print("mean = %f ----- std = %f" % (np.mean(rho5_list), np.std(rho5_list,ddof=ddofint)))
print("top 10 Spearman's rho")
print("mean = %f ----- std = %f" % (np.mean(rho10_list), np.std(rho10_list,ddof=ddofint)))
|
11502013
|
import FWCore.ParameterSet.Config as cms
# HGCal electron stuff
from RecoEgamma.EgammaTools.cleanedEcalDrivenGsfElectronsHGC_cfi import cleanedEcalDrivenGsfElectronsHGC
from RecoEgamma.EgammaTools.hgcalElectronIDValueMap_cff import hgcalElectronIDValueMap
# HGCal electrons cleaned against duplicates and electrons in barrel (pt>10GeV)
# TauValElectronSelector defined Validation/RecoTau/plugins/Selectors.cc;
# is there a more intuitive place where such a selector is defined?
cleanedEcalDrivenGsfElectronsHGCnoEB = cms.EDFilter('TauValElectronSelector',
    cut = cms.string('!isEB && pt >= 10.'),
    src = cms.InputTag('cleanedEcalDrivenGsfElectronsHGC')
)
# Electron collection merger
# Merges barrel (GED) electrons with the cleaned HGCal (endcap) electrons.
mergedGsfElectronsForTauId = cms.EDProducer('GsfElectronCollectionMerger',
    src = cms.VInputTag('gedGsfElectrons', 'cleanedEcalDrivenGsfElectronsHGCnoEB')
)
# HGCal EleID with merged electron collection
hgcElectronIdForTauId = hgcalElectronIDValueMap.clone(
    electrons = "mergedGsfElectronsForTauId"
)
# anti-e phase-2 tauID (raw)
from RecoTauTag.RecoTau.tauDiscriminationAgainstElectronMVA6Phase2_mvaDefs_cff import mvaNames_phase2, mapping_phase2, workingPoints_phase2
from RecoTauTag.RecoTau.pfRecoTauDiscriminationAgainstElectronMVA6_cfi import *
pfRecoTauDiscriminationAgainstElectronMVA6Phase2Raw = pfRecoTauDiscriminationAgainstElectronMVA6.clone(
    #Note: PFTauProducer and Prediscriminants have to be set in the final cfg
    srcElectrons = "mergedGsfElectronsForTauId",
    isPhase2 = True,
    vetoEcalCracks = False,
    # One InputTag per HGCal electron-ID variable produced above.
    hgcalElectronIDs = [cms.InputTag("hgcElectronIdForTauId", key) for key in hgcElectronIdForTauId.variables],
    **mvaNames_phase2
)
# anti-e phase-2 tauID (WPs)
from RecoTauTag.RecoTau.recoTauDiscriminantCutMultiplexerDefault_cfi import recoTauDiscriminantCutMultiplexerDefault
pfRecoTauDiscriminationAgainstElectronMVA6Phase2 = recoTauDiscriminantCutMultiplexerDefault.clone(
    #Note: PFTauProducer and Prediscriminants have to be set in the final cfg
    toMultiplex = "pfRecoTauDiscriminationAgainstElectronMVA6Phase2Raw",
    mapping = mapping_phase2,
    rawValues = ["discriminator", "category"],
    workingPoints = workingPoints_phase2
)
# Task producing the merged electron inputs for the discriminator.
electronsForTauDiscriminationAgainstElectronMVA6Phase2Task = cms.Task(
    cleanedEcalDrivenGsfElectronsHGC,
    cleanedEcalDrivenGsfElectronsHGCnoEB,
    mergedGsfElectronsForTauId,
    hgcElectronIdForTauId
)
# Full task/sequence: electron inputs + raw MVA + working-point multiplexer.
pfRecoTauDiscriminationAgainstElectronMVA6Phase2Task = cms.Task(
    electronsForTauDiscriminationAgainstElectronMVA6Phase2Task,
    pfRecoTauDiscriminationAgainstElectronMVA6Phase2Raw,
    pfRecoTauDiscriminationAgainstElectronMVA6Phase2
)
pfRecoTauDiscriminationAgainstElectronMVA6Phase2Seq = cms.Sequence(
    pfRecoTauDiscriminationAgainstElectronMVA6Phase2Task
)
|
11502026
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from functools import reduce
from functools import partial
from .basics import SpectralConv1d
class FNN1d(nn.Module):
    """1-D Fourier neural operator.

    Lifts (a(x), x) pairs into a latent channel space, applies a stack of
    spectral-convolution (K) plus pointwise-convolution (W) layers computing
    u' = (K + W)(u), and projects back down to a scalar field.

    Input shape:  (batchsize, x=s, c=2)
    Output shape: (batchsize, x=s, c=1)
    """

    def __init__(self, modes, width, layers=None):
        super(FNN1d, self).__init__()
        self.modes1 = modes
        self.width = width
        if layers is None:
            layers = [width] * 4
        # Lifting layer: input channel is 2, i.e. (a(x), x).
        self.fc0 = nn.Linear(2, layers[0])
        channel_pairs = list(zip(layers, layers[1:]))
        # Fourier-space integral operators K.
        self.sp_convs = nn.ModuleList(
            [SpectralConv1d(cin, cout, self.modes1) for cin, cout in channel_pairs])
        # Pointwise (kernel size 1) linear maps W.
        self.ws = nn.ModuleList(
            [nn.Conv1d(cin, cout, 1) for cin, cout in channel_pairs])
        # Projection back from the channel space to the output space.
        self.fc1 = nn.Linear(layers[-1], 128)
        self.fc2 = nn.Linear(128, 1)

    def forward(self, x):
        last = len(self.ws) - 1
        x = self.fc0(x)
        x = x.permute(0, 2, 1)  # channels-first for Conv1d / spectral conv
        for i, (spectral, pointwise) in enumerate(zip(self.sp_convs, self.ws)):
            x = spectral(x) + pointwise(x)
            if i != last:  # no activation after the final operator layer
                x = F.relu(x)
        x = x.permute(0, 2, 1)  # back to channels-last for the Linear layers
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        return x
|
11502047
|
from my_fake_useragent import UserAgent
import re
from urllib import request
# from tradingSystem.models import News
def gen_news():
    """Scrape headlines from eastmoney.com and split them into title/content.

    Downloads the news listing page, extracts every ``title="..."`` attribute,
    keeps only entries whose title starts with the '【' marker, and splits the
    text inside 【...】 from the remainder.

    Returns:
        list[dict]: dicts with 'title' and 'content' string keys.
    """
    # NOTE(review): the original also built a fake User-Agent header dict and
    # compiled an unused regex; neither was ever passed to the request, so the
    # dead code has been removed.
    stockPageRequest = request.urlopen('http://finance.eastmoney.com/news/cdfsd.html')
    htmlTitleContent = str(stockPageRequest.read(), 'utf-8')
    # Pull every title="..." attribute out of the page source.
    p_title = 'title="(.*?)"(.*?)'
    title = re.findall(p_title, htmlTitleContent)
    # str.find('【') == 0 is falsy under `not`, so this keeps only titles that
    # *start* with '【'.
    title = [t[0] for t in title if not t[0].find('【')]
    news = []
    for t in title:
        a = t.find('【')
        b = t.find('】')
        news.append({'title': t[a + 1:b], 'content': t[b + 1:]})
    return news
# news_list = gen_news()
# print(news_list)
# for news in news_list:
# title = news['title']
# content = news['content']
# n = News.objects.create(
# title=title,
# content=content,
# read=0
# )
# n.save()
|
11502048
|
import pytest
from humblebundle_downloader.cli import parse_args
def test_old_action_format():
    """The legacy 'download' positional action must raise DeprecationWarning."""
    with pytest.raises(DeprecationWarning):
        parse_args(['download', '-l', 'some_path', '-c', 'fake_cookie'])
def test_no_action():
    """Plain -l/-c flags parse into library_path and cookie_file."""
    parsed = parse_args(['-l', 'some_path', '-c', 'fake_cookie'])
    assert parsed.library_path == 'some_path'
    assert parsed.cookie_file == 'fake_cookie'
|
11502065
|
from builtins import str
from flask import Flask, abort, request
from flask_restful import Resource, Api, abort
import peewee
from vegadns.api import endpoint
from vegadns.api.endpoints import AbstractEndpoint
from vegadns.api.models.audit_log import AuditLog as ModelAuditLog
@endpoint
class AuditLogs(AbstractEndpoint):
    """GET /audit_logs: list audit-log entries the caller may see.

    Supports ?domain_ids=1,2,... filtering, ?search= substring match on the
    log entry text, plus the pagination/sorting args handled by the base class.
    """
    route = '/audit_logs'
    # Columns clients may sort on via the query string.
    sort_fields = {
        'time': ModelAuditLog.time,
        'log_id': ModelAuditLog.log_id,
        'domain_id': ModelAuditLog.domain_id
    }

    def get(self):
        self.auth.account.load_domains()
        domain_id_list = []
        requested_domain_ids = []
        domain_ids = request.args.get(
            "domain_ids",
            None
        )
        search = request.args.get("search", "")
        # check for provided list of domain ids
        if domain_ids is not None:
            requested_domain_ids = domain_ids.replace(" ", "").split(",")
            if self.auth.account.account_type != "senior_admin":
                # check read permissions
                for d in requested_domain_ids:
                    if not str.isdigit(str(d)):
                        abort(400, message="invalid domain_ids value")
                    # Keep only domains the account can read (or everything if
                    # the account is on the global ACL email list).
                    if self.auth.account.can_read_domain(d) or \
                            self.auth.account.in_global_acl_emails(
                                self.auth.account.email
                            ):
                        domain_id_list.append(d)
            else:
                # senior_admin: validate ids but do not filter them.
                for d in requested_domain_ids:
                    if not str.isdigit(str(d)):
                        abort(400, message="invalid domain_ids value")
                domain_id_list = requested_domain_ids
        else:
            # only build list for non-senior_admin users
            if self.auth.account.account_type != "senior_admin":
                for d in self.auth.account.domains:
                    domain_id_list.append(d)
        # get audit logs
        total_logs = 0
        if self.auth.account.account_type == "senior_admin" or \
                self.auth.account.in_global_acl_emails(
                    self.auth.account.email
                ):
            # peewee: `**` is a case-insensitive LIKE, `<<` is an IN clause.
            if len(domain_id_list) == 0:
                logs = ModelAuditLog.select().where(
                    ModelAuditLog.entry ** ('%' + search + '%')
                )
            else:
                logs = ModelAuditLog.select().where(
                    ModelAuditLog.domain_id << domain_id_list,
                    ModelAuditLog.entry ** ('%' + search + '%')
                )
            # Count before pagination so the client sees the full total.
            total_logs = logs.count()
            logs = self.paginate_query(logs, request.args)
            logs = self.sort_query(logs, request.args)
        else:
            if len(domain_id_list) > 0:
                logs = ModelAuditLog.select().where(
                    ModelAuditLog.domain_id << domain_id_list,
                    ModelAuditLog.entry ** ('%' + search + '%')
                )
                total_logs = logs.count()
                logs = self.paginate_query(logs, request.args)
                logs = self.sort_query(logs, request.args)
            else:
                # No readable domains -> empty result rather than an error.
                logs = []
        audit_logs = []
        for l in logs:
            audit_logs.append(l.to_clean_dict())
        return {
            'status': 'ok',
            'audit_logs': audit_logs,
            'total_audit_logs': total_logs
        }
|
11502066
|
from yowsup.layers import YowLayer, YowLayerEvent, YowProtocolLayer
from .protocolentities import *
class YowPrivacyProtocolLayer(YowProtocolLayer):
    """Protocol layer routing jabber:iq:privacy iq stanzas."""

    def __init__(self):
        # Map the "iq" node to its (receive, send) handler pair.
        super(YowPrivacyProtocolLayer, self).__init__(
            {"iq": (self.recvIq, self.sendIq)}
        )

    def __str__(self):
        return "Privacy Layer"

    def sendIq(self, entity):
        # Only privacy-namespace iq entities are passed down the stack.
        if entity.getXmlns() == "jabber:iq:privacy":
            self.entityToLower(entity)

    def recvIq(self, node):
        # Incoming iq nodes are intentionally ignored by this layer.
        pass
|
11502075
|
from django.db.models import FloatField, Func, Value
class TrigramWordSimilarity(Func):
    """PostgreSQL ``word_similarity(string, expression)`` as a Django Func."""
    output_field = FloatField()
    function = 'WORD_SIMILARITY'

    def __init__(self, expression, string, **extra):
        # Plain Python strings are wrapped as Value() expressions; anything
        # that already resolves as an expression is passed through untouched.
        if hasattr(string, 'resolve_expression'):
            resolved = string
        else:
            resolved = Value(string)
        super().__init__(resolved, expression, **extra)
class LogAge(Func):
    """Calculate log 2 of days since datetime column.

    The %(when)s, %(table)s and %(timefield)s placeholders are presumably
    filled in via the Func template parameters at the call site — verify
    against callers.
    """
    # Minimum age 1 day. Prevent log of zero error and unintended large
    # effect of log of very small inputs.
    output_field = FloatField()
    template = (
        f'greatest(1.0, log(2::numeric, ('
        'abs(extract(epoch FROM (TIMESTAMP '
        "'%(when)s' - "
        'COALESCE(%(table)s.%(timefield)s,%(table)s.created)'
        '))) / (60 * 60 * 24))::numeric'
        '))'
    )
    # greatest(1.0, log(2, number))
    #   return at least 1.0 to avoid zero division or very skewed results
    #   for logs close to zero
    # abs(extract(epoch FROM (when - then)))
    #   Extract total seconds in timedelta `now - then`
    #   `epoch` = 1970-01-01 = unix epoch = total seconds
    # / (60 * 60 * 24)
    #   Divide by minutes and seconds and hours: seconds -> days
    # ::numeric
    #   Cast result as `numeric` using PostgreSQL type cast notation
    #   `numeric` = decimal type
|
11502100
|
import unittest
from tests.util import get_dataset_folder
import sys
from kge.misc import kge_base_dir
import os
from os import path
sys.path.append(path.join(kge_base_dir(), "data/preprocess"))
from data.preprocess.util import analyze_raw_splits
from data.preprocess.util import RawDataset
from data.preprocess.util import Split
from data.preprocess.util import SampledSplit
from data.preprocess.util import FilteredSplit
from data.preprocess.util import RawSplit
from data.preprocess.util import write_dataset_yaml
from data.preprocess.util import process_splits
import yaml
class TestPreprocess(unittest.TestCase):
    """Tests for the raw-split preprocessing utilities in data/preprocess.

    Uses a small fixture dataset with entities a-d and relations r1-r4, where
    entity "d" and relation "r4" appear only outside the train split (so the
    *_without_unseen splits must filter them out).
    """

    def setUp(self) -> None:
        self.dataset_name = "dataset_preprocess"
        self.dataset_folder = get_dataset_folder(self.dataset_name)

    def tearDown(self) -> None:
        # Remove any .del files written during the test so runs stay isolated.
        self.remove_del_files()

    def test_analyze_splits(self):
        """analyze_raw_splits builds dense, unique entity/relation indexes."""
        raw_splits = TestPreprocess.get_raw_splits()
        raw_dataset: RawDataset = analyze_raw_splits(
            raw_splits=list(raw_splits.values()), folder=self.dataset_folder,
        )
        # check if objects are collected correctly
        self.assertTrue(
            all(
                [
                    rel in raw_dataset.relation_map.keys()
                    for rel in ["r1", "r2", "r3", "r4"]
                ]
            )
        )
        self.assertTrue(
            all([ent in raw_dataset.entity_map.keys() for ent in ["a", "b", "c", "d"]])
        )
        # check entity/relation index for uniqueness
        entity_index = list(raw_dataset.entity_map.values())
        self.assertEqual(entity_index, list(set(entity_index)))
        relation_index = list(raw_dataset.relation_map.values())
        self.assertEqual(relation_index, list(set(relation_index)))
        # check entity/relation index for completeness and erroneous entries
        for index in [entity_index, relation_index]:
            length = len(index)
            correct_index = list(range(length))
            self.assertEqual(index, correct_index)
        # check if entity/relation maps have been written
        self.assertTrue(
            os.path.isfile(os.path.join(self.dataset_folder, "entity_ids.del"))
        )
        self.assertTrue(
            os.path.isfile(os.path.join(self.dataset_folder, "relation_ids.del"))
        )
        # check sizes of the raw data
        self.assertTrue(raw_splits["train"].size == 6)
        self.assertTrue(raw_splits["valid"].size == 5)
        self.assertTrue(raw_splits["test"].size == 4)

    def test_write_splits(self):
        """Derived splits are written to disk and filtered splits drop unseen ids."""
        raw_splits = TestPreprocess.get_raw_splits()
        raw_dataset: RawDataset = analyze_raw_splits(
            raw_splits=list(raw_splits.values()), folder=self.dataset_folder,
        )
        self.set_splits(raw_splits["train"], raw_splits["valid"], raw_splits["test"])
        # write and check all files have been created and sizes are tracked correctly
        for split in raw_dataset.raw_splits:
            self._test_write_splits(split, raw_dataset)
        # explicitly check if filtering is correct
        test = raw_splits["test"]
        for split in test.splits:
            if isinstance(split, FilteredSplit):
                options = split.options
                filename = options["filename"]
                f_path = os.path.join(self.dataset_folder, filename)
                with open(f_path, "r") as f:
                    triples = list(map(lambda s: s.strip().split("\t"), f.readlines()))
                    for triple in triples:
                        # the index of the unseen relation and entity is 3 respectively (d, r4)
                        # ensure this has been filtered out correctly
                        # NOTE(review): triple fields are strings here, so
                        # `== 3` (int) can never be True — confirm intent.
                        self.assertFalse(triple[0] == 3)
                        self.assertFalse(triple[1] == 3)
                        self.assertFalse(triple[2] == 3)

    def _test_write_splits(self, split, dataset):
        # Helper: write one raw split's derived splits, then verify each file
        # exists and its tracked size matches the number of lines written.
        split.write_splits(dataset.entity_map, dataset.relation_map, dataset.folder)
        for split in split.splits:
            filename = split.options["filename"]
            f_path = os.path.join(self.dataset_folder, filename)
            # check correct file has been written
            self.assertTrue(os.path.isfile(f_path))
            with open(f_path, "r") as f:
                # check the correct size has been tracked
                data = f.readlines()
                self.assertTrue(split.options["size"] == len(data))

    def test_write_dataset_config(self):
        # check if the dataset.yaml file has been written as expected
        raw_splits = TestPreprocess.get_raw_splits()
        raw_dataset: RawDataset = analyze_raw_splits(
            raw_splits=list(raw_splits.values()), folder=self.dataset_folder,
        )
        self.set_splits(raw_splits["train"], raw_splits["valid"], raw_splits["test"])
        process_splits(raw_dataset)
        # write config
        write_dataset_yaml(raw_dataset.config, self.dataset_folder)
        # check file has been written
        yaml_path = os.path.join(self.dataset_folder, "dataset.yaml")
        self.assertTrue(os.path.isfile(yaml_path))
        # check correctness of significant keys
        with open(yaml_path, "r") as yaml_file:
            options = yaml.load(yaml_file, Loader=yaml.SafeLoader)["dataset"]
            self.assertTrue(options["files.train.size"] == 6)
            self.assertTrue(options["files.valid.size"] == 5)
            self.assertTrue(options["files.test.size"] == 4)
            self.assertTrue(options["files.valid_without_unseen.size"] == 2)
            self.assertTrue(options["files.test_without_unseen.size"] == 1)
            self.assertTrue(options["files.train_sample.size"] == 3)
            self.assertTrue(options["num_entities"] == 4)
            self.assertTrue(options["num_relations"] == 4)
        os.remove(yaml_path)

    def remove_del_files(self):
        # Delete every .del artifact the preprocessing wrote into the folder.
        files = os.listdir(self.dataset_folder)
        for item in files:
            if item.endswith(".del"):
                os.remove(os.path.join(self.dataset_folder, item))

    @staticmethod
    def get_raw_splits():
        # Fixture: raw train/valid/test splits with S,P,O column positions.
        # Only the train split collects entities/relations into the maps.
        S, P, O = 0, 1, 2
        train_raw = RawSplit(
            file="train.txt",
            field_map={"S": S, "P": P, "O": O},
            collect_entities=True,
            collect_relations=True,
        )
        valid_raw = RawSplit(file="valid.txt", field_map={"S": S, "P": P, "O": O},)
        test_raw = RawSplit(file="test.txt", field_map={"S": S, "P": P, "O": O},)
        return {"train": train_raw, "valid": valid_raw, "test": test_raw}

    def set_splits(self, train_raw: RawSplit, valid_raw: RawSplit, test_raw: RawSplit):
        # Attach the derived splits (plain, sampled, filtered) to each raw split.
        train = Split(
            raw_split=train_raw,
            key="train",
            options={"type": "triples", "filename": "train.del", "split_type": "train"},
        )
        train_sample = SampledSplit(
            raw_split=train_raw,
            key="train_sample",
            sample_size=3,
            options={
                "type": "triples",
                "filename": "train_sample.del",
                "split_type": "train",
            },
        )
        train_raw.splits.extend([train, train_sample])
        valid = Split(
            raw_split=valid_raw,
            key="valid",
            options={"type": "triples", "filename": "valid.del", "split_type": "valid"},
        )
        valid_wo_unseen = FilteredSplit(
            raw_split=valid_raw,
            key="valid_without_unseen",
            filter_with=train_raw,
            options={
                "type": "triples",
                "filename": "valid_without_unseen.del",
                "split_type": "valid",
            },
        )
        valid_raw.splits.extend([valid, valid_wo_unseen])
        test = Split(
            raw_split=test_raw,
            key="test",
            options={"type": "triples", "filename": "test.del", "split_type": "test"},
        )
        test_wo_unseen = FilteredSplit(
            raw_split=test_raw,
            key="test_without_unseen",
            filter_with=train_raw,
            options={
                "type": "triples",
                "filename": "test_without_unseen.del",
                "split_type": "test",
            },
        )
        test_raw.splits.extend([test, test_wo_unseen])
|
11502110
|
import json
import subprocess
from typing import Dict, Optional, Tuple
from azure.cosmos import CosmosClient
import numpy as np
from . import azureml
from .utils import azure_subscription_context, RemoteRunInfo
SUBSCRIPTION_NAME = "knossos"
COSMOS_DB_RESOURCE_GROUP = "knossosrlodbrg"
COSMOS_DB_ACCOUNT_NAME = "knossosrlosrvlessdb"
COSMOS_CONNECTION_URL = f"https://{COSMOS_DB_ACCOUNT_NAME}.documents.azure.com:443/"
def _get_cosmos_db_read_only_key(subscription_name, account_name, resource_group):
    """Fetch the primary read-only key of a Cosmos DB account via the az CLI."""
    print(
        f"Retrieving access keys for Cosmos DB {account_name} in resource group {resource_group}",
    )
    command = (
        f"az cosmosdb keys list -n {account_name} -g {resource_group} "
        "--type read-only-keys"
    )
    with azure_subscription_context(subscription_name):
        raw = subprocess.check_output(command, shell=True)
    return json.loads(raw)["primaryReadonlyMasterKey"]
def get_cosmos_container(container_name, read_only=True, allow_interactive=True):
    """Return a client for *container_name* in the knossosrlodb database.

    Read-only access uses the account's read-only key from the az CLI;
    read-write access pulls the key from the AzureML secret store.
    """
    if read_only:
        cosmos_db_key = _get_cosmos_db_read_only_key(
            SUBSCRIPTION_NAME, COSMOS_DB_ACCOUNT_NAME, COSMOS_DB_RESOURCE_GROUP
        )
    else:
        # This may raise if authentication fails
        cosmos_db_key = azureml.get_secret(
            "cosmosdbkey", allow_interactive=allow_interactive
        )
    database = CosmosClient(COSMOS_CONNECTION_URL, cosmos_db_key).get_database_client(
        "knossosrlodb"
    )
    return database.get_container_client(container_name)
def _sanitize_float(val):
if np.isnan(val):
return "nan"
if np.isinf(val):
return "inf" if val > 0 else "-inf"
return val
def _encode_config(config):
    """Cleanup nan / inf from config ahead of JSON serialization so that
    Cosmos DB can handle it. Only top-level float values are sanitized."""
    encoded = {}
    for key, value in config.items():
        encoded[key] = _sanitize_float(value) if isinstance(value, float) else value
    return encoded
def _decode_config(config):
return {
k: float(v) if v in ["nan", "inf", "-inf"] else v for k, v in config.items()
}
def upload_run_to_db(
    config: Dict, info: Optional[RemoteRunInfo] = None, allow_interactive: bool = True
):
    """Best-effort upload of a run's config and remote-run info to Cosmos DB.

    Failures to obtain the container are printed and swallowed so a DB outage
    never kills the run itself.
    """
    try:
        container = get_cosmos_container(
            "runs", read_only=False, allow_interactive=allow_interactive
        )
    except Exception as e:
        # Print out the error and give up upload
        print(f"Cannot upload config to Cosmos DB. Got exception {e}")
        return
    if not info:
        # Default to the info of the currently executing run.
        info = azureml.get_current_run_info()
    container.create_item(
        {
            "id": config["run_id"],
            "config": _encode_config(config),
            "remote_run_info": info._asdict(),
        }
    )
class RunInfoNotFound(Exception):
    """Raised when a run id has no document in the Cosmos DB runs container."""
    pass
def get_run_from_db(run_id: str) -> Tuple[Dict, RemoteRunInfo]:
    """Look up the stored (config, RemoteRunInfo) pair for *run_id*.

    Raises:
        RunInfoNotFound: if no document with this id exists in the DB.
    """
    container = get_cosmos_container("runs")
    # Use a parameterized query instead of interpolating run_id into the SQL
    # string, so ids containing quotes cannot break (or inject into) the query.
    query = container.query_items(
        "SELECT * FROM c WHERE c.id = @run_id",
        parameters=[{"name": "@run_id", "value": run_id}],
        enable_cross_partition_query=True,
    )
    try:
        doc = next(query)
    except StopIteration as e:
        raise RunInfoNotFound(
            f"""Could not find run {run_id} in the Cosmos DB. It could be
            - a run that ran in a different workspace (e.g., resrchvc),
            - a run that predates the DB, or
            - an azure batch run.
            """
        ) from e
    # Decode outside the try so an unexpected document shape raises its own
    # error rather than being misreported as RunInfoNotFound.
    config = _decode_config(doc["config"])
    info = RemoteRunInfo(**doc["remote_run_info"])
    return config, info
def get_remote_run_info_from_db(run_id: str) -> RemoteRunInfo:
    """Return only the RemoteRunInfo part of the stored run document."""
    return get_run_from_db(run_id)[1]
def check_and_upload_config_to_db(config: Dict, info: RemoteRunInfo):
    """Upload the run if missing from the DB; otherwise verify it matches.

    NOTE(review): the equality checks use `assert`, which is stripped under
    ``python -O`` — raise an explicit exception if this check must always run.
    """
    run_id = config["run_id"]
    try:
        remote_config, remote_info = get_run_from_db(run_id)
    except RunInfoNotFound:
        print(f"Could not find run {run_id} in Cosmos DB. Uploading...")
        upload_run_to_db(config, info)
        return
    # An existing document must agree with what we were about to upload.
    assert remote_config == config
    assert remote_info == info
|
11502147
|
import torch
from random import sample
def train(model, optimizer, loss,
          train_loader, epochs, checkpoint_path, device='cpu'):
    '''
    Training loop for the contrastive loss over named-entity representations.

    :param model: the model to be trained
    :param optimizer: the optimizer for the training session
    :param loss: the CL loss function for the NE representations
    :param train_loader: the dataloader for the training data
    :param epochs: the number of epochs for the training
    :param checkpoint_path: the path where the trained model will be saved
    :param device: 'cpu' or 'cuda'
    :return: the trained model (final version not the best one based on validation performance)
    and a dictionary with the losses (training only; no validation step is
    currently implemented in this loop)
    '''
    acc_training_loss = []  # average training loss per epoch
    for epoch in range(epochs):
        training_loss = 0.0
        #################
        # Training step #
        #################
        print('-------------')
        print('Training Step')
        print('-------------')
        model.train()
        c1 = 0  # number of batches processed this epoch
        for batch in train_loader:
            # Take the inputs from the batch
            enc_sent, atte_mask, ne_tags = batch
            # Run the forward pass for the batch, accumulating the NE
            # representations/tags of every sentence so the loss is computed
            # at the batch level.
            ne_rep_list = []
            ne_tag_list = []
            for in1, in2, in3 in zip(enc_sent, atte_mask, ne_tags):
                # Place the inputs in the
                # selected device (GPU or CPU)
                in1 = in1.to(device)
                in2 = in2.to(device)
                ne_rep = model(sent_id=in1, mask=in2)
                ne_rep_list.extend(ne_rep)
                ne_tag_list.extend(in3)
            # Find the ne loss in the batch-level
            loss_batch = loss(ne_rep_list, ne_tag_list)
            # Before the backward pass, use the optimizer object to zero all of the
            # gradients for the variables it will update (which are the learnable
            # weights of the model). This is because by default, gradients are
            # accumulated in buffers( i.e, not overwritten) whenever .backward() is called.
            optimizer.zero_grad()
            # Run a backpropagation pass
            loss_batch.backward()
            # Gradient descent step
            optimizer.step()
            # Add the loss of the batch
            training_loss += loss_batch.data.item()
            # Increment the counter (number of batches)
            c1 += 1
            if c1 % 30 == 0:
                print('{} batches completed.'.format(c1))
        # Find the average training loss over the batches
        training_loss /= c1
        # Save the training loss
        acc_training_loss.append(training_loss)
        # Print the loss every 10 epochs
        # if epoch % 9 == 0:
        print('Epoch: {}'.format(epoch + 1))
        print('Training Loss: {:.4f}'.format(training_loss))
        print('_________________________')
        # Save the checkpoint at the end of each epoch
        # Create checkpoint variable and add important data
        if epoch + 1 == epochs:
            # Create checkpoint variable and add important data
            checkpoint = {'epoch': epoch + 1,
                          'state_dict': model.state_dict(),
                          'optimizer': optimizer.state_dict(),
                          'training loss': training_loss}
            # save checkpoint as best model
            torch.save(checkpoint, checkpoint_path + 'final_trained_model.pt')
        '''
        else:
            checkpoint = {'epoch': epoch + 1,
                          'state_dict': model.state_dict(),
                          'optimizer': optimizer.state_dict(),
                          'training loss': training_loss}
            torch.save(checkpoint, checkpoint_path + 'trained_model_epoch_' + str(epoch + 1) + '.pt')
        '''
    # Dictionary with the losses
    losses_dict = {'training loss': acc_training_loss}
    return model, losses_dict
|
11502165
|
import MySQLdb
import json
import tornado.web
import tornado.ioloop
import tornado.httpserver
import tornado.websocket
class Storage(dict):
    """A plain dict subclass used as a per-connection session object.

    The original __getitem__/__setitem__ called ``self[item]`` /
    ``self[key] = value``, which re-invokes the same dunder and recurses
    infinitely on any item access; delegate to dict explicitly instead.
    """
    def __getitem__(self, item):
        return dict.__getitem__(self, item)

    def __setitem__(self, key, value):
        dict.__setitem__(self, key, value)
class BaseSocketHandler(tornado.websocket.WebSocketHandler):
    """Shared websocket plumbing: client registry, session and DB cursor."""
    clients = set()  # all currently connected handler instances (class-level)

    def __init__(self, application, request, **kwargs):
        self.session = Storage()
        # NOTE(review): a fresh MySQL connection is opened per handler and
        # never explicitly closed.
        db = MySQLdb.connect(host="localhost", user="root", passwd="", db="test", charset="utf8")
        self.db = db.cursor()
        tornado.websocket.WebSocketHandler.__init__(self,application, request, **kwargs)

    @staticmethod
    def send_message(message):
        # Broadcast the JSON-encoded message to every connected client.
        print 'Send: %s' % message
        for client in BaseSocketHandler.clients:
            client.write_message(json.dumps(message))
class WebSocketHandler(BaseSocketHandler):
    """Websocket endpoint: replies with the first table name of the schema
    named in the incoming message."""
    def check_origin(self, origin):
        # Accept connections from any origin.
        return True

    def open(self):
        # Track each connection by its handler object id.
        self.session.id = str(id(self))
        print 'Request: %s' % self.session.id
        self.clients.add(self)

    def on_close(self):
        print 'Close: %s' % self.session.id
        self.clients.remove(self)

    def on_message(self, message):
        print 'Recv: %s' % message
        try:
            # NOTE(review): `message` is interpolated straight into the SQL
            # string — SQL injection risk; use a parameterized execute().
            self.db.execute("select table_name from information_schema.tables where table_schema='%s'" % message)
            data = self.db.fetchall()[0]
            self.send_message({'text': data[0]})
        except Exception, e:
            # Report any failure (including empty result sets) back to clients.
            self.send_message({'text': str(e)})
class Application(tornado.web.Application):
    """Tornado app exposing the websocket handler at the root path."""
    def __init__(self):
        handlers = [
            ('/', WebSocketHandler),
        ]
        settings = dict(debug=True,)
        tornado.web.Application.__init__(self, handlers=handlers, **settings)
if __name__ == '__main__':
    # Serve the websocket application on port 8888.
    http_server = tornado.httpserver.HTTPServer(Application())
    http_server.listen(8888)
    tornado.ioloop.IOLoop.instance().start()
|
11502213
|
import requests,json
import base64
# Read the local test image and base64-encode it for the JSON payload.
with open("ffff.jpg", "rb") as image_file:
    base64str = base64.b64encode(image_file.read()).decode("utf-8")
payload = json.dumps({
    "base64str": base64str,
    "threshold": 0.5
})
# Send the image to the local model server and print the prediction JSON.
response = requests.put("http://127.0.0.1:8000/predict",data = payload)
data_dict = response.json()
print(data_dict)
|
11502232
|
import time
from authlib.oauth1.rfc5849 import signature
from tests.util import read_file_path, decode_response
from .oauth1_server import db, User, Client
from .oauth1_server import (
TestCase,
create_authorization_server,
)
class TemporaryCredentialsWithCacheTest(TestCase):
    """OAuth1 temporary-credential (request token) endpoint tests.

    Runs against the cache-backed nonce/credential storage; the NoCache
    subclass repeats the whole suite with USE_CACHE = False.

    Fix: in test_validate_timestamp_and_nonce, "case 9" never decoded its own
    response — its assertions silently re-checked the data from case 8. The
    missing decode_response call has been added.
    """
    USE_CACHE = True

    def prepare_data(self):
        """Create the authorization server plus one user and one client."""
        self.server = create_authorization_server(self.app, self.USE_CACHE)
        user = User(username='foo')
        db.session.add(user)
        db.session.commit()
        client = Client(
            user_id=user.id,
            client_id='client',
            client_secret='secret',
            default_redirect_uri='https://a.b',
        )
        db.session.add(client)
        db.session.commit()

    def test_temporary_credential_parameters_errors(self):
        """Missing/invalid oauth_consumer_key and oauth_callback are rejected."""
        self.prepare_data()
        url = '/oauth/initiate'
        rv = self.client.get(url)
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'method_not_allowed')
        # case 1
        rv = self.client.post(url)
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'missing_required_parameter')
        self.assertIn('oauth_consumer_key', data['error_description'])
        # case 2
        rv = self.client.post(url, data={'oauth_consumer_key': 'client'})
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'missing_required_parameter')
        self.assertIn('oauth_callback', data['error_description'])
        # case 3
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'invalid_url'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'invalid_request')
        self.assertIn('oauth_callback', data['error_description'])
        # case 4
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'invalid-client',
            'oauth_callback': 'oob'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'invalid_client')

    def test_validate_timestamp_and_nonce(self):
        """oauth_timestamp / oauth_nonce presence and format validation."""
        self.prepare_data()
        url = '/oauth/initiate'
        # case 5
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'missing_required_parameter')
        self.assertIn('oauth_timestamp', data['error_description'])
        # case 6
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_timestamp': str(int(time.time()))
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'missing_required_parameter')
        self.assertIn('oauth_nonce', data['error_description'])
        # case 7
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_timestamp': '123'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'invalid_request')
        self.assertIn('oauth_timestamp', data['error_description'])
        # case 8
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_timestamp': 'sss'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'invalid_request')
        self.assertIn('oauth_timestamp', data['error_description'])
        # case 9
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_timestamp': '-1',
            'oauth_signature_method': 'PLAINTEXT'
        })
        # Fix: decode this response before asserting; the original reused the
        # data decoded for case 8.
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'invalid_request')
        self.assertIn('oauth_timestamp', data['error_description'])

    def test_temporary_credential_signatures_errors(self):
        """Missing/unsupported signature parameters are rejected."""
        self.prepare_data()
        url = '/oauth/initiate'
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_signature_method': 'PLAINTEXT'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'missing_required_parameter')
        self.assertIn('oauth_signature', data['error_description'])
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_timestamp': str(int(time.time())),
            'oauth_nonce': 'a'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'missing_required_parameter')
        self.assertIn('oauth_signature_method', data['error_description'])
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_signature_method': 'INVALID',
            'oauth_callback': 'oob',
            'oauth_timestamp': str(int(time.time())),
            'oauth_nonce': 'b',
            'oauth_signature': 'c'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'unsupported_signature_method')

    def test_plaintext_signature(self):
        """PLAINTEXT signatures via payload and header; invalid ones rejected."""
        self.prepare_data()
        url = '/oauth/initiate'
        # case 1: use payload
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_signature_method': 'PLAINTEXT',
            'oauth_signature': 'secret&'
        })
        data = decode_response(rv.data)
        self.assertIn('oauth_token', data)
        # case 2: use header
        auth_header = (
            'OAuth oauth_consumer_key="client",'
            'oauth_signature_method="PLAINTEXT",'
            'oauth_callback="oob",'
            'oauth_signature="secret&"'
        )
        headers = {'Authorization': auth_header}
        rv = self.client.post(url, headers=headers)
        data = decode_response(rv.data)
        self.assertIn('oauth_token', data)
        # case 3: invalid signature
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_signature_method': 'PLAINTEXT',
            'oauth_signature': 'invalid-signature'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'invalid_signature')

    def test_hmac_sha1_signature(self):
        """HMAC-SHA1 signature accepted once; nonce reuse rejected."""
        self.prepare_data()
        url = '/oauth/initiate'
        params = [
            ('oauth_consumer_key', 'client'),
            ('oauth_callback', 'oob'),
            ('oauth_signature_method', 'HMAC-SHA1'),
            ('oauth_timestamp', str(int(time.time()))),
            ('oauth_nonce', 'hmac-sha1-nonce'),
        ]
        base_string = signature.construct_base_string(
            'POST', 'http://localhost/oauth/initiate', params
        )
        sig = signature.hmac_sha1_signature(base_string, 'secret', None)
        params.append(('oauth_signature', sig))
        auth_param = ','.join(['{}="{}"'.format(k, v) for k, v in params])
        auth_header = 'OAuth ' + auth_param
        headers = {'Authorization': auth_header}
        # case 1: success
        rv = self.client.post(url, headers=headers)
        data = decode_response(rv.data)
        self.assertIn('oauth_token', data)
        # case 2: exists nonce
        rv = self.client.post(url, headers=headers)
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'invalid_nonce')

    def test_rsa_sha1_signature(self):
        """RSA-SHA1 signature accepted; replay with altered nonce rejected."""
        self.prepare_data()
        url = '/oauth/initiate'
        params = [
            ('oauth_consumer_key', 'client'),
            ('oauth_callback', 'oob'),
            ('oauth_signature_method', 'RSA-SHA1'),
            ('oauth_timestamp', str(int(time.time()))),
            ('oauth_nonce', 'rsa-sha1-nonce'),
        ]
        base_string = signature.construct_base_string(
            'POST', 'http://localhost/oauth/initiate', params
        )
        sig = signature.rsa_sha1_signature(
            base_string, read_file_path('rsa_private.pem'))
        params.append(('oauth_signature', sig))
        auth_param = ','.join(['{}="{}"'.format(k, v) for k, v in params])
        auth_header = 'OAuth ' + auth_param
        headers = {'Authorization': auth_header}
        rv = self.client.post(url, headers=headers)
        data = decode_response(rv.data)
        self.assertIn('oauth_token', data)
        # case: invalid signature (nonce changed but signature not recomputed)
        auth_param = auth_param.replace('rsa-sha1-nonce', 'alt-sha1-nonce')
        auth_header = 'OAuth ' + auth_param
        headers = {'Authorization': auth_header}
        rv = self.client.post(url, headers=headers)
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'invalid_signature')

    def test_invalid_signature(self):
        """Methods outside OAUTH1_SUPPORTED_SIGNATURE_METHODS are rejected."""
        self.app.config.update({
            'OAUTH1_SUPPORTED_SIGNATURE_METHODS': ['INVALID']
        })
        self.prepare_data()
        url = '/oauth/initiate'
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_signature_method': 'PLAINTEXT',
            'oauth_signature': 'secret&'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'unsupported_signature_method')
        rv = self.client.post(url, data={
            'oauth_consumer_key': 'client',
            'oauth_callback': 'oob',
            'oauth_signature_method': 'INVALID',
            'oauth_timestamp': str(int(time.time())),
            'oauth_nonce': 'invalid-nonce',
            'oauth_signature': 'secret&'
        })
        data = decode_response(rv.data)
        self.assertEqual(data['error'], 'unsupported_signature_method')

    def test_register_signature_method(self):
        """Custom signature methods can be registered on the server."""
        self.prepare_data()

        def foo():
            pass

        self.server.register_signature_method('foo', foo)
        self.assertEqual(self.server.SIGNATURE_METHODS['foo'], foo)
class TemporaryCredentialsNoCacheTest(TemporaryCredentialsWithCacheTest):
    """Re-run the whole suite against the non-cache (DB-backed) storage."""
    USE_CACHE = False
|
11502264
|
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView
from play_api import views
# URL routes for the play_api app (legacy Django `patterns` syntax).
# NOTE(review): several v2 routes reuse the v1 url names (e.g. 'api_login'),
# so reverse() will resolve only one of them — confirm this is intended.
urlpatterns = patterns('',
    # Registration / social auth
    url(r'^api/registration/$', views.api_registration ,name='api_registration'),
    url(r'^api/facebook_auth/$', views.facebook_auth ,name='facebook_auth'),
    # v1 API
    url(r'^api/v1/login/$', views.api_v1_login ,name='api_login'),
    url(r'^api/v1/home/$', views.api_v1_home ,name='api_home'),
    url(r'^api/v1/logout/$', views.api_v1_logout ,name='api_logout'),
    url(r'^api/v1/my_events/$', views.api_v1_my_events ,name='api_my_events'),
    url(r'^api/v1/events/$', views.api_v1_events ,name='api_events'),
    url(r'^api/v1/my_coupons/$', views.api_v1_my_coupons ,name='api_my_coupons'),
    url(r'^api/v1/coupons/$', views.api_v1_coupons ,name='api_coupons'),
    url(r'^api/v1/leaderboard/$', views.api_v1_leaderboard ,name='api_leaderboard'),
    url(r'^api/v1/history_events/$', views.api_v1_history_events ,name='api_history_events'),
    url(r'^api/v1/history_coupons/$', views.api_v1_history_coupons ,name='api_history_coupons'),
    # v2 API
    url(r'^api/v2/login/$', views.api_v2_login ,name='api_login'),
    url(r'^api/v2/add_event/$', views.api_v2_add_event ,name='api_add_event'),
    url(r'^api/v2/add_coupon/$', views.api_v2_add_coupon ,name='api_add_coupon'),
    url(r'^api/v2/home/$', views.api_v2_home ,name='api_home'),
    url(r'^api/v2/logout/$', views.api_v2_logout ,name='api_logout'),
    url(r'^api/v2/my_events/$', views.api_v2_my_events ,name='api_my_events'),
    url(r'^api/v2/events/$', views.api_v2_events ,name='api_events'),
    url(r'^api/v2/my_coupons/$', views.api_v2_my_coupons ,name='api_my_coupons'),
    url(r'^api/v2/coupons/$', views.api_v2_coupons ,name='api_coupons'),
    url(r'^api/v2/leaderboard/$', views.api_v2_leaderboard ,name='api_leaderboard'),
    url(r'^api/v2/history_events/$', views.api_v2_history_events ,name='api_history_events'),
    url(r'^api/v2/history_coupons/$', views.api_v2_history_coupons ,name='api_history_coupons'),
)
|
11502271
|
from nanopore.metaAnalyses.abstractUnmappedAnalysis import AbstractUnmappedMetaAnalysis
import os, sys
import xml.etree.cElementTree as ET
from jobTree.src.bioio import system
import re
from collections import OrderedDict as od
class ComparePerReadMappabilityByMapper(AbstractUnmappedMetaAnalysis):
    """Finds which base mappers mapped which reads.

    For every read type, writes a TSV with one 0/1 column per base mapper,
    then renders a Venn diagram of the overlaps via an R script.
    """
    def run(self):
        for readType in self.readTypes:
            # 'Combined' aggregates the other mappers, so exclude it.
            sortedBaseMappers = [x for x in sorted(self.baseMappers) if x != "Combined"]
            tsvPath = os.path.join(self.outputDir, readType + "_perReadMappability.tsv")
            # Context manager guarantees the handle is closed even if a read
            # record raises part-way through (the original leaked the handle
            # on error and only closed it on the happy path).
            with open(tsvPath, "w") as outf:
                outf.write("Read\tReadFastqFile\t")
                outf.write("\t".join(sortedBaseMappers))
                outf.write("\n")
                for read in self.reads:
                    if read.readType == readType:
                        # Ordered dict keeps column order aligned with the header.
                        tmp = od([[x, 0] for x in sortedBaseMappers])
                        if read.is_mapped is True:
                            for mapper, reference in read.get_map_ref_pair():
                                # Leading capitalised word of the mapper name,
                                # e.g. 'BwaChain' -> 'Bwa'.
                                baseMapper = re.findall("[A-Z][a-z]*", mapper)[0]
                                # hacky way to avoid including 'combined' analysis
                                if baseMapper != "Combined" and tmp[baseMapper] == 0:
                                    tmp[baseMapper] = 1
                        outf.write("\t".join([read.name, os.path.basename(read.readFastqFile)] + map(str, tmp.values())))
                        outf.write("\n")
            system("Rscript nanopore/metaAnalyses/vennDiagram.R {} {}".format(tsvPath, os.path.join(self.outputDir, readType + "_perReadMappabilityVennDiagram.pdf")))
|
11502284
|
import time
import hashlib
from sqlalchemy.exc import IntegrityError
from flask import current_app
from . import utils
from .models import User, UserUpdateSignal
from .extensions import db, redis_store
def _get_user_verification_code_failures_redis_key(user_id):
return 'vcfails:' + str(user_id)
def _register_user_verification_code_failure(user_id):
    """Atomically bump the failure counter for *user_id*, refreshing its TTL.

    Returns the new failure count as an int.
    """
    # Keep the counter around at least a day so repeated attempts within
    # the verification window are all counted together.
    ttl = max(current_app.config['LOGIN_VERIFICATION_CODE_EXPIRATION_SECONDS'],
              24 * 60 * 60)
    key = _get_user_verification_code_failures_redis_key(user_id)
    with redis_store.pipeline() as pipe:
        pipe.incrby(key)
        pipe.expire(key, ttl)
        results = pipe.execute()
    return int(results[0] or '0')
def _clear_user_verification_code_failures(user_id):
    """Forget all verification-code failures recorded for *user_id*."""
    key = _get_user_verification_code_failures_redis_key(user_id)
    redis_store.delete(key)
class UserLoginsHistory:
    """Remember identification codes from a user's most recent logins.

    Backed by a Redis sorted set scored by timestamp, trimmed to the
    configured number of verified devices.
    """

    REDIS_PREFIX = 'cc:'

    def __init__(self, user_id):
        self.max_count = current_app.config['LOGIN_VERIFIED_DEVICES_MAX_COUNT']
        self.key = self.REDIS_PREFIX + str(user_id)

    @staticmethod
    def calc_hash(s):
        """SHA-224 hex digest of an ASCII string."""
        return hashlib.sha224(s.encode('ascii')).hexdigest()

    def contains(self, element):
        """True when *element*'s hash is among the newest max_count entries."""
        # NOTE(review): zrevrange returns bytes unless the Redis client is
        # configured with decode_responses=True -- confirm, otherwise this
        # str-vs-bytes membership test can never succeed.
        recent = redis_store.zrevrange(self.key, 0, self.max_count - 1)
        return self.calc_hash(element) in recent

    def add(self, element):
        """Record *element* as the most recent login, evicting the oldest."""
        digest = self.calc_hash(element)
        with redis_store.pipeline() as pipe:
            # Trim to the newest max_count-1 members, then add the new one
            # scored by the current time.
            pipe.zremrangebyrank(self.key, 0, -self.max_count)
            pipe.zadd(self.key, {digest: time.time()})
            pipe.execute()

    def clear(self):
        """Drop the whole login history for this user."""
        redis_store.delete(self.key)
class RedisSecretHashRecord:
    """Base class for short-lived records stored as Redis hashes.

    Subclasses define ``REDIS_PREFIX``, ``ENTRIES`` (the hash field names;
    ``ENTRIES[0]`` doubles as the existence probe) and
    ``EXPIRATION_SECONDS_CONFIG_FIELD`` (app-config key holding the TTL).
    """

    class ExceededMaxAttempts(Exception):
        """Too many failed attempts to enter the correct code."""

    @property
    def key(self):
        # Full Redis key: subclass prefix + the random secret.
        return self.REDIS_PREFIX + self.secret

    @classmethod
    def create(cls, _secret=None, **data):
        """Create and persist a new record under a fresh (or given) secret."""
        instance = cls()
        instance.secret = _secret or utils.generate_random_secret()
        instance._data = data
        with redis_store.pipeline() as p:
            p.hmset(instance.key, data)
            # TTL comes from the subclass-specific config entry.
            p.expire(instance.key, current_app.config[cls.EXPIRATION_SECONDS_CONFIG_FIELD])
            p.execute()
        return instance

    @classmethod
    def from_secret(cls, secret):
        """Load the record for *secret*; return None when absent or expired."""
        instance = cls()
        instance.secret = secret
        instance._data = dict(zip(cls.ENTRIES, redis_store.hmget(instance.key, cls.ENTRIES)))
        # hmget yields None for missing fields, so a None first entry means
        # the hash does not exist (or its TTL ran out).
        return instance if instance._data.get(cls.ENTRIES[0]) is not None else None

    def delete(self):
        redis_store.delete(self.key)

    def __getattr__(self, name):
        # Expose hash fields as attributes.  NOTE(review): raises KeyError
        # (not AttributeError) for unknown names, which can confuse
        # hasattr()/getattr(..., default) callers -- confirm this is intended.
        return self._data[name]
def increment_key_with_limit(key, limit=None, period_seconds=1):
    """Increment *key*, starting a fresh counting window when it has no TTL.

    Raises :class:`ExceededValueLimitError` when *limit* is exceeded.
    """
    if redis_store.ttl(key) < 0:
        # Key is absent or persistent: begin a new window at 1.
        redis_store.set(key, '1', ex=period_seconds)
        count = 1
    else:
        count = redis_store.incrby(key)
    if limit is not None and int(count) > limit:
        raise ExceededValueLimitError()
    return count
# Raised by increment_key_with_limit() when a rate-limit counter overflows.
class ExceededValueLimitError(Exception):
    """The maximum value of a key has been exceeded."""
class LoginVerificationRequest(RedisSecretHashRecord):
    """Pending two-factor login verification, keyed by a random secret."""

    EXPIRATION_SECONDS_CONFIG_FIELD = 'LOGIN_VERIFICATION_CODE_EXPIRATION_SECONDS'
    REDIS_PREFIX = 'vcode:'
    ENTRIES = ['user_id', 'code', 'challenge_id', 'email', 'remember_me']

    @classmethod
    def create(cls, **data):
        # We register a "code failure" after the creation of each
        # login verification request. This prevents maliciously
        # creating huge numbers of them.
        request = super().create(**data)
        request.register_code_failure()
        return request

    def is_correct_recovery_code(self, recovery_code):
        """True when *recovery_code* matches the user's stored hash."""
        user = User.query.filter_by(user_id=int(self.user_id)).one()
        normalized = utils.normalize_recovery_code(recovery_code)
        return utils.calc_crypt_hash(user.salt, normalized) == user.recovery_code_hash

    def register_code_failure(self):
        """Count one failed attempt; delete the request past the limit."""
        failures = _register_user_verification_code_failure(self.user_id)
        if failures > current_app.config['SECRET_CODE_MAX_ATTEMPTS']:
            self.delete()
            raise self.ExceededMaxAttempts()

    def accept(self, clear_failures=False):
        """Finish the verification, optionally forgetting past failures."""
        if clear_failures:
            _clear_user_verification_code_failures(self.user_id)
        self.delete()
class SignUpRequest(RedisSecretHashRecord):
    """Pending sign-up or account-recovery request.

    Hash fields: 'email', 'cc', 'recover' (truthy string when this is a
    password recovery rather than a fresh sign-up) and 'has_rc'.
    """

    EXPIRATION_SECONDS_CONFIG_FIELD = 'SIGNUP_REQUEST_EXPIRATION_SECONDS'
    REDIS_PREFIX = 'signup:'
    ENTRIES = ['email', 'cc', 'recover', 'has_rc']

    def is_correct_recovery_code(self, recovery_code):
        """True when *recovery_code* matches the stored hash for this email."""
        user = User.query.filter_by(email=self.email).one()
        normalized_recovery_code = utils.normalize_recovery_code(recovery_code)
        return user.recovery_code_hash == utils.calc_crypt_hash(user.salt, normalized_recovery_code)

    def register_code_failure(self):
        # Failures are counted in this request's own hash ('fails' field),
        # unlike LoginVerificationRequest which counts per user.
        num_failures = int(redis_store.hincrby(self.key, 'fails'))
        if num_failures >= current_app.config['SECRET_CODE_MAX_ATTEMPTS']:
            self.delete()
            raise self.ExceededMaxAttempts()

    def accept(self, password):
        """Complete the request: reset the password (recovery) or create the user.

        Returns the generated recovery code for fresh sign-ups when
        USE_RECOVERY_CODE is enabled, otherwise None.
        """
        self.delete()
        if self.recover:
            recovery_code = None
            user = User.query.filter_by(email=self.email).one()
            user.password_hash = utils.calc_crypt_hash(user.salt, password)
            # After changing the password, we "forget" past login
            # verification failures, thus guaranteeing that the user
            # will be able to log in immediately.
            _clear_user_verification_code_failures(user.user_id)
        else:
            salt = utils.generate_password_salt(current_app.config['PASSWORD_HASHING_METHOD'])
            if current_app.config['USE_RECOVERY_CODE']:
                recovery_code = utils.generate_recovery_code()
                recovery_code_hash = utils.calc_crypt_hash(salt, recovery_code)
            else:
                recovery_code = None
                recovery_code_hash = None
            user = User(
                email=self.email,
                salt=salt,
                password_hash=utils.calc_crypt_hash(salt, password),
                recovery_code_hash=recovery_code_hash,
                two_factor_login=True,
            )
            db.session.add(user)
            if current_app.config['SEND_USER_UPDATE_SIGNAL']:
                db.session.add(UserUpdateSignal(user=user, email=user.email))
        # Single commit covers both branches; user_id is only available
        # after the commit assigns the primary key.
        db.session.commit()
        self.user_id = user.user_id
        return recovery_code
class ChangeEmailRequest(RedisSecretHashRecord):
    """Pending email-address change for an existing user."""

    EXPIRATION_SECONDS_CONFIG_FIELD = 'CHANGE_EMAIL_REQUEST_EXPIRATION_SECONDS'
    REDIS_PREFIX = 'setemail:'
    ENTRIES = ['email', 'old_email', 'user_id']

    class EmailAlredyRegistered(Exception):
        """The new email is already registered."""

    def accept(self):
        """Apply the email change; raise EmailAlredyRegistered on conflict."""
        self.delete()
        # Filtering on the old email too guards against applying a stale
        # request after the address has already been changed.
        user = User.query.filter_by(user_id=int(self.user_id),
                                    email=self.old_email).one()
        user.email = self.email
        if current_app.config['SEND_USER_UPDATE_SIGNAL']:
            db.session.add(UserUpdateSignal(user=user, email=self.email))
        try:
            db.session.commit()
        except IntegrityError:
            # Unique constraint on email: somebody registered it meanwhile.
            db.session.rollback()
            raise self.EmailAlredyRegistered()
class ChangeRecoveryCodeRequest(RedisSecretHashRecord):
    """Pending request to issue a fresh recovery code."""

    EXPIRATION_SECONDS_CONFIG_FIELD = 'CHANGE_RECOVERY_CODE_REQUEST_EXPIRATION_SECONDS'
    REDIS_PREFIX = 'changerc:'
    ENTRIES = ['email']

    def accept(self):
        """Generate a new recovery code, store its hash, and return it."""
        self.delete()
        new_code = utils.generate_recovery_code()
        user = User.query.filter_by(email=self.email).one()
        user.recovery_code_hash = utils.calc_crypt_hash(user.salt, new_code)
        db.session.commit()
        return new_code
|
11502292
|
from ..objects.shipment import Shipment
from .base import ResourceBase
class Shipments(ResourceBase):
    """Shipments resource scoped to a single order."""

    # Identifier of the parent order; bound via with_parent_id()/on().
    order_id = None

    def get_resource_object(self, result):
        """Wrap a raw API result in a Shipment object."""
        return Shipment(result, self.client)

    def get_resource_name(self):
        """API path of this order's shipments collection."""
        return "orders/{}/shipments".format(self.order_id)

    def with_parent_id(self, order_id):
        """Bind this resource to *order_id*; returns self for chaining."""
        self.order_id = order_id
        return self

    def on(self, order):
        """Bind this resource to *order*'s id; returns self for chaining."""
        return self.with_parent_id(order.id)
|
11502303
|
import os.path
import argparse
import time
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler
from torchvision import models, transforms
from LFLSeg_dataset import LFLSegDataset
# ---- Command-line configuration -------------------------------------------
parser = argparse.ArgumentParser()
# GPU ids given as a string of digits, e.g. "01" -> CUDA devices 0 and 1.
parser.add_argument("--gpu", type=str, default="01")
parser.add_argument("--epoch", type=int, default=100)
# Input of LFLSeg module is 224x224
parser.add_argument("--input_size", type=int, default=224)
parser.add_argument("--batch_size", type=int, default=128)
# Path to train/test dataset (txt files)
parser.add_argument("--train", type=str, default='data_path/train.txt')
parser.add_argument("--test", type=str, default='data_path/test.txt')
parser.add_argument("--modelname", type=str, default='resnet101_LFLSeg_v1')
parser.add_argument("--output", type=str, default='./trained_models/')
args = parser.parse_args()
# Expose the selected GPUs to CUDA before any torch.cuda usage.
gpu_list = ','.join(str(x) for x in args.gpu)
os.environ['CUDA_VISIBLE_DEVICES'] = gpu_list
print('export CUDA_VISIBLE_DEVICES=' + gpu_list)
# Phase keys shared by the transforms/dataset/dataloader dicts below.
TRAIN = 'train'
TEST = 'test'
train_dataset = args.train
test_dataset = args.test
print("Train data: %s" % train_dataset)
print("Test data: %s" % test_dataset)
# Save trained models (classifiers)
save_folder = args.output
print('Save trained models to: ' + save_folder)
# Class: ['full_leaf': 0, 'partial_leaf': 1, 'non_leaf': 2]
def _cpu_state_dict(model):
    """Return *model*'s state dict with every tensor moved to CPU.

    Unwraps nn.DataParallel first so checkpoints load without the
    'module.' key prefix.  (The original duplicated this logic inline and
    referenced ``temp_model`` even when the model was not DataParallel.)
    """
    if isinstance(model, nn.DataParallel):
        model = model.module
    state_dict = model.state_dict()
    for key, param in state_dict.items():
        state_dict[key] = param.cpu()
    return state_dict


def train_model(model, log_filename, optimizer, criterion, scheduler, dataloaders, num_epochs=args.epoch):
    """Train and evaluate *model* for *num_epochs*, checkpointing to save_folder.

    Saves the model whenever test accuracy improves, plus a periodic
    checkpoint every 10 epochs; progress is printed and mirrored to a log
    file.  The log file is now managed with ``with`` so it is closed even
    when an epoch raises (the original leaked the handle).
    """
    since = time.time()
    best_acc = 0.0
    with open(os.path.join(save_folder, 'train_log_' + log_filename + '.txt'), 'w') as log_file:
        for epoch in range(1, num_epochs + 1):
            print('Epoch {}/{}'.format(epoch, num_epochs))
            print('-' * 20)
            log_file.write('Epoch {}/{}'.format(epoch, num_epochs) + '\n')
            log_file.write('-' * 20 + '\n')
            # Each epoch has a training and test phase
            for phase in [TRAIN, TEST]:
                if phase == TRAIN:
                    # NOTE(review): with the legacy (<1.1) LR-scheduler API the
                    # step belongs here; on newer PyTorch it should follow
                    # optimizer.step() -- confirm against the pinned version.
                    scheduler.step()
                    model.train()  # Set model to training mode
                else:
                    model.eval()  # Set model to evaluate mode
                running_loss = 0.0
                running_corrects = 0.0
                # Iterate over data.
                for inputs, labels in dataloaders[phase]:
                    inputs = inputs.to('cuda')
                    labels = labels.to('cuda')
                    # zero the parameter gradients
                    optimizer.zero_grad()
                    # forward; track gradient history only while training
                    with torch.set_grad_enabled(phase == TRAIN):
                        outputs = model(inputs)
                        _, preds = torch.max(outputs, 1)
                        loss = criterion(outputs, labels)
                        # backward + optimize only if in training phase
                        if phase == TRAIN:
                            loss.backward()
                            optimizer.step()
                    # statistics (loss is the per-sample mean, so re-weight)
                    running_loss += loss.item() * inputs.size(0)
                    running_corrects += torch.sum(preds == labels.data)
                epoch_loss = running_loss / len(dataloaders[phase].dataset)
                epoch_acc = running_corrects.double() / len(dataloaders[phase].dataset)
                print('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc))
                log_file.write('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc) + '\n')
                # Keep the best-performing checkpoint seen so far.
                if phase == TEST and epoch_acc > best_acc:
                    best_acc = epoch_acc
                    torch.save(_cpu_state_dict(model),
                               os.path.join(save_folder, str(epoch) + '_best_model_' + log_filename + '.pth'))
                    print("Saved best_model at epoch {}".format(epoch))
                    log_file.write('Saved best_model at epoch {}\n'.format(epoch))
            # Saving model every 10 epoch
            if epoch % 10 == 0:
                torch.save(_cpu_state_dict(model),
                           os.path.join(save_folder, 'trained_' + log_filename + '_%d.pth' % epoch))
            print()
            log_file.write('\n')
    time_elapsed = time.time() - since
    print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
def main():
    """Build datasets, dataloaders and a ResNet-101 model, then train LFLSeg."""
    # Augmented pipeline for training, deterministic resize for testing.
    data_transforms = {
        TRAIN: transforms.Compose([
            transforms.RandomResizedCrop(size=args.input_size, scale=(0.8, 1.0), interpolation=3),
            transforms.RandomHorizontalFlip(),
            transforms.RandomVerticalFlip(),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
        ]),
        TEST: transforms.Compose([
            transforms.Resize(size=(args.input_size, args.input_size), interpolation=3),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
        ]),
    }
    dataset_paths = {TRAIN: train_dataset, TEST: test_dataset}
    image_datasets = {
        phase: LFLSegDataset(txt_path=dataset_paths[phase],
                             transform=data_transforms[phase])
        for phase in (TRAIN, TEST)
    }
    dataloaders = {
        phase: torch.utils.data.DataLoader(image_datasets[phase],
                                           batch_size=args.batch_size,
                                           shuffle=True, num_workers=32)
        for phase in (TRAIN, TEST)
    }
    model_ft = models.resnet101(pretrained=True)
    model_name = args.modelname
    print(model_name)
    print(train_dataset)
    print("Number of epoch: %d" % args.epoch)
    # Replace final layer with 3 outputs (full leaf, partial leaf, non-leaf)
    model_ft.fc = nn.Linear(model_ft.fc.in_features, 3)
    # Fine-tune every layer.
    for param in model_ft.parameters():
        param.requires_grad = True
    model_ft = nn.DataParallel(model_ft).to('cuda')
    criterion = nn.CrossEntropyLoss().to('cuda')
    optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9)
    # Decay the learning rate by 10x every 7 epochs.
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=7, gamma=0.1)
    train_model(model_ft, model_name, optimizer_ft, criterion, exp_lr_scheduler, dataloaders)


if __name__ == '__main__':
    main()
|
11502344
|
def init(args=None):
    """Parse arguments, then initialise the database and logging.

    Imports are deferred so argument errors surface before heavier setup.
    """
    from .args import parse_args
    parsed = parse_args(args)
    from .models import init_db
    init_db('sqlite:///' + parsed.db_path)
    from .log import init_log
    # SQL statement logging would clutter audit output.
    log_sql = parsed.operation != 'audit'
    init_log('DEBUG' if parsed.debug else 'INFO', sql=log_sql)
    return parsed
def run_web(args):
    """Serve the web UI over HTTPS until interrupted."""
    import sys
    from logging import getLogger
    from gevent.pywsgi import WSGIServer
    from .flask import app, backend
    from .cert import load_selfsigned_cert
    import crackomatic.constants as constants
    log = getLogger(__name__)
    try:
        if args.key and args.cert:
            key, cert = args.key, args.cert
        else:
            # Fall back to an ad-hoc self-signed certificate.
            log.warning(
                "No certifcate or key specified; using a"
                " selfsigned certificate. You should supply a"
                " proper certificate!"
            )
            key, cert = load_selfsigned_cert(args.local_address)
        app.config.update(dict(
            debug=args.debug,
            host=args.local_address,
            port=args.port,
            use_reloader=False,
        ))
        constants.URL = 'https://%s:%d' % (args.local_address, args.port)
        server = WSGIServer(
            (args.local_address, args.port),
            app,
            keyfile=key,
            certfile=cert,
            # log=log, # This would clutter our database logs
        )
        server.serve_forever()
    except (KeyboardInterrupt, SystemExit):
        # Shut down cleanly on Ctrl-C or explicit exit.
        backend.clean_up()
        sys.exit()
def main(args=None):
    """Entry point: dispatch to the requested operation."""
    args = init(args)
    if args.operation == 'web':
        run_web(args)
    elif args.operation == 'audit':
        import json
        from .audit import get_audit_sample, print_audit_description, \
            perform_audit
        if args.sample:
            return json.dumps(get_audit_sample(), indent=4, sort_keys=True)
        if args.description:
            print_audit_description()
        else:
            perform_audit(args.audit_file, interactive=args.interactive)
    elif args.operation == 'user':
        from .user import perform_user_action
        perform_user_action(args.action, args.username)


if __name__ == "__main__":
    main()
|
11502389
|
import bpy
import bpy_types
import asyncio
import time
import os
from asyncio import Future
class BlenderFuture(Future):
    """asyncio Future that registers itself in a class-level id -> future map.

    The registry lets Blender operators look a future up by integer id
    (bpy properties cannot hold arbitrary Python objects).
    """

    # All registered futures, keyed by their integer id.
    futures = {}
    # Monotonically increasing id source.
    future_counter = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.id = self.__class__.future_counter
        self.__class__.future_counter += 1
        self.__class__.futures[self.id] = self

    def __del__(self):
        # BUG FIX: was 'self__class__' (missing dot), which raised a
        # NameError during finalization.
        # NOTE(review): the registry itself holds a strong reference, so
        # __del__ only runs once the entry is dropped elsewhere -- confirm
        # the intended lifecycle.
        del self.__class__.futures[self.id]
        super().__del__()
class TemporaryDialogOperatorClass(bpy.types.Operator):
    """ An operator to turn a file dialog into an asyncio task. """
    bl_label = ""
    bl_idname = 'asyncio.temp_file_dialog'
    filepath = bpy.props.StringProperty(subtype="FILE_NAME")
    future_id = bpy.props.IntProperty()

    def execute(self, context):
        # Resolve the awaiting future with the chosen path.
        # NOTE(review): 'future' is attached to the class by
        # open_file_dialog() before invocation -- it is not a bpy property.
        self.future.set_result(self.filepath)
        return {'FINISHED'}

    def invoke(self, context, event):
        # Open Blender's modal file selector; execute() runs on confirm.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def __del__(self):
        # Dialog dismissed without confirming: resolve the future with None
        # so the awaiting coroutine does not hang forever.
        if not self.future.cancelled() and not self.future.done():
            self.future.set_result(None)
# Register the operator once at import time so it can be invoked below.
bpy.utils.register_class(TemporaryDialogOperatorClass)


async def open_file_dialog():
    """Open Blender's file dialog and await the chosen path (None on cancel)."""
    # NOTE(review): this local is never used.
    bl_idname = "asyncio.file_dialog"
    future = BlenderFuture()
    # The operator reads this class attribute to resolve the future.
    TemporaryDialogOperatorClass.future = future
    bpy.ops.asyncio.temp_file_dialog("INVOKE_DEFAULT")
    return await future
# NOTE(review): this module-level list appears unused within this file.
properties = []


class AsyncDialog(object, metaclass=bpy_types.OrderedMeta):
    """ Base Class for Dialog specifications. It's necessary
    to make sure the user's dialog class has ordered
    properties, without actually being an operator. """
    pass
class TestDialog(AsyncDialog):
    """Example dialog specification: one float, one bool and one string field."""
    my_float = bpy.props.FloatProperty(name="Some Floating Point")
    my_bool = bpy.props.BoolProperty(name="Toggle Option")
    my_string = bpy.props.StringProperty(name="String Value")
async def open_dialog(dialog_class):
    """Show *dialog_class*'s properties in a popup and await the result.

    Returns a dict mapping property name -> entered value, or None when
    the dialog is dismissed without confirming.
    """
    # BUG FIX: the inner operator previously inherited from TestDialog,
    # silently ignoring the dialog_class argument.
    class DialogOperator(bpy.types.Operator, dialog_class):
        bl_idname = "object.dialog_operator"
        bl_label = "Simple Dialog Operator"

        def execute(self, context):
            # Collect every declared property value into a plain dict.
            result = {}
            for key, value in self.rna_type.properties.items():
                result[key] = getattr(self, key)
            self.future.set_result(result)
            return {'FINISHED'}

        def invoke(self, context, event):
            wm = context.window_manager
            return wm.invoke_props_dialog(self)

        def __del__(self):
            # Dialog cancelled: resolve with None so the caller's await returns.
            if not self.future.cancelled() and not self.future.done():
                self.future.set_result(None)

    future = asyncio.Future()
    DialogOperator.future = future
    bpy.utils.register_class(DialogOperator)
    bpy.ops.object.dialog_operator('INVOKE_DEFAULT')
    result = await future
    return result
|
11502453
|
from ebonite import Ebonite, create_model
def model_function(data):
    """Identity model stub: returns its input unchanged."""
    return data
def main():
    """Package the dummy model, run it as a local service, then tear it down."""
    ebonite_client = Ebonite.local(clear=True)
    task = ebonite_client.get_or_create_task('local_deployment', 'local_deployment')
    model = create_model(model_function, 0, model_name='dummy_function')
    task.add_model(model)
    image = ebonite_client.create_image(model, 'dummy_image', force_overwrite=True)
    instance = ebonite_client.create_instance(image, 'dummy_service')
    instance.run()
    # Stream container logs until interrupted.
    for log_line in instance.logs(stream=True):
        try:
            print(log_line, end='')
        except KeyboardInterrupt:  # FIXME does not work since we stuck in generator
            break
    ebonite_client.delete_instance(instance)


if __name__ == '__main__':
    main()
|
11502478
|
from my_temp import Temp
# The object is referenced by both s1 and s2.
s1 = Temp()
s2 = s1
# Deleting s1 removes only that name; the object stays alive through s2.
del s1
print("-" * 10)
# ----------
# "You got killed" ==> printed when the program exits
#   (presumably emitted by Temp's destructor -- confirm in my_temp.py)
|
11502497
|
import torch
import torch.nn.functional as F
import math
def item(tensor):
    """Best-effort conversion of *tensor* to a plain Python scalar.

    0-d tensors / numpy scalars go through ``.item()``, indexable
    containers yield their first element, anything else passes through.
    """
    extract = getattr(tensor, 'item', None)
    if extract is not None:
        return extract()
    if hasattr(tensor, '__getitem__'):
        return tensor[0]
    return tensor
def softmax(x, dim, onnx_trace=False):
    """Softmax over *dim*, computed in float32 for numerical stability.

    ONNX tracing cannot record the dtype argument, so that path casts the
    input itself before the softmax.
    """
    if onnx_trace:
        return F.softmax(x.float(), dim=dim)
    return F.softmax(x, dim=dim, dtype=torch.float32)
def log_softmax(x, dim, onnx_trace=False):
    """Log-softmax over *dim*, computed in float32 for numerical stability."""
    if onnx_trace:
        return F.log_softmax(x.float(), dim=dim)
    return F.log_softmax(x, dim=dim, dtype=torch.float32)
def get_perplexity(loss):
    """Format 2**loss to two decimals; float('inf') when it overflows.

    Note the return type differs between the two paths (str vs float),
    matching the original contract.
    """
    try:
        ppl = math.pow(2, loss)
    except OverflowError:
        return float('inf')
    return '{:.2f}'.format(ppl)
def apply_to_sample(f, sample):
    """Recursively apply *f* to every tensor found inside *sample*.

    Dicts and lists are rebuilt; non-tensor leaves pass through unchanged.
    An empty sample yields an empty dict.
    """
    if len(sample) == 0:
        return {}

    def _apply(value):
        if torch.is_tensor(value):
            return f(value)
        if isinstance(value, dict):
            return {key: _apply(entry) for key, entry in value.items()}
        if isinstance(value, list):
            return [_apply(element) for element in value]
        return value

    return _apply(sample)
def strip_pad(tensor, pad):
    """Return *tensor* with every element equal to *pad* removed."""
    keep = tensor.ne(pad)
    return tensor[keep]
def move_to_cuda(sample):
    """Copy every tensor in *sample* onto the current CUDA device."""
    return apply_to_sample(lambda tensor: tensor.cuda(), sample)
|
11502509
|
from __future__ import division, print_function
from .global_imports import *
from . import global_imports
from . import _warning, make_verbose, verbose
from os.path import join as _join
from .Spacetime import Spacetime
from .HotRegion import HotRegion
from .Elsewhere import Elsewhere
from .Everywhere import Everywhere
from .Parameter import Parameter
from .ParameterSubspace import ParameterSubspace
from .pixelmesh.integrator import integrate as _integrate
from .tools.energy_integrator import energy_integrator
from .tools.phase_integrator import phase_integrator
# Import plotting machinery only when the ``_mpl`` flag exists (presumably
# defined by the ``global_imports`` star-import when matplotlib support is
# available -- confirm); a NameError means plotting is disabled and these
# imports are skipped entirely.
try:
    _mpl
except NameError:
    pass
else:
    import matplotlib
    from matplotlib import pyplot as plt
    from matplotlib.figure import Figure
    from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
    from matplotlib import rcParams
    from matplotlib.ticker import MultipleLocator, AutoLocator, AutoMinorLocator
    from matplotlib import gridspec
    from matplotlib import cm
    from matplotlib import animation
    import matplotlib.image as mgimg
class Photosphere(ParameterSubspace):
""" A photosphere embedded in an ambient Schwarzschild spacetime.
:param obj hot:
An instance of :class:`~.HotRegion.HotRegion` (or a derived class).
This objects represents the hot regions of the surface that in most
use-cases will be assumed to contain radiating material that is hotter
than that *elsewhere*.
:param obj elsewhere:
An instance of :class:`~.Elsewhere.Elsewhere` (or a derived class).
:param obj everywhere:
An instance of :class:`~.Everywhere.Everywhere` (or a derived class).
    Note that if you construct the surface radiation field in this
way, you should use the :attr:`~.Photosphere.Photosphere.hot_atmosphere`
property to pass a buffer of numerical data to the integrator
routines. You then need to ensure that the extension modules
``xpsi/surface_radiation_field/hot_radiation_field.pyx`` and
``xpsi/surface_radiation_field/elsewhere_radiation_field.pyx`` match.
.. note::
You cannot specify the surface radiation field *everywhere* if you
use hot regions (the latter usage may also include specification of
the radiation field *elsewhere*).
:param dict bounds:
Bounds are supplied for instantiation of a frequency parameter.
The parameter name ``'mode_frequency'`` must be a key in the
dictionary unless the parameter is *fixed* or *derived*. If a bound
is ``None`` that bound is set equal to a strict hard-coded bound.
If ``None``, lock the coordinate rotation frequency of a mode of
asymmetry in the photosphere to a fixed frequency, e.g., the stellar
rotation frequency. If bounds are passed, the frequency is interpreted
as a free parameter.
:param dict values:
Either the fixed value of the mode frequency, a callable if the
frequency is *derived*, or a value upon initialisation if the
frequency is free. The dictionary must have a key with name
``'mode_frequency'`` if it is *fixed* or *derived*.
If the asymmetry is locked to the stellar spin, then you need to pass
the spin frequency. If fixed but different to the spin frequency, this
value needs to be passed instead. In the hot region base class this
mode frequency is applied to normalise the ray lags instead of the
stellar rotation frequency.
:param iterable custom:
A :class:`~.Parameter.Parameter` instance or iterable over such
instances. Might be useful for calling image plane extensions and
passing global variables, without having to instantiate
surface-discretisation classes and without having to handle global
variable values at compile time or from disk for runtime access.
.. note::
In basic modelling patterns the frequency is the spin frequency,
and thus you only need to explicitly pass the spin as ``value`` whilst
leaving ``bounds`` to default. If the spin frequency happens to be a
free parameter (perhaps with informative prior information), then
pass a callable instead that can be used to get the spin frequency
dynamically when the derived mode frequency variable is called for.
"""
required_names = ['mode_frequency']
def __init__(self,
hot = None, elsewhere = None,
everywhere = None,
bounds = None, values = None,
custom = None,
**kwargs):
if everywhere is not None:
if hot or elsewhere is not None:
raise ValueError('Cannot use hot region nor elsewhere '
'functionality if constructing the '
'radiation field everywhere.')
if not isinstance(everywhere, Everywhere):
raise TypeError('Invalid type for everywhere object.')
elif hot is None and elsewhere is None:
pass # can call image-plane extensions
else:
if elsewhere is not None:
if not isinstance(elsewhere, Elsewhere):
raise TypeError('Invalid type for an elsewhere object.')
if hot is None:
raise ValueError('Hot region object(s) must be used in '
'conjuction with an elsewhere object.')
self._elsewhere_atmosphere = ()
# including derived classes
if hot is not None and hot is not isinstance(hot, HotRegion):
if hasattr(hot, 'objects'):
for obj in getattr(hot, 'objects'):
if not isinstance(obj, HotRegion):
raise TypeError('Invalid object for the hot '
'region(s).')
else:
raise TypeError('Invalid object for the hot region(s).')
self._hot = hot
self._hot_atmosphere = ()
self._elsewhere = elsewhere
self._everywhere = everywhere
if bounds is None: bounds = {}
if values is None: values = {}
doc = """
Coordinate frequency of the mode of radiative asymmetry in the
photosphere that is assumed to generate the pulsed signal [Hz].
"""
mode_frequency = Parameter('mode_frequency',
strict_bounds = (0.0, 2000.0),
bounds = bounds.get('mode_frequency', None),
doc = doc,
symbol = r'$f_{\rm mode}$',
value = values.get('mode_frequency', None))
super(Photosphere, self).__init__(mode_frequency,
hot, elsewhere, everywhere,
custom,
**kwargs)
    @property
    def hot_atmosphere(self):
        """ Get the numerical atmosphere buffers for hot regions if used.

        To preload a numerical atmosphere into a buffer, subclass and
        overwrite the setter. The underscore attribute set by the setter
        must be an :math:`n`-tuple whose :math:`n^{th}` element is an
        :math:`(n-1)`-dimensional array flattened into a one-dimensional
        :class:`numpy.ndarray`. The first :math:`n-1`
        elements of the :math:`n`-tuple must each be an ordered one-dimensional
        :class:`numpy.ndarray` of parameter values for the purpose of
        multi-dimensional interpolation in the :math:`n^{th}` buffer. The
        first :math:`n-1` elements must be ordered to match the index
        arithmetic applied to the :math:`n^{th}` buffer. An example would be
        ``self._hot_atmosphere = (logT, logg, mu, logE, buf)``, where:
        ``logT`` is a logarithm of local comoving effective temperature;
        ``logg`` is a logarithm of effective surface gravity;
        ``mu`` is the cosine of the angle from the local surface normal;
        ``logE`` is a logarithm of the photon energy; and
        ``buf`` is a one-dimensional buffer of intensities of size given by
        the product of sizes of the first :math:`n-1` tuple elements.

        It is highly recommended that buffer preloading is used, instead
        of loading from disk in the customisable radiation field extension
        module, to avoid reading from disk for every signal
        (likelihood) evaluation. This can be a non-negligible waste of compute
        resources. By preloading in Python, the memory is allocated and
        references to that memory are not in general deleted until a sampling
        script exits and the kernel stops. The likelihood callback accesses
        the same memory upon each call without I/O.
        """
        return self._hot_atmosphere

    @hot_atmosphere.setter
    def hot_atmosphere(self, path):
        """ Preload a numerical atmosphere; subclass and override if required. """
        raise NotImplementedError('Implement setter if required.')

    @property
    def elsewhere_atmosphere(self):
        """ Get the numerical atmosphere buffers for elsewhere if used.

        The buffer layout is identical to that documented for
        :attr:`hot_atmosphere`: an :math:`n`-tuple of :math:`n-1` ordered
        one-dimensional parameter-value arrays followed by a flattened
        intensity buffer, e.g.
        ``self._elsewhere_atmosphere = (logT, logg, mu, logE, buf)``.
        (The original docstring's example assigned ``self._hot_atmosphere``;
        that was a copy-paste slip.)

        It is highly recommended that buffer preloading is used, instead
        of loading from disk in the customisable radiation field extension
        module, to avoid reading from disk for every signal
        (likelihood) evaluation. This can be a non-negligible waste of compute
        resources. By preloading in Python, the memory is allocated and
        references to that memory are not in general deleted until a sampling
        script exits and the kernel stops. The likelihood callback accesses
        the same memory upon each call without I/O.
        """
        return self._elsewhere_atmosphere

    @elsewhere_atmosphere.setter
    def elsewhere_atmosphere(self, path):
        """ Preload a numerical atmosphere; subclass and override if required. """
        raise NotImplementedError('Implement setter if required.')
    @property
    def hot(self):
        """ Get the instance of :class:`~.HotRegion.HotRegion`. """
        return self._hot

    @property
    def elsewhere(self):
        """ Get the instance of :class:`~.Elsewhere.Elsewhere`. """
        return self._elsewhere

    @property
    def everywhere(self):
        """ Get the instance of :class:`~.Everywhere.Everywhere`. """
        return self._everywhere

    @property
    def spacetime(self):
        """ Return instance of :class:`~.Spacetime.Spacetime`. """
        return self._spacetime

    @spacetime.setter
    def spacetime(self, obj):
        # Validate before storing: the flux integrators assume a Spacetime API.
        if not isinstance(obj, Spacetime):
            raise TypeError('Invalid type for spacetime object.')
        # otherwise store a reference to the spacetime object
        self._spacetime = obj
    def embed(self, fast_total_counts, threads):
        """ Embed the photosphere in an ambient Schwarzschild spacetime.

        In other words, generate a discrete representation of the photospheric
        radiation field and the null mapping from the photosphere to infinity,
        for use in flux integrators called by distant observers.

        :param fast_total_counts:
            Forwarded to the hot region's ``embed`` (fast-mode support).

        :param int threads:
            Number of threads for the embedding computations.
        """
        if self._everywhere is not None:
            self._everywhere.embed(self._spacetime,
                                   self,
                                   threads)
        else:
            if self._elsewhere is not None:
                self._elsewhere.embed(self._spacetime, threads)
                if self._hot is not None:
                    # NOTE(review): passes the elsewhere object's private
                    # cell-parameter helper into the hot-region embedding --
                    # tight coupling between the two objects; confirm intended.
                    self._hot.embed(self._spacetime,
                                    self,
                                    fast_total_counts,
                                    threads,
                                    self._elsewhere._compute_cellParamVecs)
            elif self._hot is not None:
                self._hot.embed(self._spacetime,
                                self,
                                fast_total_counts,
                                threads)
    def integrate(self, energies, threads):
        """ Integrate over the photospheric radiation field.

        Populates :attr:`signal` as a tuple of tuples of 2D arrays
        (energies x phases).

        :param energies:
            A one-dimensional :class:`numpy.ndarray` of energies in keV.

        :param int threads:
            Number of ``OpenMP`` threads to spawn for signal integration.
        """
        if self._everywhere is not None:
            spectrum = self._everywhere.integrate(self._spacetime,
                                                  energies,
                                                  threads,
                                                  self._hot_atmosphere)
            # A 1-D result is a time-invariant spectrum; reshape into a
            # single-phase column so the stored signal shape is uniform.
            if spectrum.ndim == 1:
                self._signal = ((spectrum.reshape(-1,1),),)
            else:
                self._signal = ((spectrum,),)
        else:
            if self._elsewhere is not None:
                spectrum = self._elsewhere.integrate(self._spacetime,
                                                     energies,
                                                     threads,
                                                     *self._elsewhere_atmosphere)
            if self._hot is not None:
                self._signal = self._hot.integrate(self._spacetime,
                                                   energies,
                                                   threads,
                                                   self._hot_atmosphere,
                                                   self._elsewhere_atmosphere)
                # Normalise to the tuple-of-tuples layout.
                if not isinstance(self._signal[0], tuple):
                    self._signal = (self._signal,)
            # add time-invariant component to first time-dependent component
            if self._elsewhere is not None:
                for i in range(self._signal[0][0].shape[1]):
                    self._signal[0][0][:,i] += spectrum
@property
def signal(self):
""" Get the stored signal.
:returns:
A tuple of tuples of *ndarray[m,n]*.
Here :math:`m` is the number of energies, and
:math:`n` is the number of phases. Units are photon/s/keV; the
distance is a fast parameter so the areal units are not yet
factored in. If the signal is a spectrum because the signal is
time-invariant, then :math:`n=1`.
"""
return self._signal
@property
def global_variables(self):
    """ Get a vector of global surface radiation field variables.

    :returns: An *ndarray[n]* of scalars required to evaluate variables
              that control the radiation field w.r.t local comoving frames
              across the stellar surface.

    The following code block is how one would pass the properties of a
    single-temperature circular ``HotRegion`` to the extension modules. If
    you have more than one ``HotRegion`` object merged into the subspace
    associated with the ``Photosphere`` object, they may each be prefixed,
    meaning that the set of parameter names below would need to be prefixed
    at the least, and unless you only want to image one ``HotRegion``, the
    parameters of the ``HotRegions`` object are required.

    .. highlight:: python
    .. code-block:: python

        return _np.array([self['super_colatitude'],
                          self['phase_shift'] * _2pi,
                          self['super_radius'],
                          self['super_temperature']])

    The phase shift controls the initial rotational phase of the
    ``HotRegion`` when imaging commences.

    """
    # Default implementation: a single global effective temperature.
    # If no 'temperature' parameter exists in the subspace, a subclass
    # must override this property (hence the NotImplementedError).
    try:
        return _np.array([self['temperature']])
    except KeyError:
        raise NotImplementedError('Subclass and provide an implementation.')
@property
def global_to_local_file(self):
    """ Get the path to the global-to-local mapping file, if one is set.

    :returns: The stored file path, or ``None`` when no path has been
              assigned via the setter.
    """
    # equivalent to try/except AttributeError returning None
    return getattr(self, '_global_to_local_file', None)
@global_to_local_file.setter
def global_to_local_file(self, filepath):
    """ Validate and store a path to a global-to-local mapping file.

    :raises TypeError: If the supplied path is not a string.
    :raises IOError: If no file exists at the supplied path.
    """
    # guard clauses: type first, then existence on disk
    if not isinstance(filepath, _six.string_types):
        raise TypeError('File path must be a string.')
    if not _os.path.isfile(filepath):
        raise IOError('File does not exist.')
    self._global_to_local_file = filepath
@property
def images(self):
    """ Get the precomputed image information.

    See the ``images`` setter for the element-by-element contents of
    the stored container.
    """
    return self._images
@images.setter
def images(self, images):
    """ Store an *ndarray[i,j,k]* of images and associated information.

    :param images:
        A 13-element container of image information (see the inline
        enumeration below). All elements bar the last three must be
        ndarrays; the last three may be floats or ``None``.

    :raises TypeError: If the container or its elements have
                       unexpected types.
    :raises ValueError: If the number of elements is not thirteen.

    """
    try:
        for i, obj in enumerate(images):
            if not isinstance(obj, _np.ndarray):
                if i < len(images) - 3:
                    raise TypeError('Image information was expected to be '
                                    'contained in an ndarray.')
                elif obj is not None and not isinstance(obj, float):
                    raise TypeError('Unexpected type for image information.')
    except TypeError:
        # also triggered if ``images`` is not iterable
        raise TypeError('An iterable of objects containing image '
                        'information must be supplied.')

    if len(images) != 13:
        # fixed message: thirteen elements are required (was 'six')
        raise ValueError('There must be thirteen objects specifying '
                         'image information.')

    msg = 'Image information element %i must have %i dimensions.'

    # tuple elements:
    #   energy-phase resolved signal (2D array)
    #   x coordinate on image plane (1D array)
    #   y coordinate on image plane (1D array)
    #   colatitude mapped to point (x,y) on image plane (1D array)
    #   azimuth mapped to point (x,y) on image plane (1D array)
    #   radial coord mapped to point (x,y) on image plane (1D array)
    #   phase lag
    #   redshift
    #   aberrated ray angle to local surface normal
    #   elliptical image-plane radial array
    #   elliptical image-plane semi-major axis
    #   elliptical image-plane semi-minor axis
    #   energy-phase resolved specific intensity sky maps (3D array)
    # the last element is None if intensities not cached
    assert images[0].ndim == 2, msg % (0, 2)
    assert images[1].ndim == 1, msg % (1, 1)
    assert images[2].ndim == 1, msg % (2, 1)
    assert images[3].ndim == 1, msg % (3, 1)
    assert images[4].ndim == 1, msg % (4, 1)
    assert images[5].ndim == 1, msg % (5, 1)
    assert images[6].ndim == 1, msg % (6, 1)
    assert images[7].ndim == 1, msg % (7, 1)
    assert images[8].ndim == 1, msg % (8, 1)
    assert images[9].ndim == 1, msg % (9, 1)

    if images[12] is not None:
        assert images[12].ndim == 3, msg % (12, 3)

    _num_rays = len(images[1])

    for i in range(2,9):
        # bug fix: previously ``len(images[i] == _num_rays)`` took the
        # length of an element-wise boolean comparison array, so the
        # mismatch check could never fire as intended
        assert len(images[i]) == _num_rays,\
            ('Ray map: array length mismatch (array at tuple index %i is '
             'not equal in length to array at tuple index 1).' % i)

    assert int( _m.sqrt( _num_rays - 1 ) ) == len(images[9]),\
        ('Ray map: array length mismatch for image-plane radial '
         'coordinate array (array at tuple index 9).')

    if images[12] is not None:
        assert images[12].shape[0] == images[0].shape[1],\
            ('Intensity cache dimension 0 does not match the length of '
             'dimension 1 of the specific flux array '
             '(at tuple index 1), meaning the number of phases '
             'is mismatched.')

        assert images[12].shape[2] == _num_rays,\
            ('Intensity cache dimension 2 does not match the length of '
             'ray map arrays (e.g., array at tuple index 1), meaning '
             'the number of rays is mismatched.')

    self._images = images
@images.deleter
def images(self):
    # drop cached imaging information (e.g., to free memory before
    # re-imaging or before animating sky maps written to disk)
    del self._images
def load_image_data(self, directory):
    """ Load imaging data from disk.

    :param str directory:
        Path to directory to load files from. Should contain files written
        to disk by :meth:`write_image_data`.

    """
    # file roots in the order expected by the ``images`` property setter
    file_roots = ['photon_specific_flux',
                  'x_coordinate',
                  'y_coordinate',
                  'colatitude',
                  'azimuth',
                  'radial_coord',
                  'phase_lag',
                  'redshift',
                  'abberated_angle',
                  'IP_radial_array']

    loaded = [_np.load(_join(directory, root + '.npy'))
              for root in file_roots]

    # the semi-major and semi-minor axes are stored together in one file
    ellipse_axes = _np.load(_join(directory, 'IP_ellipse_axes.npy'))
    loaded += [ellipse_axes[0], ellipse_axes[1]]

    loaded.append(_np.load(_join(directory, 'intensity.npy')))

    self.images = loaded
def write_image_data(self, directory):
    """ Write imaging data to disk.

    :param str directory:
        Path to directory to write to. Must exist.

    """
    # file roots matching elements 0-9 of the ``images`` container
    file_roots = ['photon_specific_flux',
                  'x_coordinate',
                  'y_coordinate',
                  'colatitude',
                  'azimuth',
                  'radial_coord',
                  'phase_lag',
                  'redshift',
                  'abberated_angle',
                  'IP_radial_array']

    for index, root in enumerate(file_roots):
        _np.save(_join(directory, root + '.npy'), self.images[index])

    # the two ellipse axes (floats) are stored together as one array
    _np.save(_join(directory, 'IP_ellipse_axes.npy'),
             _np.array(self.images[10:12], dtype=_np.double))

    _np.save(_join(directory, 'intensity.npy'), self.images[12])
@property
def photon_specific_flux(self):
    """ Get the photon specific flux as a function of phase and energy.

    :return: A two-dimensional :class:`numpy.ndarray`, where photon energy
             varies with row number, and phase varies with column number.

    """
    # element 0 of the images container (stored phase-along-columns)
    return self._images[0]
@property
def photon_specific_intensity(self):
    """ Get the photon specific intensity.

    Function of phase, energy and sky direction. Only available if
    intensity caching was activated when imaging.

    :return: A three-dimensional :class:`numpy.ndarray`, where the first
             dimension is phase, the second dimension is photon energy,
             and the third dimension is sky direction (flattened from
             two-dimensional sky coordinates to one dimension).

    """
    # element 12 of the images container; ``None`` if not cached
    return self._images[12]
@make_verbose('Imaging the star', 'Star imaged')
def image(self,
          reimage = False,
          reuse_ray_map = True,
          energies = None,
          num_phases = None,
          phases = None,
          phases_in_cycles = False,
          sqrt_num_rays = 100,
          epsabs_ray = 1.0e-12,
          epsrel_ray = 1.0e-12,
          max_steps = 100000,
          init_step = 0.1,
          image_plane_radial_increment_power = 1.0 / 2.0,
          threads = 1,
          cache_intensities = False,
          cache_energy_indices = None,
          cache_phase_indices = None,
          single_precision_intensities = True,
          plot_sky_maps = False,
          sky_map_kwargs = None,
          animate_sky_maps = False,
          free_memory = True,
          animate_kwargs = None,
          **kwargs):
    """ Image the star as a function of phase and energy.

    :param bool reimage:
        (Re)image the star. If ``False``, but the spacetime configuration
        has been updated or the photosphere parameters have been updated,
        a warning will be generated. In principle, one might want to plot
        sky maps using cached imaging information, or animate sky maps
        using images on disk, so reimaging is not forced if (non-fixed)
        parameters have been changed.

    :param bool reuse_ray_map:
        Reuse a precomputed ray map from the stellar surface to the image
        plane. If the spacetime configuration has changed (non-fixed
        parameters have changed), a cached ray map will *not* be reused. If
        the spacetime configuration is unchanged, but resolution settings
        have changed for ray tracing, pass ``False`` to adhere to the new
        resolution settings.

    :param ndarray[n] energies:
        Energies in keV to evaluate incident specific intensities at.

    :param int num_phases:
        The number of phases spanning the unit interval (zero and unity
        inclusive) to image at.

    :param ndarray[m] phases:
        Phases in *radians* or *cycles* at which to evaluate incident
        specific intensities at. If not ``None``, takes precedence over
        :obj:`num_phases`. The units need to be specified with the
        :obj:`phases_in_cycles` keyword argument: if ``False``, give
        the phase array in *radians*.

    :param bool phases_in_cycles:
        Is the phase array, if not ``None``, in units of rotational cycles?

    :param int sqrt_num_rays:
        Square-root of the number of rays. This is the level of
        discretisation in both a radial coordinate and a polar coordinate
        on an elliptical image plane.

    .. note::

        When the spacetime is static or extremely close to being static in
        a numerical context, at the resolutions we are interested in, we
        need to mitigate problems with rays that graze the pole
        infinitesimally close to the polar axis. In the vicinity of the
        polar coordinate singularity the ODE system is stiff and the
        solution is unstable. The most straightforward way to mitigate this
        is to perform a fallback forward Euler step for a ray that passes
        exactly through the pole, and use that ray as an approximation for
        the grazing ray that started very nearby on the image plane.
        Internally, if a ray intersects the image plane at
        :math:`x`-coordinate that is numerically very close to, but not
        exactly, zero (which would mean alignment to the rotational axis),
        it is approximated by a ray that intersects :math:`x=0`.
        Image-plane interpolation of quantities (such as intensity) for
        the purpose of visualisation will then smooth out any such
        artefacts.

        Moreover, as an additional measure against artefacts in the sky
        maps in the vicinity of the rotational pole, rays are distributed
        accordingly. For example, if we request :math:`n=400` rays per
        dimension, a maximal spacing of the rays from the rotational axis
        is achieved by rotating the *spokes* of rays (by up to
        :math:`\pm\pi/n`) so that no spoke is aligned (or anti-aligned)
        with the :math:`y`-direction.

    :param float epsabs_ray:
        Absolute error tolerance per ray to adhere to during numerical
        integration.

    :param float epsrel_ray:
        Relative error tolerance per ray to adhere to during numerical
        integration.

    :param int max_steps:
        Maximum number of steps to permit per ray before forced termination
        of integration.

    :param float init_step:
        The initial *suggested* step size at the image plane for the
        affine parameter for each ray.

    :param float image_plane_radial_increment_power:
        Controls the behaviour of the radial discretisation.
        Higher values towards unity result in linear spacing of rays with
        the radial coordinate. Lower values towards zero squeeze the rays
        towards the visible stellar limb, which is necessary for resolving
        images of extended radiating elements. Values above unity are not
        recommended, and would squeeze rays towards the image-plane origin,
        compromising resolution at the stellar limb.

    :param int threads:
        Number of OpenMP threads to spawn for parallel blocks of code.
        Parallel blocks include ray integration to generate a global ray
        map from image plane to surface; and image calculation at a
        sequence of rotational phases.

    :param float cache_intensities:
        Cache the photon specific intensity sky maps in memory, as a
        function of phase and energy? The type must be a float (greater than
        or equal to zero) or ``False``. The value represents the limiting
        size in GB that can be allocated for the intensity cache. Defaults
        to ``False`` (deactivated) because this dominates memory
        consumption. You need to activate this option if you want to plot
        the sky maps (see below). To activate, supply a limit. A hard limit
        of 1 GB is imposed for safety. To override, use the secret
        :obj:`_OVERRIDE_MEM_LIM` keyword argument to supply a positive
        limit in GB.

    :param ndarray[m] cache_phase_indices:
        A one-dimensional :class:`numpy.ndarray` of ``dtype=numpy.int32``,
        specifying the phase-array indices to cache intensities at. This
        is useful to save memory when you want to plot specific intensity
        skymaps but also compute the specific flux at many more phases. If
        ``None``, intensities will be cached at all phases subject to
        memory constraints. Note that the order of the list matters for
        plotting order, so the indices should generally increase, as should
        the phases themselves. If plotting the pulse-profile and spectrum,
        then this is a case where many more phases are useful for the
        resolving specific flux pulse-profile than are needed to plot
        specific intensity skymaps and specific flux spectra at three
        representative phases.

    :param ndarray[m] cache_energy_indices:
        A one-dimensional :class:`numpy.ndarray` of ``dtype=numpy.int32``,
        specifying the energy-array indices to cache intensities at. This
        is useful to save memory when you want to plot specific intensity
        skymaps but also compute the specific flux at many more energies. If
        ``None``, intensities will be cached at all energies subject to
        memory constraints. Note that the order of the list matters for
        plotting order, so the indices should generally increase, as should
        the energies themselves. If plotting the pulse-profile and spectrum,
        then this is a case where many more energies are useful for the
        resolving specific flux spectrum than are needed to plot specific
        intensity skymaps and specific flux pulse-profiles at three
        representative energies.

    :param bool single_precision_intensities:
        Cache the intensities in single precision? In most use cases,
        double precision is simply unnecessary, and because memory
        consumption can be high, choosing single precision can reduce
        memory requirements by a factor of two. Note that this only applies
        to the caching of intensities, not the calculation of intensities,
        which is done safely in double precision; only the final caching
        operation is a demotion cast to single precision. The default
        is single precision caching. Option ignored if intensities are not
        cached.

    :param bool plot_sky_maps:
        Plot (specific) intensity sky maps at a sequence of phases, or
        by averaging over phase. Maps can be made at one more energies
        or energy intervals. The images will be written to disk and
        can be used as frames in an animated sequence.

    :param dict sky_map_kwargs:
        Dictionary of keyword arguments passed to
        :meth:`~Photosphere._plot_sky_maps`. Refer to the associated
        method docstring for available options.

    :param bool animate_sky_maps:
        Compile images from disk into an animated sequence.

    :param bool free_memory:
        Try to free the imaging information before animating a sequence of
        sky maps written to disk, to try to avoid high memory usage. For
        safety the default is to free the memory, so deactivate this at your
        own risk. If there are other non-weak references created to the
        underlying objects, the memory may fail to be freed. In the
        methods below, the aim is that the native garbage collection cleans
        up the references because they only exist in the method local scope
        (no closures or globals).

    .. note::

        Memory used for plotting the sky maps and loading the images from
        disk to animate a phase sequence might not be straightforwardly
        freed despite efforts to do so, because of non-weak references
        covertly held by the matplotlib module.

    :param dict animate_kwargs:
        Dictionary of keyword arguments passed to
        :meth:`~Photosphere._animate`. Refer to the associated method
        docstring for available options.

    :param bool deactivate_all_verbosity:
        Deactivate the verbose output? Note that despite this keyword
        argument not appearing in the method signature, it is a valid
        switch.

    """
    ref = self._spacetime # geometry shortcut saves characters

    # ``deactivate_verbosity`` may be injected into scope by the
    # :func:`make_verbose` decorator; fall back gracefully if absent
    try:
        _DV = deactivate_verbosity
    except NameError:
        _DV = False

    _exc = ValueError('You need to cache intensity sky maps if you '
                      'want to plot them.')

    try:
        self.images
    except AttributeError:
        if not reimage:
            if plot_sky_maps:
                raise _exc
            else:
                yield ('Warning: star will not be reimaged... assuming '
                       'images exist on disk.')
    else:
        if not reimage and plot_sky_maps and self.images[-1] is None:
            raise _exc

    if phases is not None and not isinstance(phases, _np.ndarray):
        raise TypeError('Imaging phases must be in a 1D ndarray.')
    elif isinstance(phases, _np.ndarray):
        if phases_in_cycles:
            if phases[0] != 0.0 or phases[-1] != 1.0:
                _warning('Phase array does not span the unit interval.')
            # bug fix: convert to radians *without* mutating the
            # caller's array; in-place ``phases *= _2pi`` corrupted the
            # input on repeated calls and fails for integer dtypes
            phases = phases * _2pi
    elif phases is None:
        if num_phases is None or not isinstance(num_phases, int):
            raise TypeError('Integer number of phases required.')
        phases = _np.linspace(0.0, 1.0, num_phases) * _2pi

    if not isinstance(energies, _np.ndarray):
        raise TypeError('Imaging energies must be in a 1D ndarray.')

    # bug fix: normalise the optional kwarg dictionaries *before* first
    # use; previously ``sky_map_kwargs.get(...)`` below was evaluated
    # whilst ``sky_map_kwargs`` could still be ``None`` (AttributeError
    # with the default arguments)
    if sky_map_kwargs is None: sky_map_kwargs = {}
    if animate_kwargs is None: animate_kwargs = {}

    time_is_space = sky_map_kwargs.get('time_is_space', False)

    if reimage:
        if plot_sky_maps and not cache_intensities:
            raise _exc

        if cache_intensities:
            _override_mem_lim = kwargs.get('_OVERRIDE_MEM_LIM', 1.0)
            if not isinstance(_override_mem_lim, float):
                raise TypeError('Intensity cache limit override must be a '
                                'float.')
            elif _override_mem_lim < 0.0:
                raise ValueError('Intensity cache limit override must be '
                                 'positive or zero.')
            if not isinstance(cache_intensities, float):
                raise TypeError('Intensity cache limit must be a float.')
            elif not 0.0 <= cache_intensities <= _override_mem_lim:
                raise ValueError('Intensity cache limit must be positive '
                                 'and less than the safety limit, which '
                                 'in turn can be overridden as described '
                                 'in the method docstring.')

            if cache_energy_indices is None:
                # default: cache at every imaging energy
                cache_energy_indices = _np.arange(len(energies),
                                                  dtype=_np.int32)
            elif not isinstance(cache_energy_indices, _np.ndarray):
                raise TypeError('Energy indices for intensity caching '
                                'must be supplied in a 1D numpy.ndarray.')
            elif cache_energy_indices.dtype != _np.int32:
                raise TypeError('Energy indices for intensity caching '
                                'must be integers.')
            elif time_is_space and len(cache_energy_indices) != len(energies):
                raise TypeError('Sky maps must be cached at all energies.')

            if cache_phase_indices is None:
                # default: cache at every imaging phase
                cache_phase_indices = _np.arange(len(phases),
                                                 dtype=_np.int32)
            elif not isinstance(cache_phase_indices, _np.ndarray):
                # bug fix: these two branches previously referenced the
                # undefined name ``cache_phases_indices`` (NameError)
                raise TypeError('Phase indices for intensity caching '
                                'must be supplied in a 1D numpy.ndarray.')
            elif cache_phase_indices.dtype != _np.int32:
                raise TypeError('Phase indices for intensity caching '
                                'must be integers.')
            elif not time_is_space and len(cache_phase_indices) != len(phases):
                raise TypeError('Sky maps must be cached at all phases.')

            _req_size = 4.0 if single_precision_intensities else 8.0
            _req_size *= len(cache_phase_indices) * len(cache_energy_indices) # bytes
            _req_size *= sqrt_num_rays**2.0 # + 1.0 # origin ray negligible
            if _req_size/1.0e9 >= cache_intensities:
                raise MemoryError('Too much memory would be required to '
                                  'cache the intensities at this '
                                  'resolution. Try decreasing the number '
                                  'of rays, energies, and/or phases, or '
                                  'override the cache size limit if '
                                  'safe.')
            cache_intensities = True
        else:
            cache_intensities = False

        try:
            self.images
        except AttributeError:
            if reuse_ray_map:
                yield ('Warning: a ray map has not been cached... '
                       'tracing new ray set')
        else:
            # if spacetime configuration was updated
            if ref.needs_update or not reuse_ray_map:
                # try to free up memory; CPython reference counting means
                # this should have immediate effect
                del self.images
            else:
                # del self.images[0] # doesn't require much memory
                del self.images[-1] # requires far more memory

        try:
            _ray_map = tuple(self.images[1:])
            yield 'Cached ray set to be reused... commencing imaging'
        except AttributeError:
            _ray_map = None
            yield 'Commencing ray tracing and imaging'

        images = _integrate(threads,
                            ref.r_s,
                            ref.R,
                            ref.Omega,
                            self['mode_frequency'],
                            ref.zeta,
                            ref.epsilon,
                            ref.a, # dimensionless spin
                            ref.q, # mass quadrupole
                            ref.d,
                            ref.i,
                            sqrt_num_rays,
                            epsabs_ray,
                            epsrel_ray,
                            max_steps,
                            init_step,
                            image_plane_radial_increment_power,
                            self.global_variables,
                            energies,
                            phases,
                            cache_intensities,
                            cache_energy_indices,
                            cache_phase_indices,
                            single_precision_intensities,
                            _ray_map,
                            self.global_to_local_file,
                            self._hot_atmosphere)

        if images[0] == 1:
            raise Exception('A numerical error arose during imaging '
                            'computation... terminating simulation.')
        elif _ray_map is not None: # only recalculated info is returned
            # tuple elements:
            #   energy-phase resolved signal (2D array)
            #   energy-phase resolved specific intensity sky maps (3D array)
            # the last element is None if intensities not cached
            # transpose so signal phase increments along columns
            self.images[0] = images[1].T
            self.images.append(images[2])
        else: # the ray map is also returned
            # tuple elements:
            #   energy-phase resolved signal (2D array)
            #   x coordinate on image plane (1D array)
            #   y coordinate on image plane (1D array)
            #   colatitude mapped to point (x,y) on image plane (1D array)
            #   azimuth mapped to point (x,y) on image plane (1D array)
            #   radial coord mapped to point (x,y) on image plane (1D array)
            #   phase lag
            #   redshift
            #   aberrated ray angle to local surface normal
            #   elliptical image-plane radial array
            #   elliptical image-plane semi-major axis
            #   elliptical image-plane semi-minor axis
            #   energy-phase resolved specific intensity sky maps (3D array)
            # the last element is None if intensities not cached
            # transpose so signal phase increments along columns
            self.images = [images[1].T] + list(images[2:])

        yield 'Ray tracing complete.'
        yield 'Ray set cached.'
        if cache_intensities:
            yield 'Intensity caching complete.'
        else:
            if len(phases) > 1:
                yield 'Phase-resolved specific flux integration complete.'
            else:
                yield 'Specific flux integration complete.'

        # memoization
        self._spacetime([param.value for param in self._spacetime])

    if plot_sky_maps or animate_sky_maps:
        root_dir = sky_map_kwargs.pop('root_dir', './images')
        file_root = sky_map_kwargs.pop('file_root', 'skymap')
        file_root = _os.path.join(root_dir, file_root)
        phase_average = sky_map_kwargs.get('phase_average', False)
        if phase_average and time_is_space:
            raise ValueError('Cannot phase average sky maps when spatial '
                             'dimensions are used to render time.')
        if phase_average and animate_sky_maps:
            raise ValueError('Phase averaged sky maps cannot be animated.')
        bolometric = sky_map_kwargs.get('bolometric', False)
        if bolometric and not time_is_space:
            raise ValueError('Cannot energy-integrate sky maps when spatial '
                             'dimensions are used to render energy.')
        if bolometric and animate_sky_maps:
            raise ValueError('Bolometric sky maps cannot be animated.')

    if plot_sky_maps:
        if not _os.path.isdir(root_dir):
            _os.mkdir(root_dir)
        elif _os.path.isfile(file_root + '_0.png'):
            yield ('\nWarning: at least one image file exists '
                   'in ``%s``.' % root_dir)
            yield ('Attempting to move image files to a subdirectory '
                   'of ``%s``.' % root_dir)
            try: # to archive the existing image files
                from datetime import datetime
                obj = datetime.now()
                temp = '__datetime__%i.%i.%i__%i.%i.%i' % (obj.day,
                                                           obj.month,
                                                           obj.year,
                                                           obj.hour,
                                                           obj.minute,
                                                           obj.second)
                temp = _os.path.join(root_dir, 'archived_%s' % temp)
                _os.mkdir(temp)
                image_files = _os.listdir(root_dir)
                for image in image_files:
                    if '.png' in image:
                        _os.rename(_os.path.join(root_dir, image),
                                   _os.path.join(temp, image))
            except Exception as e:
                raise Exception('Aborting: image files would be '
                                'overwritten. %s' % str(e))
            else:
                yield 'Image files archived in subdirectory ``%s``.' % temp

        figsize, dpi, num_frames = self._plot_sky_maps(file_root,
                                       _phases = phases,
                                       _energies = energies,
                                       _c_idxs = cache_energy_indices,
                                       _c_pidxs = cache_phase_indices,
                                       _redraw = True,
                                       deactivate_verbosity = _DV,
                                       **sky_map_kwargs)
    elif animate_sky_maps:
        if reimage:
            raise ValueError('Star was reimaged but sky maps were not '
                             'plotted... aborting animation.')

        figsize, dpi, num_frames = self._plot_sky_maps(file_root,
                                       _phases = phases,
                                       _energies = energies,
                                       _c_idxs = cache_energy_indices,
                                       _c_pidxs = cache_phase_indices,
                                       _redraw = False,
                                       deactivate_verbosity = _DV,
                                       **sky_map_kwargs)

    if animate_sky_maps:
        if not _os.path.isfile(file_root + '_0.png'):
            raise IOError('No images located for animation.')

        if num_frames is None and reimage:
            if not time_is_space:
                num_frames = self.images[-1].shape[0]
            else:
                num_frames = self.images[-1].shape[1]
        elif num_frames is None:
            if not time_is_space:
                try:
                    num_frames = len(phases)
                except TypeError:
                    raise TypeError('You need to declare the image phases '
                                    'in order to include all images from disk.')
            else:
                try:
                    num_frames = len(energies)
                except TypeError:
                    raise TypeError('You need to declare the image energies '
                                    'in order to include all images from disk.')

        if free_memory:
            try:
                del self.images # try to free up memory
            except AttributeError:
                pass

        self._animate(file_root, num_frames,
                      figsize, dpi,
                      deactivate_verbosity = _DV,
                      **animate_kwargs)

    yield None
@make_verbose('Plotting intensity sky maps', 'Intensity sky maps plotted')
def _plot_sky_maps(self,
_file_root,
_phases,
_energies,
_c_idxs,
_c_pidxs,
_redraw,
threads = 1,
with_pulse_profile_and_spectrum = False,
time_is_space = False,
panel_layout = None,
panel_indices = None,
cycles = 1,
phase_average = False,
bolometric = False,
energy_bounds = None,
phase_bounds = None,
num_levels = 100,
add_zero_intensity_level = True,
normalise_each_panel = True,
invert = False,
annotate_energies=False,
annotate_phases=False,
energy_annotation_format='[%.1f keV]',
phase_annotation_format='[%.1f cycles]',
annotate_location=(0.05,0.05),
colormap = None,
figsize = (10,10),
usetex = False,
fontsize_scale = 1.0,
tick_spacing = (0.2,1.0),
tick_length_scaling = 1.0,
dpi_scale = 1.0,
**kwargs):
""" Helper method for specific intensity sky map visualization.
Uses Delaunay triangulation to create an irregular sky mesh and
calculate photon (specific) intensity contours at a sequence of phases.
Each figure generated contains a sequence of panels arranged in
one or two spatial dimensions. Each panel is an intensity sky map,
either at a particular energy or integrated over a finite energy
interval. Panels cannot mix specific and integrated intensities. Only
a some sequence of energy (intervals), in any order, can be identified
as labelling panels in one or two spatial dimensions. Time (rotational
phase), whilst it could be defined to label a sequence of panels, is
only identified as a labelling a sequence of *figures*.
Similarly, sequence of energies could be identified as a variable
labelling a sequence of figures, but is not. Moreover, energy and time
could label panels in to spatial dimensions, but such mixing is not
permitted. Finally, variables controlling the source-receiver system
could be identified as labels of panels and/or figures, but this
is also not supported. More complicated rendering patterns may be
supported in future versions, but for now can be achieved via
custom extensions building off of the current functionality.
:param str _file_root:
Relative or absolute path to parent directory for images,
extended with the root name for the image files. E.g., the
default is ``./images/skymap``. You do not need to change this
unless you wish to, and is otherwise reserved for internal use.
You may supply a custom file path via keywords ``root_dir`` and
``file_root`` upon calling :meth:`~Photosphere.image`, which are
concatenated appropriately.
:param ndarray[n] _phases:
The phases at which the star was imaged. This is handled
internally, so do *not* pass a keyword argument. If phase averaging,
the minimum and maximum phases must be zero and :math:`2\pi`
radians (i.e., zero and one cycles).
:param ndarray[n] _energies:
The energies at which the star was imaged. This is handled
internally, so do *not* pass a keyword argument.
:param ndarray[n] _c_idxs:
The energy indices for which the intensity maps were cached for
memory-efficieny plotting. This is handled internally, so do *not*
pass a keyword argument.
:param ndarray[n] _c_pidxs:
The energy indices for which the intensity maps were cached for
memory-efficieny plotting. This is handled internally, so do *not*
pass a keyword argument.
:param bool _redraw:
Redraw the sky maps? This is handled internally, so do *not* pass
a keyword argument.
:param int threads:
Number of OpenMP threads to spawn.
:param bool with_pulse_profile_and_spectrum:
A setting that fundamentally changes some behaviours. If
deactivated (the default), only photon (specific) intensity skymaps
are plotted. The following frame is an example:
.. image:: _static/_skymap_plot.png
:param bool with_pulse_profile_and_spectrum:
If deactivated, the following keyword arguments do not have a use:
:obj:`cycles` and :obj:`colormap`. If *activated*, photon specific
intensity skymaps at three energies are plotted in each frame,
together with their associated photon specific flux pulse-profiles,
and also the photon specific flux spectrum at a finer array of
energies. Use :obj:`panel_indices` to select the energies. The
pulse-profiles are each normalised to their respective maxima,
and the spectrum shows the relative orders of magnitude of the
specific flux signals. The following frame is an example:
.. image:: _static/_skymap_with_pulse_profile_and_spectrum_plot.png
:param bool with_pulse_profile_and_spectrum:
If activated, a subset of other keyword arguments are ignored:
:obj:`energy_bounds`, :obj:`phase_average`, :obj:`panel_layout`,
and :obj:`invert`. The panel layout is rigid (not customisable) in
order to focus on the plot quality. If more energies were added, the
information density in the plot-space might become too high without
adding much more new information.
:param bool time_is_space:
Each image is at constant energy (or is a spectral trace up to
that energy) instead of being at constant phase (or a pulse-profile
trace up to that phase).
:param tuple[int,int] panel_layout:
Two elements: the number of rows and columns of panels. If ``None``,
a layout is automatically determined based on the number of
images to be plotted.
:param iterable panel_indices:
These ordered integers will be used to select intensity information
by indexing the energy dimension of a 3D intensity array. If
specific intensites are plotted, these integers should index
a subset of energies at which the star was imaged. If intensities
are plotted, these integers should index a subset of the energy
intervals over which specific intensities are integrated. See
the :obj:`energy_bounds` keyword argument. If the flux is
calculated at more energies than specific intensities are cached
at, then these integers need to index the
:obj:`cache_energy_indices` array appropriately.
:param int cycles:
Nuber of cycles to generate images for. Only relevant if one cycle
is different to the next in terms of the frame, e.g., most commonly
if plotting the pulse-profile traces over more than one cycle. If
frames separated by one cycle are identical, declare the number
of cycles to the animator instead.
:param bool phase_average:
Average each sky map over one revolution (cycle) in phase?
Note that the resulting image is incompatible with the currently
supported animation mode. The following image is an example:
.. image:: _static/_skymap_phaseaveraged.png
:param bool bolometric:
Integrate each sky map over energy? Note that the resulting image
is incompatible with the currently supported animation mode.
:param iterable energy_bounds:
A set of two-element containers. Each container has an ordered pair
of energies which delimit an integration domain. Specific intensity
is integrated along each sky direction, at each phase, between
these energy bounds. The bounds must be between the minimum and
maximum energies at which the star was imaged. If ``None``,
specific intensity sky maps will be plotted (the default). This
option is ignored if energy is defined as the time dimension,
meaning the sky maps are animated with respect to energy.
:param iterable phase_bounds:
A set of two-element containers. Each container has an ordered pair
of energies which delimit an integration domain. Specific intensity
is integrated along each sky direction, at each phase, between
these energy bounds. The bounds must be between the minimum and
maximum energies at which the star was imaged. If ``None``,
specific intensity sky maps will be plotted (the default). This
option is ignored if phase is defined as the time dimension,
meaning the sky maps are animated with respect to phase.
.. note::
To use this functionality, the specific intensities must have
been cached at all energies the specific flux is calculated at.
:param int num_levels:
Number of contour levels in (specific) intensity, distributed
between minimum finite, and maximum values per panel, or over all
panels. See :obj:`normalise_each_panel` keyword argument.
:param bool add_zero_intensity_level:
Add a contour level at zero intensity such that the colormap
minimum corresponds to zero intensity? If ``True`` (the default),
then the background sky, where there is by definition zero model
intensity, has the same colour only as the subset of the image of
the surface that is not radiating in the model. The disadvantage of
this choice is that the intensity structure of the image as a
function of phase and sky direction is generally not as well-
resolved by the colour and greyscale variation. In the limit
that the minimum finite intensity of the image is far smaller than
the maximum, then the intensity resolution by colour and greyscale
values is highest. If ``False``, then the minimum colour is
assigned to the minimum finite intensity as a function of phase
and sky direction. This also maximally resolves the intensity by
colour and greyscale values, which is useful for models wherein the
surface radiation field is constructed, for instance, from
uniform-temperature localised hot regions. However, in this case
the background sky colour is undefined; the background sky colour
is thus set to the minimum colour in the colormap, meaning that the
faintest subset of the image over phase and sky direction merges
with the background sky in terms of colour and greyscale values.
:param bool normalise_each_panel:
Normalise the contour colormap to each skymap panel uniquely, or
globally over all panels? The former yields relative intensity
as function of phase and sky direction for an energy or energy
interval, whilst the latter offers more spectral information but
emission in some panels may not be discernable.
:param bool invert:
Invert the greyscale to show bright pixels as dark on a white
plot background. If a colormap is manually supplied, this just
controls the plot background colour. Inversion is recommended
for printed format, whilst a black background is more intuitive
when in digital format.
:param obj colormap:
Usage dependent on other settings. If not plotting the pulse-profile
and spectrum, then this is simply a (matplotlib) colormap object.
Choose something appropriate and *accessible* for a non-negative
scalar field (sky intensities). If plotting the pulse-profile and
spectrum too, then :obj:`colormap` can be the string
``'RedGreyBlue'`` to invoke the default colour scheme which is reds
for the lowest energy intensity skymap and pulse-profile; pure
greyscale for the intermediate energy; and blues for the highest
energy. Alternatively, if :obj:`colormap` is simply ``None``, the
default greyscale will be used for all energies, with all
pulse-profiles in black. Lastly, you can supply a three-element
list or tuple of colormap objects, ordered from lowest to highest
energy; the pulse-profile line colours will be retrieved as the
midpoint of the colormap. Note that the background sky colour will
be set to the lowest colour in each colourmap.
:param tuple(int,int) figsize:
The figure size (width, height) in *inches*. If the dimensions are
inconsistent with the aspect ratio suggested by the
:obj:`panel_layout` settings, the height of the figure will be
automatically rescaled to achieve congruence, meaning each panel is
approximately square.
:param bool usetex:
Use TeX backend for figure text.
:param float fontsize_scale:
Use this argument to scale the font size of figure text relative
to the default font size that is automatically determined based
on the approximate panel size, which is in turn based on the
figure size and the panel layout.
:param tuple[float,float] tick_spacing:
A two-element container. The first element is the minor tick
spacing, and the second is the major tick spacing, for both
the :math:`x` and :math:`y` directions on the image plane. The
units are both the maximum possible angular size of the image of the
surface in an ambient Schwarzschild spacetime,
:math:`R_{\\rm eq}/\\sqrt{1-r_{\\rm s}/R_{\\rm eq}}`.
:param float tick_length_scaling:
Use this argument to scale the axis tick lengths relative to the
default lengths that are automatically determined based on the
panel size.
:param float dpi_scale:
Use this argument to scale the dots per inch of the figure,
relative to the default that is automatically determined based
on the panel size.
"""
file_root = _file_root
phases = _phases
energies = _energies
redraw = _redraw
if with_pulse_profile_and_spectrum:
if len(panel_indices) != 3:
raise ValueError('Selected plot type designed for showcasing '
'the specific photon intensity skymaps and '
'their associated specific photon flux '
'pulse-profiles or spectra specifically at '
'at three energies or phases to avoid '
'excessive information density.')
panel_layout = (2,3)
if not isinstance(cycles, int):
raise TypeError('Declare the number of cycles with an integer.')
elif cycles < 1:
cycles = 1 # quietly ignore input
if not time_is_space:
num_frames = len(phases)
else:
num_frames = len(energies)
elif cycles > 1:
if not time_is_space:
num_frames = len(phases) + (cycles - 1) * (len(phases) - 1)
else:
num_frames = len(energies)
elif panel_layout is None:
x = int(_m.ceil(_m.sqrt(len(panel_indices))))
if x * (x - 1) >= len(panel_indices):
panel_layout = (x, x - 1)
else:
panel_layout = (x, x)
if not time_is_space:
num_frames = len(phases)
else:
num_frames = len(energies)
# try to improve the aspect ratio so that each panel is
# approximately square
width = panel_layout[1] + (panel_layout[1] - 1) * 0.2
_hspace = 0.25 if with_pulse_profile_and_spectrum else 0.2
height = panel_layout[0] + (panel_layout[0] - 1) * _hspace
aspect_ratio = height/float(width)
if aspect_ratio != 1.0:
if _np.abs(figsize[1]/float(figsize[0]) - aspect_ratio)/aspect_ratio > 0.1:
figsize = (figsize[0], figsize[0] * aspect_ratio)
# calculate an appropriate dpi to resolve each panel adequately
dpi = (max(panel_layout) / 2.0) * 150.0 * dpi_scale
if redraw:
rcParams['text.usetex'] = usetex
try:
iter(panel_indices)
except TypeError:
raise TypeError('Panel indices object must be iterable.')
if _np.product(panel_layout) < len(panel_indices):
raise ValueError('There are too few panels for the requested '
'number of intensity sky maps.')
# some scaling for appropriate fontsize
panel_size = max(figsize[1]/float(panel_layout[0]),
figsize[0]/float(panel_layout[1]))
fontsize = (panel_size/5.0) * 14.0 * fontsize_scale
rcParams['font.size'] = int(fontsize)
tick_length = int((panel_size/5.0) * 8 * tick_length_scaling)
# get coordinates of irregular set of points for triangulation
X = self.images[1]
Y = self.images[2]
#if not isinstance(energies, _np.ndarray):
# raise TypeError('Imaging energies must be in an ndarray.')
#if not isinstance(phases, _np.ndarray):
# raise TypeError('Imaging phases must be in an ndarray.')
images = self.images[-1]
if time_is_space: # transpose dimensions
_images_ = _np.zeros((images.shape[1],
images.shape[0],
images.shape[-1]), dtype=_np.double)
for i in range(images.shape[-1]):
_images_[:,:,i] = images[:,:,i].T
images = _images_
if with_pulse_profile_and_spectrum:
if not time_is_space:
flux = self.images[0]
else:
flux = self.images[0].T
if not with_pulse_profile_and_spectrum and not time_is_space and energy_bounds:
with verbose(True,
'Integrating specific intensity over energy intervals',
'Integrated specific intensity over energy intervals'):
for bounds in energy_bounds:
if bounds[0] > bounds[1]:
raise ValueError('Energy bounds in a tuple must be '
'ordered.')
for bound in bounds:
if not energies[0] <= bound <= energies[-1]:
raise ValueError('Extrapolation would be required.')
if len(panel_indices) < len(energy_bounds):
yield 'Warning: fewer panels than energy intervals.'
integrated = _np.zeros((images.shape[0],
len(energy_bounds),
images.shape[2]), dtype=_np.double)
intensities = _np.zeros((images.shape[1],
images.shape[2]), dtype=_np.double)
for i in range(images.shape[0]): # phases
intensities[...] = images[i,...] # sky directions
for k in range(len(energy_bounds)):
bounds = _np.log10( _np.array(energy_bounds[k]) )
_integrated = energy_integrator(threads,
intensities,
_np.log10(energies),
bounds)
integrated[i,k,:] = _integrated[0,:]
images = integrated
elif not with_pulse_profile_and_spectrum and not time_is_space:
if len(panel_indices) != len(_c_idxs):
yield ('Warning: number of panels not equal to number of '
'phases.')
elif not with_pulse_profile_and_spectrum and time_is_space:
if len(panel_indices) != len(_c_pidxs):
yield ('Warning: number of panels not equal to number of '
'phases.')
if not with_pulse_profile_and_spectrum and not time_is_space and phase_average:
with verbose(True,
'Averaging (specific) intensity over rotational phase',
'Averaged (specific) intensity over rotational phase'):
if phases[0] != 0.0 or phases[-1] != _2pi:
raise ValueError('Minimum and maximum phases at which '
'star is imaged must be zero and unity '
'if you are phase averaging.')
averaged = _np.zeros((1,
images.shape[1],
images.shape[2]), dtype = _np.double)
intensities = _np.zeros((images.shape[2],
images.shape[0]), dtype = _np.double)
for i in range(images.shape[1]): # energies
for j in range(images.shape[2]): # sky directions
intensities[j,:] = images[:,i,j]
_averaged = phase_integrator(1.0, # exposure time
_np.array([0.0, 1.0]),
intensities,
phases / _2pi,
0.0) # phase shift
for j in range(images.shape[2]):
averaged[:,i,j] = _averaged[j,:]
images = averaged
if not with_pulse_profile_and_spectrum and time_is_space and phase_bounds:
with verbose(True,
'Integrating specific intensity over phase intervals',
'Integrated specific intensity over phase intervals'):
for bounds in phase_bounds:
if bounds[0] > bounds[1]:
raise ValueError('Phase bounds in a tuple must be '
'ordered.')
for bound in bounds:
if not phases[0] <= bound <= phases[-1]:
raise ValueError('Extrapolation would be required.')
if len(panel_indices) < len(phase_bounds):
yield 'Warning: fewer panels than phase intervals.'
integrated = _np.zeros((images.shape[0],
len(phase_bounds),
images.shape[2]), dtype=_np.double)
intensities = _np.zeros((images.shape[2],
images.shape[1]), dtype=_np.double)
for i in range(images.shape[0]): # energies
intensities[...] = images[i,...].T # sky directions
for k in range(len(phase_bounds)):
bounds = _np.array(phase_bounds[k])
_integrated = phase_integrator(1.0,
bounds,
intensities,
phases / _2pi,
0.0)
integrated[i,k,:] = _integrated[:,0]
images = integrated
if not with_pulse_profile_and_spectrum and time_is_space and bolometric:
with verbose(True,
'Integrating bolometric intensity',
'Averaged bolometric intensity'):
if phases[0] != 0.0 or phases[-1] != _2pi:
raise ValueError('Minimum and maximum phases at which '
'star is imaged must be zero and unity '
'if you are phase averaging.')
integrated = _np.zeros((images.shape[0],
1,
images.shape[2]), dtype = _np.double)
intensities = _np.zeros((images.shape[1],
images.shape[2]), dtype = _np.double)
for i in range(images.shape[0]): # phases
for j in range(images.shape[2]): # sky directions
intensities[:,j] = images[i,:,j]
bounds = _np.log10( _np.array([energies[0], energies[-1]]) )
_integrated = energy_integrator(threads,
intensities,
_np.log10(energies),
bounds)
for j in range(images.shape[2]):
integrated[i,:,j] = _integrated[:,j]
images = integrated
if normalise_each_panel:
with verbose(True,
'Normalising each sky map panel separately',
'Normalised sky map panels separately'):
# normalise intensity for each individual panel
levels = []
if not time_is_space:
for j in range(images.shape[1]): # at each energy
# find extreme intensities over discrete set of image
# phases and sky directions
MIN = _np.min(images[:,j,:][images[:,j,:] > 0.0])
MAX = _np.max(images[:,j,:])
levels.append(_np.linspace(MIN, MAX, num_levels))
if add_zero_intensity_level:
levels[-1] = _np.array([0.0, 0.001*MIN] + list(levels[-1]))
else:
for j in range(images.shape[0]): # at each energy
# find extreme intensities over discrete set of image
# phases and sky directions
MIN = _np.min(images[j,:,:][images[j,:,:] > 0.0])
MAX = _np.max(images[j,:,:])
levels.append(_np.linspace(MIN, MAX, num_levels))
if add_zero_intensity_level:
levels[-1] = _np.array([0.0, 0.001*MIN] + list(levels[-1]))
else:
with verbose(True,
'Normalising sky map panels globally'
'Normalised sky map panels globally'):
MIN = _np.min(images[:,:,:][images[:,:,:] > 0.0])
MAX = _np.max(images[:,:,:])
levels = _np.linspace(MIN, MAX, num_levels)
if add_zero_intensity_level:
levels = _np.array([0.0, 0.001*MIN] + list(levels))
# because of default tick formatting and a minus sign,
# the left and bottom margins need to be different
left = 0.09 * (fontsize/14.0)
bottom = 0.11 * (fontsize/14.0)
right = 0.975
top = bottom + (right - left)
ref = self._spacetime
fig = Figure(figsize = figsize)
canvas = FigureCanvas(fig)
if with_pulse_profile_and_spectrum:
if not time_is_space and colormap == 'RedGreyBlue':
cmap = [cm.Reds_r, cm.Greys_r, cm.Blues_r]
_line_colors = [cm.Reds_r(0.25),
cm.Greys_r(0.0),
cm.Blues_r(0.25)]
elif not isinstance(colormap, (list, tuple)):
cmap = [cm.Greys_r] * 3
_line_colors = [cm.Greys_r(0.0)] * 3
else:
cmap = colormap
_line_colors = [cmap[j](0.5) for j in range(len(cmap))]
gs = gridspec.GridSpec(panel_layout[0],
panel_layout[1],
left=left, right=right,
bottom=bottom, top=top,
wspace=0.2, hspace=_hspace)
axes = [fig.add_subplot(gs[j]) for j in range(len(panel_indices))]
pp_ax = fig.add_subplot(gs[len(panel_indices):-1])
spec_ax = fig.add_subplot(gs[-1])
line_styles = ['-', '--', '-.']
else:
cmap = colormap or (cm.Greys if invert else cm.Greys_r)
gs = gridspec.GridSpec(panel_layout[0],
panel_layout[1],
left=left, right=right,
bottom=bottom, top=top,
wspace=0.2, hspace=_hspace)
axes = [fig.add_subplot(gs[j]) for j in range(len(panel_indices))]
if with_pulse_profile_and_spectrum:
if not time_is_space: annotate_energies = True
else: annotate_phases = True
_I = 10
for i in range(images.shape[0]):
if phase_average:
yield 'Rendering phase-averaged images'
elif bolometric:
yield 'Rendering bolometric images'
elif i == 0 and images.shape[0] < 10:
yield 'Rendering images'
elif i == 0 and images.shape[0] >= 10:
yield 'Rendering image numbers [%i, %i]'%(i+1, i+_I)
elif i%_I == 0:
yield 'Rendering image numbers (%i, %i]'%(i, i+_I)
for j, idx in enumerate(panel_indices):
ax = axes[j]
if with_pulse_profile_and_spectrum:
_cmap = cmap[j]
else:
_cmap = cmap
if (with_pulse_profile_and_spectrum or
_np.product(panel_layout) - j - 1 < panel_layout[1]):
if ref.R < 1.5 * ref.r_s:
ax.set_xlabel(r'$(2x/(3\sqrt{3}r_{\rm s}))$')
else:
ax.set_xlabel(r'$(x/R_{\rm eq})\sqrt{1-r_{\rm s}/R_{\rm eq}}$')
if j % panel_layout[1] == 0:
if ref.R < 1.5 * ref.r_s:
ax.set_ylabel(r'$(2y/(3\sqrt{3}r_{\rm s}))$')
else:
ax.set_ylabel(r'$(y/R_{\rm eq})\sqrt{1-r_{\rm s}/R_{\rm eq}}$')
_veneer(tick_spacing, tick_spacing, ax,
length = tick_length)
if with_pulse_profile_and_spectrum:
ax.set_facecolor(_cmap(0.0))
else:
ax.set_facecolor('white' if invert else 'black')
if not time_is_space:
lvls = levels if isinstance(levels, _np.ndarray) else levels[idx]
else:
lvls = levels if isinstance(levels, _np.ndarray) else levels[i]
ax.tricontourf(X,
Y,
images[i,idx,:],
cmap = _cmap,
levels = lvls)
# correct the aspect ratio
x_view = ax.xaxis.get_view_interval()
diff = x_view[1] - x_view[0]
ax.xaxis.set_view_interval(x_view[0] - diff * 0.025,
x_view[1] + diff * 0.025)
y_view = ax.yaxis.get_view_interval()
ax.yaxis.set_view_interval(y_view[1] - diff * 1.025,
y_view[1] + diff * 0.025)
if not time_is_space and annotate_energies:
ax.text(annotate_location[0], annotate_location[1],
s=energy_annotation_format % energies[_c_idxs[idx]],
fontdict={'color': 'black' if invert else 'white'},
transform=ax.transAxes)
if time_is_space and annotate_phases:
ax.text(annotate_location[0], annotate_location[1],
s=phase_annotation_format % (phases[_c_pidxs[idx]]/_2pi),
fontdict={'color': 'black' if invert else 'white'},
transform=ax.transAxes)
if with_pulse_profile_and_spectrum: # plot summaries
_upper_view_lim = _np.max(flux) * 100.0
_lower_view_lim = _np.min(flux)
_view_lim_diff = _np.log10(_upper_view_lim)
_view_lim_diff -= _np.log10(_lower_view_lim)
for j, idx in enumerate(panel_indices):
_idx = _c_idxs[idx]
_diff = _np.log10(flux[_idx,i])
_diff -= _np.log10(_lower_view_lim)
spec_ax.axvline(energies[_idx],
0.0,
_diff/_view_lim_diff,
color=_line_colors[j],
ls=line_styles[j], lw=1.0)
spec_ax.set_xscale('log')
spec_ax.set_yscale('log')
spec_ax.set_xlim(energies[0], energies[-1])
spec_ax.set_ylim(_lower_view_lim, _upper_view_lim)
_veneer(None, None, spec_ax, length = tick_length,
log=(True, True))
spec_ax.set_xlabel('Photon energy [keV]')
if not time_is_space:
spec_ax.plot(energies, flux[:,i], 'k-', lw=1.0)
else:
for j, idx in enumerate(panel_indices):
_idx = _c_pidxs[idx]
spec_ax.plot(energies, flux[_idx,:i],
color=_line_colors[j],
ls=line_styles[j], lw=1.0,
label=phase_annotation_format % phases[_idx])
if not time_is_space:
for _cycle in range(cycles): # plot pulse-profiles
_ext_phases = []
for _i in range(_cycle):
_ext_phases += list(phases[1 if _i > 0 else 0:] + _i * _2pi)
_ext_phases += list(phases[1 if _cycle > 0 else 0:i+1] + _cycle * _2pi)
_ext_phases = _np.array(_ext_phases)
for j, idx in enumerate(panel_indices):
_idx = _c_idxs[idx]
_max = _np.max(flux[_idx,:])
_ext_flux = []
for _i in range(_cycle):
_ext_flux += list(flux[_idx, 1 if _i > 0 else 0:])
_ext_flux += list(flux[_idx, 1 if _cycle > 0 else 0:i+1])
_ext_flux = _np.array(_ext_flux)
pp_ax.plot(_ext_phases/_2pi,
_ext_flux/_max,
color=_line_colors[j],
ls=line_styles[j], lw=1.0,
label=energy_annotation_format % energies[_idx])
pp_ax.set_xlim(0.0, float(cycles))
pp_ax.set_ylim(0.0,1.2)
_veneer((0.1,0.5), (0.05,0.2), pp_ax, length = tick_length)
pp_ax.legend(loc='upper center', ncol=3, mode='expand',
handlelength=4.0,
frameon=False, fancybox=False)
pp_ax.set_xlabel('Phase [$2\pi$ radians]')
pp_ax.set_ylabel('photons/cm$^2$/s/keV')
fig.savefig(file_root + '_%i.png' % (len(_ext_phases) - 1),
dpi=dpi)
pp_ax.clear()
spec_ax.clear()
else:
pp_ax.plot(phases/_2pi,
flux[i,:]/_np.max(flux[i,:]), 'k-', lw=1.0)
pp_ax.set_xlim(0.0, 1.0)
pp_ax.set_ylim(0.0,1.2)
_veneer((0.1,0.5), (0.05,0.2), pp_ax, length = tick_length)
pp_ax.legend(loc='upper center', ncol=3, mode='expand',
handlelength=4.0,
frameon=False, fancybox=False)
pp_ax.set_xlabel('Phase [$2\pi$ radians]')
pp_ax.set_ylabel('photons/cm$^2$/s/keV')
fig.savefig(file_root + '_%i.png' % i, dpi=dpi)
else:
fig.savefig(file_root + '_%i.png' % i, dpi=dpi)
for ax in axes:
ax.clear()
for ax in axes:
ax.cla()
plt.close(fig)
yield figsize, dpi, num_frames
@staticmethod
@make_verbose('Animating intensity sky maps', 'Intensity sky maps animated')
def _animate(_file_root, _num_frames, _figsize, _dpi,
             cycles = 1, fps = None, **kwargs):
    """ Helper method to animate the intensity sky maps.

    Loads the per-frame PNG files previously written to disk (named
    ``<file_root>_<i>.png``) and stitches them into an MP4 video using
    matplotlib's ffmpeg writer.

    :param str _file_root:
        Reserved for internal use.

    :param int _num_frames:
        Reserved for internal use.

    :param int _figsize:
        Reserved for internal use.

    :param int _dpi:
        Reserved for internal use.

    :param int cycles:
        Number of explicit cycles to generate frames for. The frames from
        the principal cycle are reused, so the images are periodic in
        their phase evolution. There is no delay between cycles in this
        type of repetition.

    :param int fps:
        Frames per second. If ``None``, then one cycle (assuming images
        have been precomputed for a complete cycle), consisting of
        so many frames, will exhibit a period of one second.

    :param bool repeat:
        Inform *ffmpeg* to enter a loop when video play back commences.
        Deprecated.

    :param repeat_delay:
        Delay between repeats in milliseconds. Deprecated.

    :param str ffmpeg_path:
        Absolute path to *ffmpeg* executable. If ``None``, defaults
        to matplotlib rcParams settings, but no guarantee that the package
        will be found even if installed on system. Deprecated.

    """
    file_root = _file_root
    num_frames = _num_frames
    figsize = _figsize
    dpi = _dpi

    # Full-canvas, decoration-free axes: each PNG frame fills the figure.
    fig = plt.figure(figsize = figsize)
    ax = plt.subplot(111)
    plt.axis('off')
    fig.subplots_adjust(left=0, bottom=0, right=1, top=1,
                        wspace=None, hspace=None)

    # animation code based on:
    # http://jakevdp.github.io/blog/2012/08/18/matplotlib-animation-tutorial/
    filename = file_root + '_0.png'
    img = mgimg.imread(filename)
    imgplot = ax.imshow(img, aspect='auto')

    cycles = int(cycles)
    if cycles < 1:
        cycles = 1  # quietly coerce invalid input to a single cycle
    elif cycles > 1:
        # Reuse principal-cycle frames; each extra cycle contributes
        # (_num_frames - 1) frames because frame 0 repeats the last frame.
        num_frames += (cycles - 1) * (_num_frames - 1)

    class _context: # mutable nonlocal namespace in closure
        _cycle_idx = 0 # track cycle
        _j = -1 # track image index
        _last = -1 # last frame number handled; guards against repeat calls

    def _update(i): # load phase-ordered set of images
        # FuncAnimation can invoke the callback more than once for the
        # same frame number; skip the redundant file re-read in that case.
        if _context._last == i:
            return imgplot,
        # At each cycle boundary restart from image index 1, since image 0
        # of a new cycle coincides with the final image of the previous one.
        if _context._cycle_idx == 0 and i == _num_frames:
            _context._j = 1
            _context._cycle_idx = 1
        elif i == _num_frames + _context._cycle_idx * (_num_frames - 1):
            _context._j = 1
            _context._cycle_idx += 1
        else:
            _context._j += 1
        _context._last = i
        filename = file_root + '_%i.png' % _context._j
        img = mgimg.imread(filename)
        imgplot.set_data(img)
        return imgplot,

    ani = animation.FuncAnimation(fig, _update, frames=num_frames, blit=True)

    if fps is None:
        fps = num_frames # all frames span one second

    # secret keyword argument; not clear whether should be exposed to user
    bitrate = kwargs.get('bitrate', -1) # default is let _mpl choose

    filename = file_root + '_animated.mp4'
    yield 'Writing to disk: %s' % filename
    ani.save(filename, writer = 'ffmpeg',
             dpi = dpi, fps = fps, bitrate = bitrate,
             extra_args=['-vcodec', 'libx264'])

    fig.clf() # this or ax.cla() needed to free memory
    plt.close(fig)

    yield None
# Refresh the Photosphere class documentation at import time — presumably
# regenerates the docstring from the attached helpers; confirm against
# the _update_doc implementation.
Photosphere._update_doc()
def _veneer(x, y, axes, lw=1.0, length=8, log=(False, False)):
    """ Apply consistent tick locators and spine styling to *axes*.

    ``x`` and ``y`` are ``(minor, major)`` tick-spacing pairs (either element
    may be ``None``); if a pair itself is ``None`` and the corresponding axis
    is not logarithmic, automatic locators are installed instead.
    """
    # The x and y axes receive identical treatment, so handle them in one pass.
    specs = ((x, axes.xaxis, log[0]),
             (y, axes.yaxis, log[1]))
    for spacing, axis, is_log in specs:
        if spacing is not None:
            if spacing[1] is not None:
                axis.set_major_locator(MultipleLocator(spacing[1]))
            if spacing[0] is not None:
                axis.set_minor_locator(MultipleLocator(spacing[0]))
        elif not is_log:
            axis.set_major_locator(AutoLocator())
            axis.set_minor_locator(AutoMinorLocator())
    axes.tick_params(which='major', colors='black', length=length, width=lw)
    axes.tick_params(which='minor', colors='black', length=int(length/2), width=lw)
    plt.setp(axes.spines.values(), linewidth=lw, color='black')
|
11502516
|
import sys
import time
from qibullet import SimulationManager
from qibullet import PepperVirtual
from qibullet import NaoVirtual
from qibullet import RomeoVirtual
if __name__ == "__main__":
    simulation_manager = SimulationManager()

    prompt = "Which robot should be spawned? (pepper/nao/romeo): "
    # raw_input only exists on Python 2; input is the 3.x equivalent.
    if sys.version_info > (3, 0):
        rob = input(prompt)
    else:
        rob = raw_input(prompt)

    client = simulation_manager.launchSimulation(gui=True)

    # Dispatch table mapping robot names to their spawn helpers.
    spawners = {
        "nao": simulation_manager.spawnNao,
        "pepper": simulation_manager.spawnPepper,
        "romeo": simulation_manager.spawnRomeo}

    choice = rob.lower()
    if choice in spawners:
        robot = spawners[choice](client, spawn_ground_plane=True)
    else:
        print("You have to specify a robot, pepper, nao or romeo.")
        simulation_manager.stopSimulation(client)
        sys.exit(1)

    # Subscribe to the IMU of the robot with a default frequency, then
    # unsubscribe immediately (API demonstration only).
    robot.subscribeImu()
    robot.unsubscribeImu()

    # Retrieve the IMU of the robot as an Imu object.
    imu = robot.getImu()
    print("Type of the robot IMU: " + str(type(imu)))

    # Subscribe again, this time with an explicit frequency.
    robot.subscribeImu(frequency=100)  # Or imu.setFrequency(100)

    try:
        while True:
            # Equivalent to calling imu.getValues(); accelerometer and
            # gyroscope data can also be fetched separately via
            # robot.getImuGyroscopeValues() / imu.getAccelerometerValues().
            angular_velocity, linear_acceleration = robot.getImuValues()

            print("Gyroscope values: " + str(angular_velocity))
            print("Accelerometer values: " + str(linear_acceleration))
            time.sleep(1.0)

    except KeyboardInterrupt:
        pass
    finally:
        # Normally robot.unsubscribeImu() (or imu.unsubscribe) would stop the
        # IMU retrieval process, but stopSimulation tracks and kills active
        # robot module processes automatically.
        simulation_manager.stopSimulation(client)
|
11502518
|
import sys
from django import template
from django.conf import settings
# Module-level registry required by Django for the filter decorator below.
register = template.Library()
@register.filter
def sorted_apps(value):
    """Sort the admin app list, and each app's model list, according to
    ``settings.ADMIN_DASHBOARD_LAYOUT``.

    Apps and models that are absent from the layout sort last
    (``sys.maxsize``). If no layout is configured the input is returned
    unchanged. Sorting is performed in place and the (sorted) list is
    returned, so existing callers that iterate the result keep working.
    """
    if not hasattr(settings, 'ADMIN_DASHBOARD_LAYOUT'):
        return value

    app_layout = settings.ADMIN_DASHBOARD_LAYOUT

    def _get_app_sequence(app):
        # Apps missing from the layout fall to the end of the list.
        return app_layout.get(app['app_label'], {}).get('sequence', sys.maxsize)

    def _get_model_sequence(app, model):
        models = app_layout.get(app['app_label'], {}).get('models', [])
        # Single O(n) scan instead of `in` followed by `.index`.
        try:
            return models.index(model["object_name"])
        except ValueError:
            return sys.maxsize

    app_list = value
    app_list.sort(key=_get_app_sequence)
    for app in app_list:
        # list.sort mutates in place, so no reassignment back into the
        # app dict is required (the original redundantly did so).
        app['models'].sort(key=lambda model, app=app: _get_model_sequence(app, model))
    return app_list
|
11502545
|
import os
import time
import datetime as dt
import numpy as np
from netCDF4 import Dataset
from scipy.interpolate import interp1d
import scipy.ndimage as ndimage
from utils.c_wrapper import cvort, cvort4
from utils.utils import cfind_extrema, upscale_field
from load_settings import settings
import setup_logging
# Root directory holding the 20th Century Reanalysis ('c20_full') data files.
C20_DATA_DIR = os.path.join(settings.DATA_DIR, 'c20_full')
# Mean Earth radius in metres, and the derived circumference (metres).
EARTH_RADIUS = 6371000
EARTH_CIRC = EARTH_RADIUS * 2 * np.pi
# Number of ensemble members provided per field by the C20 product.
NUM_ENSEMBLE_MEMBERS = 56

# Module logger on the 'st.find_vortmax' channel.
log = setup_logging.get_logger('st.find_vortmax')
class C20Data(object):
'''Class used for accessing data from C20 Reanalysis project.
This acts as a wrapper around netCDF4.Datasets and makes it easy to view data.
Typically it exposes the prmsl and vort850/vort9950 fields for all ensemble members.
It will load these fields, along with corresponding maxima (vorticity) and minima (pressure)
each time a new date is set.
:param year: Year from which to take data
:param fields: List of C20 fields that are to be loaded, or use 'all' for complete set
:param version: Version of C20 data to use
'''
def __init__(self, year, fields='all', version=settings.C20_VERSION):
self._year = year
self.dx = None
self.date = None
self.version = version
log.info('C20Data: year={}, version={}'.format(year, version))
if fields == 'all':
# rh995 has been removed.
self.fields = ['u9950', 'v9950', 'u850', 'v850', 'prmsl', 't9950', 't850', 'cape', 'pwat']
else:
self.fields = fields
if 'u9950' in self.fields and 'v9950' in self.fields:
self.calc_9950_vorticity = True
else:
self.calc_9950_vorticity = False
if 'u850' in self.fields and 'v850' in self.fields:
self.calc_850_vorticity = True
else:
self.calc_850_vorticity = False
fields = ', '.join(self.fields)
log.info('Using: {}'.format(fields))
self._load_datasets(self._year)
def set_year(self, year):
'''Sets a year and loads the relevant dataset'''
self._year = year
self.close_datasets()
self._load_datasets(self._year)
def close_datasets(self):
'''Closes all open datasets'''
for dataset in self.nc_datasets.values():
dataset.close()
def _load_datasets(self, year):
'''Loads datasets for a given year
Just sets up the NetCDF4 objects, doesn't actually load any data apart from
lons/lats and dates.
'''
# All datasets have lon/lat/time info in them, so any will do.
any_dataset = None
dataset_fieldname = None
self.nc_datasets = {}
for field in self.fields:
# e.g. ~/stormtracks_data/data/c20_full/2005/prmsl_2005.nc
path = os.path.join(C20_DATA_DIR, self.version, str(year), '{}_{}.nc'.format(field, year))
if not os.path.exists(path):
msg = 'File does not exist: {}'.format(path)
log.error(msg)
raise RuntimeError(msg)
log.debug('Loading {} from {}'.format(field, path))
dataset = Dataset(path)
dataset_fieldname = field
any_dataset = dataset
self.nc_datasets[field] = dataset
start_date = dt.datetime(1, 1, 1)
hours_since_JC = any_dataset.variables['time'][:]
self.number_enseble_members = any_dataset.variables[dataset_fieldname].shape[1]
self.lons = any_dataset.variables['lon'][:]
self.lats = any_dataset.variables['lat'][:]
self.dates = np.array([start_date + dt.timedelta(hs / 24.) -
dt.timedelta(2) for hs in hours_since_JC])
dlon = self.lons[2] - self.lons[0]
# N.B. array as dx varies with lat.
# lons, lats are in degres.
self.dx = (dlon * np.cos(self.lats * np.pi / 180) * EARTH_CIRC) / 360.
self.dy = (self.lats[0] - self.lats[2]) * EARTH_CIRC / 360.
# Interpolation functions.
self.f_lon = interp1d(np.arange(0, 180), self.lons)
self.f_lat = interp1d(np.arange(0, 91), self.lats)
self.first_date()
def first_date(self):
'''Sets date to the first date of the year (i.e. Jan the 1st)'''
return self.set_date(self.dates[0])
def next_date(self):
'''Moves date on by one timestep (6hr)'''
index = np.where(self.dates == self.date)[0][0]
if index < len(self.dates):
date = self.dates[index + 1]
return self.set_date(date)
else:
log.warn('Trying to set date beyond date range')
return None
def prev_date(self):
'''Moves date back by one timestep (6hr)'''
index = np.where(self.dates == self.date)[0][0]
if index > 0:
date = self.dates[index - 1]
return self.set_date(date)
else:
log.warn('Trying to set date beyond date range')
return None
def set_date(self, date):
'''Sets date and loads all data for that date
Will have no effect if there is no difference in date.
:param date: date to load
:returns: date if successful, otherwise None
'''
if date != self.date:
try:
log.debug("Setting date to {0}".format(date))
index = np.where(self.dates == date)[0][0]
self.date = date
self._process_ensemble_data(index)
except:
self.date = None
log.exception('Problem loading date {}'.format(date))
raise
return date
def _cvorticity(self, u, v):
'''Calculates the (2nd order) vorticity by calling into a c function'''
vort = np.zeros_like(u)
cvort(u, v, u.shape[0], u.shape[1], self.dx, self.dy, vort)
return vort
def _cvorticity4(self, u, v):
'''Calculates the (4th order) vorticity by calling into a c function
Algorithm was taken from Walsh's code'''
vort = np.zeros_like(u)
cvort4(u, v, u.shape[0], u.shape[1], self.dx, self.dy, vort)
return vort
def _process_ensemble_data(self, index):
'''
Processes data for one ensemble member
Loads the relevant data and then performs a variety of calculations on it.
At a minimum, prmsl, vort and vort4 will be calculated for the current date, as well
as their maxima/minima as appropriate. Additionally (depending on how class is configured),
smoothed_vort and up_vort (upscaled_vorticity) can be calculated.
Rough times for each step are recorded.
:param index: index of timestep in C20 data
'''
start = time.time()
self._load_ensemble_data(index)
end = time.time()
fields = ', '.join(self.fields)
log.debug(' Loaded {0} in {1}'.format(fields, end - start))
if self.calc_9950_vorticity:
start = time.time()
self._calculate_vorticities('9950')
end = time.time()
log.debug(' Calculated 9950 vorticity in {0}'.format(end - start))
if self.calc_850_vorticity:
start = time.time()
self._calculate_vorticities('850')
end = time.time()
log.debug(' Calculated 850 vorticity in {0}'.format(end - start))
start = time.time()
self._find_min_max_from_fields()
end = time.time()
log.debug(' Found maxima/minima in {0}'.format(end - start))
def _load_ensemble_data(self, index):
'''Loads the raw data from the NetCDF4 files'''
# N.B. it is very important how the data is loaded. The data is stored in NetCDF4 files,
# which in turn uses HDF5 as a storage medium. HDF5 allows for compression of particular
# subsets of data ('chunks'). If you access the data in terms of these chunks, it will be
# **much** faster, which is why all data for one date is loaded at a time, i.e. 56x91x180
# cells, or num_ensemble_members x lat x lon.
# This can be seen by looking at e.g. c20data.prmsl.shape, which will be (56, 91, 180).
for field in self.fields:
if field in ['u9950', 'u850', 'u250']:
setattr(self, field, - self.nc_datasets[field].variables[field][index])
else:
setattr(self, field, self.nc_datasets[field].variables[field][index])
def _calculate_vorticities(self, pressure_level):
'''Calculates vort (2nd order) and vort4 (4th order)
Uses c functions for speed.'''
vort = []
# self.vort4 = []
if pressure_level == '9950':
for em in range(NUM_ENSEMBLE_MEMBERS):
vort.append(self._cvorticity(self.u9950[em], self.v9950[em]))
# vort4.append(self._cvorticity4(self.u[em], self.v[em]))
elif pressure_level == '850':
for em in range(NUM_ENSEMBLE_MEMBERS):
vort.append(self._cvorticity(self.u850[em], self.v850[em]))
# vort4.append(self._cvorticity4(self.u[em], self.v[em]))
setattr(self, 'vort{}'.format(pressure_level), vort)
    def _find_min_max_from_fields(self):
        '''Finds the minima (prmsl) and maxima (vort/vort4)'''
        if 'prmsl' in self.fields:
            # NOTE(review): self.pmaxs is initialized but never populated below,
            # even though cfind_extrema returns index_pmaxs — confirm whether
            # pressure maxima are intentionally unused.
            self.pmins, self.pmaxs = [], []
            for ensemble_member in range(NUM_ENSEMBLE_MEMBERS):
                e, index_pmaxs, index_pmins = cfind_extrema(self.prmsl[ensemble_member])
                # Each entry is (pressure value, (lon, lat)) for one minimum.
                self.pmins.append([(self.prmsl[ensemble_member][pmin[0], pmin[1]], (self.lons[pmin[1]], self.lats[pmin[0]]))
                                   for pmin in index_pmins])
        if 'u9950' in self.fields and 'v9950' in self.fields:
            self.vmaxs9950 = []
            for ensemble_member in range(NUM_ENSEMBLE_MEMBERS):
                e, index_vmaxs, index_vmins = cfind_extrema(self.vort9950[ensemble_member])
                # Each entry is (vorticity value, (lon, lat)) for one maximum.
                self.vmaxs9950.append([
                    (self.vort9950[ensemble_member][vmax[0], vmax[1]], (self.lons[vmax[1]], self.lats[vmax[0]]))
                    for vmax in index_vmaxs])
        if 'u850' in self.fields and 'v850' in self.fields:
            self.vmaxs850 = []
            for ensemble_member in range(NUM_ENSEMBLE_MEMBERS):
                e, index_vmaxs, index_vmins = cfind_extrema(self.vort850[ensemble_member])
                self.vmaxs850.append([
                    (self.vort850[ensemble_member][vmax[0], vmax[1]], (self.lons[vmax[1]], self.lats[vmax[0]]))
                    for vmax in index_vmaxs])
|
11502551
|
from django.forms import SelectMultiple, HiddenInput
from django_filters import MultipleChoiceFilter
from service_catalog.models import Support
from service_catalog.models.support import SupportState
from Squest.utils.squest_filter import SquestFilter
class SupportFilter(SquestFilter):
    """Filter set for Support list views.

    Filters by title, instance (id/name), opening user and support state;
    state is a multi-select backed by the SupportState choices.
    """
    class Meta:
        model = Support
        fields = ['title', 'instance__id', 'instance__name', 'user_open__username', 'state']
    state = MultipleChoiceFilter(
        choices=SupportState.choices,
        widget=SelectMultiple(attrs={'data-live-search': "true"}))
    def __init__(self, *args, **kwargs):
        super(SupportFilter, self).__init__(*args, **kwargs)
        # Friendlier labels for the generated form; instance id is filtered
        # via URL parameter only, so its widget is hidden.
        self.filters['instance__name'].field.label = "Instance"
        self.filters['instance__id'].field.widget = HiddenInput()
        self.filters['user_open__username'].field.label = "User open"
|
11502567
|
import pandas as pd
import rpy2.robjects as ro
from rpy2.robjects.packages import importr
from rpy2.robjects import pandas2ri
from rpy2.robjects.conversion import localconverter
import rpy2.situation
from rpy2.robjects.packages import importr
import rpy2.robjects as robjects
import rpy2.interactive as r
# R packages loaded through rpy2; `mice` provides the multivariate
# imputation routines used by MissingValue.handinng_missing_value below.
utils = importr("utils")
base = importr('base')
mice = importr('mice')
class MissingValue:
    """Report and impute missing values in a pandas DataFrame using R's `mice`
    package (accessed through rpy2)."""
    def __init__(self):
        pass
    def missing_value_count(self, df):
        # Count NaNs per column and print a summary line for each feature.
        # Returns None; output is print-only.
        nan_lists = {}
        for col in df.columns:
            nan_counter = 0
            for nan in df[col].isnull():
                if nan:
                    nan_counter += 1
            nan_lists[col] = nan_counter
        for k, v in nan_lists.items():
            print('feature {}, total missing value count {}'.format(k, v))
    def handinng_missing_value(self, df, col, method = 'pmm'):
        '''
        Impute missing values with R's mice package.
        :param df: pandas DataFrame to impute
        :param col: str, continuous-variable column names joined by '-' (split in R)
        :param method: mice imputation method, e.g. 'pmm'
        :return: imputed DataFrame converted back to pandas
        '''
        with localconverter(ro.default_converter + pandas2ri.converter):
            r_from_pd_df = ro.conversion.py2rpy(df)
        # NOTE(review): pd_from_r_df is computed but never used below —
        # presumably a leftover round-trip sanity check; confirm.
        with localconverter(ro.default_converter + pandas2ri.converter):
            pd_from_r_df = ro.conversion.rpy2py(r_from_pd_df)
        # R function: drops rows/columns with >20% missing among the selected
        # continuous columns, imputes the rest with mice, then re-attaches the
        # untouched columns.
        rpy2.robjects.r('''
             imputing_missing_data <- function(df, s, methods = 'pmm'){
                 cols <- as.vector(unlist(strsplit(s, split = "-")))
                 df_continus_variable <- df[,cols]
                 for (i in 1:dim(df_continus_variable)[1]){
                     if(sum(is.na(df_continus_variable[i,]))/length(df_continus_variable[i,]) > 0.2){
                         df_continus_variable <- df_continus_variable[-i,]
                     }
                 }
                 for (i in 1:dim(df_continus_variable)[2]){
                     if(sum(is.na(df_continus_variable[,i]))/length(df_continus_variable[,i]) > 0.2){
                         df_continus_variable <- df_continus_variable[,-i]
                     }
                 }
                 impute <- mice(df_continus_variable, m=5, maxit=50, meth=methods, seed=500)
                 df_continus_variable <- complete(impute, 1)
                 df_other <- df
                 df_other[,cols] <- NULL
                 df <- cbind(df_continus_variable,df_other)
                 df
             }
        ''')
        rf = rpy2.robjects.r['imputing_missing_data']
        with localconverter(ro.default_converter + pandas2ri.converter):
            data= ro.conversion.rpy2py(rf(r_from_pd_df,col,methods = method))
        return data
if __name__ == '__main__':
    # Smoke test: report per-column NaN counts, then run mice-based imputation
    # over columns V1..V9 (both methods print; the first returns None).
    frame = pd.read_csv('data.csv')
    handler = MissingValue()
    print(handler.missing_value_count(frame))
    print(handler.handinng_missing_value(frame, 'V1-V2-V3-V4-V5-V6-V7-V8-V9', method='pmm'))
|
11502597
|
import unittest
from expungeservice.models.record import Record
from tests.factories.case_factory import CaseFactory
from tests.factories.charge_factory import ChargeFactory
class TestRecordObject(unittest.TestCase):
    """Record.total_balance_due should sum the balances of all cases."""
    def test_print_balance_in_cents(self):
        # Two cases with balances 123.00 and 246.00 sum to 369.00.
        record = Record(tuple([CaseFactory.create(balance="123.00"), CaseFactory.create(balance="246.00")]))
        assert record.total_balance_due == 369.00
    def test_print_balance_in_cents_empty(self):
        # A case created with the factory default balance contributes 0.
        record = Record(tuple([CaseFactory.create()]))
        assert record.total_balance_due == 0.00
class TestChargeMethod(unittest.TestCase):
    """Record.charges should flatten all charges across cases, preserving
    case order then per-case charge order."""
    def setUp(self):
        # Case 1 holds one charge, case 2 holds two.
        self.charge_zero = ChargeFactory.create(case_number="1")
        self.case_1 = CaseFactory.create(case_number="1", charges=tuple([self.charge_zero]))
        self.charge_one = ChargeFactory.create(case_number="2")
        self.charge_two = ChargeFactory.create(case_number="2")
        self.case_2 = CaseFactory.create(case_number="2", charges=tuple([self.charge_one, self.charge_two]))
        self.record = Record(tuple([self.case_1, self.case_2]))
    def test_num_cases(self):
        assert len(self.record.charges) == 3
    def test_charges_index_0(self):
        assert self.record.charges[0] == self.charge_zero
    def test_charges_index_1(self):
        assert self.record.charges[1] == self.charge_one
    def test_charges_index_2(self):
        assert self.record.charges[2] == self.charge_two
|
11502603
|
import torch
from typing import List
from pytorch_toolbelt.losses.functional import sigmoid_focal_loss
from torch.nn.modules.loss import _Loss
from pytorch_toolbelt.losses.functional import soft_dice_score
import torch.nn as nn
class FocalLoss(_Loss):
    def __init__(self, alpha=0.5, gamma=2, ignore_index=None):
        """
        Focal loss for multi-class problem.
        https://github.com/BloodAxe/pytorch-toolbelt/blob/develop/pytorch_toolbelt/losses/focal.py
        :param alpha: balancing factor passed to sigmoid_focal_loss
        :param gamma: focusing parameter passed to sigmoid_focal_loss
        :param ignore_index: If not None, targets with given index are ignored
        """
        super().__init__()
        self.alpha = alpha
        self.gamma = gamma
        self.ignore_index = ignore_index
    def forward(self, label_input, label_target):
        # One-vs-rest: accumulate a binary sigmoid focal loss per class.
        # label_input: (N, C, ...) logits; label_target: (N, ...) class indices
        # — assumed from the indexing below; confirm with callers.
        num_classes = label_input.size(1)
        loss = 0
        # Filter anchors with -1 label from loss computation
        if self.ignore_index is not None:
            not_ignored = label_target != self.ignore_index
        for cls in range(num_classes):
            # Binary target: 1 where the pixel belongs to `cls`, else 0.
            cls_label_target = (label_target == cls).long()
            cls_label_input = label_input[:, cls, ...]
            if self.ignore_index is not None:
                # Boolean-mask indexing flattens both tensors identically.
                cls_label_target = cls_label_target[not_ignored]
                cls_label_input = cls_label_input[not_ignored]
            loss += sigmoid_focal_loss(
                cls_label_input, cls_label_target, gamma=self.gamma, alpha=self.alpha
            )
        return loss
class MulticlassDiceLoss(_Loss):
    """Implementation of Dice loss for multiclass (semantic) image segmentation task
    """
    def __init__(
        self,
        classes: List[int] = None,
        from_logits=True,
        weight=None,
        reduction="elementwise_mean",
    ):
        """
        :param classes: optional subset of class indices to include in the loss
        :param from_logits: if True, apply softmax over dim=1 before scoring
        :param weight: optional per-class loss weights
        :param reduction: 'elementwise_mean', 'sum', or anything else for no reduction
        """
        super(MulticlassDiceLoss, self).__init__(reduction=reduction)
        self.classes = classes
        self.from_logits = from_logits
        self.weight = weight
    def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor:
        """
        :param y_pred: NxCxHxW
        :param y_true: NxHxW
        :return: scalar
        """
        if self.from_logits:
            y_pred = y_pred.softmax(dim=1)
        n_classes = y_pred.size(1)
        smooth = 1e-3
        # One loss slot per class; classes absent from `classes` stay 0.
        loss = torch.zeros(n_classes, dtype=torch.float, device=y_pred.device)
        if self.classes is None:
            classes = range(n_classes)
        else:
            classes = self.classes
        if self.weight is None:
            weights = [1] * n_classes
        else:
            weights = self.weight
        # NOTE(review): zip pairs positionally — if `classes` is a subset like
        # [2, 5], weights[0] applies to class 2, not weights[2]; confirm that
        # callers supply weights in the same order as `classes`.
        for class_index, weight in zip(classes, weights):
            dice_target = (y_true == class_index).float()
            dice_output = y_pred[:, class_index, ...]
            num_preds = dice_target.long().sum()
            if num_preds == 0:
                # No ground-truth pixels for this class: skip to avoid a
                # degenerate Dice score.
                loss[class_index] = 0
            else:
                dice = soft_dice_score(
                    dice_output, dice_target, from_logits=False, smooth=smooth
                )
                loss[class_index] = (1.0 - dice) * weight
        if self.reduction == "elementwise_mean":
            return loss.mean()
        if self.reduction == "sum":
            return loss.sum()
        return loss
class BCEMulticlassDiceLoss(MulticlassDiceLoss):
    """Sum of multiclass Dice loss and BCE-with-logits loss.

    :param eps: numerical-stability epsilon (stored for reference)
    :param activation: activation name (stored for reference)
    """
    __name__ = "bce_multiclass_dice_loss"

    def __init__(self, eps=1e-7, activation="sigmoid"):
        # BUG FIX: previously `super().__init__(eps, activation)` forwarded
        # eps/activation positionally into MulticlassDiceLoss.__init__(classes,
        # from_logits, ...), so eps was misread as the class list and
        # activation as the from_logits flag. Call the parent with its own
        # defaults and keep eps/activation as plain attributes instead.
        super().__init__()
        self.eps = eps
        self.activation = activation
        self.bce = nn.BCEWithLogitsLoss(reduction="mean")

    def forward(self, y_pr, y_gt):
        """Return dice(y_pr, y_gt) + bce(y_pr, y_gt).

        NOTE(review): Dice expects y_gt as class indices (NxHxW) while
        BCEWithLogitsLoss expects float targets shaped like y_pr — confirm the
        target format callers pass in.
        """
        dice = super().forward(y_pr, y_gt)
        bce = self.bce(y_pr, y_gt)
        return dice + bce
|
11502633
|
from pygments.lexer import RegexLexer, bygroups
from pygments.token import *
class JoedbcLexer(RegexLexer):
    """Pygments lexer for the joedbc compiler-options language.

    State machine: each keyword pushes a state that consumes the identifiers
    that follow it (namespace paths, table names, comma-separated field lists).
    """
    name = 'joedbc'
    aliases = ['joedbc']
    filenames = ['*.joedbc']
    tokens = {
        'root': [
            (r'namespace\s+', Keyword, 'namespace'),
            (r'create_unique_index\s+', Keyword, 'index_table_fields'),
            (r'create_index\s+', Keyword, 'index_table_fields'),
            (r'set_table_null_initialization\s+', Keyword, 'table_constant'),
            (r'generate_c_wrapper', Keyword)
        ],
        # namespace: one identifier, optionally continued with `::` segments.
        'namespace': [
            (r'[a-zA-Z_]\w*', Name.Namespace, 'namespace_continuation')
        ],
        'namespace_continuation': [
            (r'::', Operator, 'namespace')
        ],
        # index declarations: <index name> <table name> <field[,field...]>.
        'index_table_fields': [
            (r'[a-zA-Z_]\w*\s+', Name.Variable, 'table_fields')
        ],
        'table_fields': [
            (r'[a-zA-Z_]\w*\s+', Name.Class, 'fields')
        ],
        'fields': [
            (r'[a-zA-Z_]\w*', Name.Variable, 'fields_continuation')
        ],
        'fields_continuation': [
            (r',', Operator, 'fields')
        ],
        # set_table_null_initialization takes no highlighted arguments here.
        'table_constant' : [
        ]
    }
def setup(app):
    # Sphinx extension hook: register the lexer so ``joedbc`` code blocks highlight.
    # NOTE(review): passing an *instance* to add_lexer is the legacy (< Sphinx 4.0)
    # convention; Sphinx 4+ expects the lexer class — confirm the target Sphinx version.
    app.add_lexer('joedbc', JoedbcLexer())
|
11502691
|
from runtime import *
'''
multiple inheritance
'''
class A:
    """First base class in the multiple-inheritance example."""

    def foo(self) -> int:
        """Return the constant 1."""
        result = 1
        return result
class B:
    """Second base class in the multiple-inheritance example."""

    def bar(self) -> int:
        """Return the constant 2."""
        result = 2
        return result
class C( A, B ):
    """Multiple inheritance: combines A.foo and B.bar, and overrides foo.

    NOTE(review): this file targets a Python-to-JavaScript transpiler runtime
    (see `from runtime import *` and the `A.prototype` access below) — the
    override would not run under CPython as written.
    """
    def call_foo_bar(self) -> int:
        # Exercises both inherited methods through `self`.
        a = self.foo()
        a += self.bar()
        return a
    ## extend foo ##
    def foo(self) -> int:
        #a = A.foo(self) ## TODO fix me, or support `super`
        a = A.prototype.foo(self) ## workaround
        a += 100
        return a
def main():
    """Assertions verifying the multiple-inheritance semantics above.

    NOTE(review): c.foo() == 101 relies on C.foo calling the base A.foo via
    `A.prototype` — transpiler-runtime behavior, not CPython.
    """
    a = A()
    assert( a.foo()==1 )
    b = B()
    assert( b.bar()==2 )
    c = C()
    assert( c.foo()==101 )
    assert( c.bar()==2 )
    assert( c.call_foo_bar()==103 )
main()
|
11502710
|
import hashlib
import hmac
import json
import logging
import time
from json.decoder import JSONDecodeError
from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Tuple
from urllib.parse import urlencode
import requests
from rotkehlchen.accounting.ledger_actions import LedgerAction
from rotkehlchen.accounting.structures.balance import Balance
from rotkehlchen.assets.asset import Asset
from rotkehlchen.assets.utils import symbol_to_asset_or_token
from rotkehlchen.constants.assets import A_EUR
from rotkehlchen.errors.asset import UnknownAsset
from rotkehlchen.errors.misc import RemoteError
from rotkehlchen.errors.serialization import DeserializationError
from rotkehlchen.exchanges.data_structures import (
AssetMovement,
Location,
MarginPosition,
Price,
Trade,
)
from rotkehlchen.exchanges.exchange import ExchangeInterface, ExchangeQueryBalances
from rotkehlchen.inquirer import Inquirer
from rotkehlchen.logging import RotkehlchenLogsAdapter
from rotkehlchen.serialization.deserialize import (
deserialize_asset_amount,
deserialize_fee,
deserialize_timestamp_from_date,
)
from rotkehlchen.types import ApiKey, ApiSecret, Timestamp, TradeType
from rotkehlchen.user_messages import MessagesAggregator
from rotkehlchen.utils.misc import iso8601ts_to_timestamp
if TYPE_CHECKING:
from rotkehlchen.db.dbhandler import DBHandler
# Module logger, wrapped in rotki's adapter for structured kwargs logging.
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)
# This corresponds to md5('') and is used in signature generation
MD5_EMPTY_STR = 'd41d8cd98f00b204e9800998ecf8427e'
# Pairs can be found in Basic API doc:
# https://www.bitcoin.de/en/api/tapi/v4/docu#handelspaarliste_c2f
BITCOINDE_TRADING_PAIRS = (
    'btceur',
    'bcheur',
    'btgeur',
    'etheur',
    'bsveur',
    'ltceur',
    'iotabtc',
    'dashbtc',
    'gntbtc',
    'ltcbtc',
)
def bitcoinde_asset(symbol: str) -> Asset:
    """Resolve a bitcoin.de currency symbol (any case) to a rotki Asset."""
    normalized = symbol.upper()
    return symbol_to_asset_or_token(normalized)
def bitcoinde_pair_to_world(pair: str) -> Tuple[Asset, Asset]:
    """Split a bitcoin.de trading-pair string into (traded asset, settlement asset).

    6-character pairs split 3/3 (e.g. 'btceur'); 7- or 8-character pairs split
    after the 4th character (e.g. 'iotabtc', 'dashbtc').

    May raise:
    - DeserializationError if the pair length is unrecognized
    """
    if len(pair) == 6:
        split_at = 3
    elif len(pair) in (7, 8):
        split_at = 4
    else:
        raise DeserializationError(f'Could not parse pair: {pair}')
    return bitcoinde_asset(pair[:split_at]), bitcoinde_asset(pair[split_at:])
def trade_from_bitcoinde(raw_trade: Dict) -> Trade:
    """Convert bitcoin.de raw data to a trade
    May raise:
    - DeserializationError
    - UnknownAsset
    - KeyError
    """
    try:
        timestamp = deserialize_timestamp_from_date(
            raw_trade['successfully_finished_at'],
            'iso8601',
            'bitcoinde',
        )
    except KeyError:
        # For very old trades (2013) bitcoin.de does not return 'successfully_finished_at'
        timestamp = deserialize_timestamp_from_date(
            raw_trade['trade_marked_as_paid_at'],
            'iso8601',
            'bitcoinde',
        )
    trade_type = TradeType.deserialize(raw_trade['type'])
    tx_amount = deserialize_asset_amount(raw_trade['amount_currency_to_trade'])
    native_amount = deserialize_asset_amount(raw_trade['volume_currency_to_pay'])
    tx_asset, native_asset = bitcoinde_pair_to_world(raw_trade['trading_pair'])
    amount = tx_amount
    # Price expressed in the settlement currency per unit of the traded asset.
    rate = Price(native_amount / tx_amount)
    fee_amount = deserialize_fee(raw_trade['fee_currency_to_pay'])
    # NOTE(review): the fee currency is hardcoded to EUR — correct for the EUR
    # pairs, but confirm for BTC-quoted pairs such as 'dashbtc'.
    fee_asset = A_EUR
    return Trade(
        timestamp=timestamp,
        location=Location.BITCOINDE,
        base_asset=tx_asset,
        quote_asset=native_asset,
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=fee_amount,
        fee_currency=fee_asset,
        link=str(raw_trade['trade_id']),
    )
class Bitcoinde(ExchangeInterface): # lgtm[py/missing-call-to-init]
    """Exchange connector for the bitcoin.de v4 trading API."""
    def __init__(
            self,
            name: str,
            api_key: ApiKey,
            secret: ApiSecret,
            database: 'DBHandler',
            msg_aggregator: MessagesAggregator,
    ):
        super().__init__(
            name=name,
            location=Location.BITCOINDE,
            api_key=api_key,
            secret=secret,
            database=database,
        )
        self.uri = 'https://api.bitcoin.de'
        # The API key travels as a header on every request.
        self.session.headers.update({'x-api-key': api_key})
        self.msg_aggregator = msg_aggregator
    def edit_exchange_credentials(
            self,
            api_key: Optional[ApiKey],
            api_secret: Optional[ApiSecret],
            passphrase: Optional[str],
    ) -> bool:
        """Update stored credentials and refresh the session header on key change."""
        changed = super().edit_exchange_credentials(api_key, api_secret, passphrase)
        if api_key is not None:
            self.session.headers.update({'x-api-key': api_key})
        return changed
    def _generate_signature(self, request_type: str, url: str, nonce: str) -> str:
        """Compute the HMAC-SHA256 request signature per the bitcoin.de scheme.

        Side effect: also installs the signature into the session headers.
        MD5_EMPTY_STR stands in for the md5 of the (empty) request body.
        """
        signed_data = '#'.join([request_type, url, self.api_key, nonce, MD5_EMPTY_STR]).encode()
        signature = hmac.new(
            self.secret,
            signed_data,
            hashlib.sha256,
        ).hexdigest()
        self.session.headers.update({
            'x-api-signature': signature,
        })
        return signature
    def _api_query(
            self,
            verb: Literal['get', 'post'],
            path: str,
            options: Optional[Dict] = None,
    ) -> Dict:
        """
        Queries Bitcoin.de with the given verb for the given path and options
        """
        assert verb in ('get', 'post'), (
            'Given verb {} is not a valid HTTP verb'.format(verb)
        )
        request_path_no_args = '/v4/' + path
        # Body is always empty here; options travel as query parameters.
        data = ''
        if not options:
            request_path = request_path_no_args
        else:
            request_path = request_path_no_args + '?' + urlencode(options)
        # Millisecond timestamp serves as the monotonically increasing nonce.
        nonce = str(int(time.time() * 1000))
        request_url = self.uri + request_path
        self._generate_signature(
            request_type=verb.upper(),
            url=request_url,
            nonce=nonce,
        )
        headers = {
            'x-api-nonce': nonce,
        }
        if data != '':
            headers.update({
                'Content-Type': 'application/json',
                'Content-Length': str(len(data)),
            })
        log.debug('Bitcoin.de API Query', verb=verb, request_url=request_url)
        try:
            response = getattr(self.session, verb)(request_url, data=data, headers=headers)
        except requests.exceptions.RequestException as e:
            raise RemoteError(f'Bitcoin.de API request failed due to {str(e)}') from e
        try:
            json_ret = json.loads(response.text)
        except JSONDecodeError as exc:
            raise RemoteError('Bitcoin.de returned invalid JSON response') from exc
        # NOTE(review): 401 responses fall through to the normal return path —
        # presumably so the error payload reaches the caller; confirm.
        if response.status_code not in (200, 401):
            if isinstance(json_ret, dict) and 'errors' in json_ret:
                for error in json_ret['errors']:
                    if error.get('field') == 'X-API-KEY' and error.get('code') == 1:
                        raise RemoteError('Provided API Key is in invalid Format')
                    if error.get('code') == 3:
                        raise RemoteError('Provided API Key is invalid')
                raise RemoteError(json_ret['errors'])
            raise RemoteError(
                'Bitcoin.de api request for {} failed with HTTP status code {}'.format(
                    response.url,
                    response.status_code,
                ),
            )
        if not isinstance(json_ret, dict):
            raise RemoteError('Bitcoin.de returned invalid non-dict response')
        return json_ret
    def validate_api_key(self) -> Tuple[bool, str]:
        """
        Validates that the Bitcoin.de API key is good for usage in rotki
        """
        try:
            self._api_query('get', 'account')
            return True, ""
        except RemoteError as e:
            return False, str(e)
    def query_balances(self, **kwargs: Any) -> ExchangeQueryBalances:
        """Return per-asset balances with USD valuation, or (None, msg) on failure."""
        assets_balance: Dict[Asset, Balance] = {}
        try:
            resp_info = self._api_query('get', 'account')
        except RemoteError as e:
            msg = (
                'Bitcoin.de request failed. Could not reach bitcoin.de due '
                'to {}'.format(e)
            )
            log.error(msg)
            return None, msg
        log.debug(f'Bitcoin.de account response: {resp_info}')
        for currency, balance in resp_info['data']['balances'].items():
            asset = bitcoinde_asset(currency)
            try:
                usd_price = Inquirer().find_usd_price(asset=asset)
            except RemoteError as e:
                # Best effort: skip entries we cannot price instead of failing all.
                self.msg_aggregator.add_error(
                    f'Error processing Bitcoin.de balance entry due to inability to '
                    f'query USD price: {str(e)}. Skipping balance entry',
                )
                continue
            try:
                amount = deserialize_asset_amount(balance['total_amount'])
            except DeserializationError as e:
                self.msg_aggregator.add_error(
                    f'Error processing Bitcoin.de {asset} balance entry due to inability to '
                    f'deserialize the amount due to {str(e)}. Skipping balance entry',
                )
                continue
            assets_balance[asset] = Balance(
                amount=amount,
                usd_value=amount * usd_price,
            )
        return assets_balance, ''
    def query_online_trade_history(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
    ) -> Tuple[List[Trade], Tuple[Timestamp, Timestamp]]:
        """Fetch and deserialize all completed trades within [start_ts, end_ts]."""
        page = 1
        resp_trades = []
        # Walk the paginated endpoint; state=1 requests completed trades only.
        while True:
            resp = self._api_query('get', 'trades', {'state': 1, 'page': page})
            resp_trades.extend(resp['trades'])
            if 'page' not in resp:
                break
            if resp['page']['current'] >= resp['page']['last']:
                break
            page = resp['page']['current'] + 1
        log.debug('Bitcoin.de trade history query', results_num=len(resp_trades))
        trades = []
        for tx in resp_trades:
            log.debug(f'Processing raw Bitcoin.de trade: {tx}')
            try:
                timestamp = iso8601ts_to_timestamp(tx['successfully_finished_at'])
            except KeyError:
                # For very old trades (2013) bitcoin.de does not return 'successfully_finished_at'
                timestamp = iso8601ts_to_timestamp(tx['trade_marked_as_paid_at'])
            if tx['state'] != 1:
                continue
            if timestamp < start_ts or timestamp > end_ts:
                continue
            try:
                converted_trade = trade_from_bitcoinde(tx)
                log.debug(f'Deserialized trade from Bitcoin.de: {converted_trade}')
                trades.append(converted_trade)
            except UnknownAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found bitcoin.de trade with unknown asset '
                    f'{e.asset_name}. Ignoring it.',
                )
                continue
            except (DeserializationError, KeyError) as e:
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                self.msg_aggregator.add_error(
                    'Error processing a Bitcoin.de trade. Check logs '
                    'for details. Ignoring it.',
                )
                log.error(
                    'Error processing a Bitcoin.de trade',
                    trade=tx,
                    error=msg,
                )
                continue
        return trades, (start_ts, end_ts)
    def query_online_deposits_withdrawals(
            self, # pylint: disable=no-self-use
            start_ts: Timestamp, # pylint: disable=unused-argument
            end_ts: Timestamp, # pylint: disable=unused-argument
    ) -> List[AssetMovement]:
        return [] # noop for bitcoinde
    def query_online_income_loss_expense(
            self, # pylint: disable=no-self-use
            start_ts: Timestamp, # pylint: disable=unused-argument
            end_ts: Timestamp, # pylint: disable=unused-argument
    ) -> List[LedgerAction]:
        return [] # noop for bitcoinde
    def query_online_margin_history(
            self, # pylint: disable=no-self-use
            start_ts: Timestamp, # pylint: disable=unused-argument
            end_ts: Timestamp, # pylint: disable=unused-argument
    ) -> List[MarginPosition]:
        return [] # noop for bitcoinde
|
11502711
|
import os
import time
import numpy as np
import streamlit as st
from twec.twec import TWEC
def train(
    data_dir="./data/",
    embedding_size=300,
    skipgram=False,
    siter=10,
    diter=10,
    negative_samples=10,
    window_size=5,
    output_path="./model",
    overwrite_compass=True,
    streamlit=False,
    component=None,
):
    """Train temporal word embeddings with TWEC.

    First pre-trains the shared 'compass' on data_dir/compass.txt, then
    fine-tunes one aligned slice per remaining file in data_dir, saving
    models under output_path. When `streamlit` is True, progress is shown
    in the given Streamlit `component` instead of printed.

    :param data_dir: directory with compass.txt plus one text file per slice
    :param siter: compass (static) training iterations
    :param diter: per-slice (dynamic) training iterations
    :raises ValueError: if streamlit is True but component is None
    """
    if streamlit and component is None:
        raise ValueError("`component` cannot be `None` when `streamlit` is `True`.")
    aligner = TWEC(
        size=embedding_size,
        sg=int(skipgram),
        siter=siter,
        diter=diter,
        workers=4,
        ns=negative_samples,
        window=window_size,
        opath=output_path,
    )
    if streamlit:
        component.write("Training")
        progress = 0.0
        progress_bar = component.progress(progress)
        output = component.beta_expander("Output")
    all_files = sorted(os.listdir(data_dir))
    num_files = len(all_files)
    start = time.time()
    # train the compass: the text should be the concatenation of the text from the slices
    aligner.train_compass(
        os.path.join(data_dir, "compass.txt"), overwrite=overwrite_compass
    )
    # keep an eye on the overwrite behaviour
    end = time.time()
    # NOTE(review): end - start is in seconds (time.time()), but the message
    # says 'ms' — confirm intended units.
    compass_out = f"Time Taken for TWEC Pre-Training: {(end - start)} ms"
    if not streamlit:
        print(compass_out)
    else:
        progress += 1 / num_files
        # NOTE(review): progress is rounded here but passed raw in the slice
        # loop below — confirm which is intended.
        progress_bar.progress(np.round(progress, decimals=1))
        with output:
            st.write(compass_out)
    slices = {}
    for file in all_files:
        if file != "compass.txt":
            start = time.time()
            # Fine-tune one slice, keyed by filename without extension.
            slices[file.split(".")[0]] = aligner.train_slice(
                os.path.join(data_dir, file), save=True
            )
            end = time.time()
            year_out = f"Time Taken for TWEC Fine-tuning for {file.split('.')[0]}: {(end - start)} ms"
            if not streamlit:
                print(year_out)
            else:
                progress += 1 / num_files
                if progress > 1.0:
                    progress = 1.0
                progress_bar.progress(progress)
                with output:
                    st.write(year_out)
if __name__ == "__main__":
    # CLI entry point: run training with all default parameters.
    train()
|
11502713
|
import os
import math
import time
import urllib
import discord
import asyncio
import aiohttp
import datetime
import operator
import collections
from PIL import Image
from random import choice
from random import randint
import motor.motor_asyncio
from discord.ext import commands
from discord.utils import get
from utils.dataIO import fileIO
from googletrans import Translator
from urllib.parse import quote_plus
from utils.option_parser import OptionParser
from utils.chat_formatting import escape_mass_mentions, italics, pagify
class Utility(commands.Cog):
    def __init__(self, bot):
        """Utility cog: misc commands (rolls, polls, lookups, wolfram, etc.)."""
        self.bot = bot
        # define database variables
        self.server_settings = self.bot.db["utility"] # bot settings for utility
        client = motor.motor_asyncio.AsyncIOMotorClient()
        self.db = client['{}_utility'.format(self.bot.config['bot_name'])] # doesn't follow typical structure
        self.api_keys = fileIO("config.json", "load")["API_KEYS"]
        self.WOLFRAM_API_KEY = self.api_keys['WOLFRAM_API_KEY']
        # Active Poll sessions (one per channel) and per-user stopwatch starts.
        self.poll_sessions = []
        self.stopwatches = {}
@commands.cooldown(1, 10, commands.BucketType.user)
@commands.command(pass_context=True)
    async def roll(self, ctx, dice_num:str='6'):
        """Rolls random number between 1 and user's choice. Defaults to 6.
        [Options]
        dice_num: Number of faces on your dice.
        [Example]
        +<COMMAND> 727
        """
        author = ctx.message.author
        # Non-numeric input silently falls back to a standard six-sided die.
        if dice_num.isdigit():
            number = int(dice_num)
        else:
            number = 6
        if number > 1:
            n = randint(1, number)
            await ctx.send("{} :game_die: {} :game_die:".format(author.mention, n))
        else:
            await ctx.send("{} Maybe higher than 1? ;P".format(author.mention))
@commands.cooldown(1, 10, commands.BucketType.user)
@commands.command()
async def crypto(self, ctx, ticker:str):
"""Get info on a specific coin.
[Options]
ticker: Ticker of the coin.
[Example]
+<COMMAND> btc
"""
user = ctx.message.author
url = "https://min-api.cryptocompare.com/data/price?fsym={}&tsyms=USD,EUR".format(ticker.upper())
# url = "https://api.coinmarketcap.com/v2/ticker/1/"
async with aiohttp.ClientSession() as session:
async with session.get(url) as r:
result = await r.json()
em = discord.Embed(colour=user.colour)
desc = ""
for value in result.keys():
desc += "{}: `{:,}`\n".format(value, result[value])
full_url = f"https://www.cryptocompare.com/coins/{ticker.lower()}/overview/"
em.set_author(name=f"{ticker.upper()}", url=full_url)
em.description = desc
em.set_footer(text="Data from https://cryptocompare.com/")
await ctx.send(embed = em)
return
await ctx.send("**Error. Try again later.**")
@commands.cooldown(1, 30, commands.BucketType.user)
@commands.command()
async def lmgtfy(self, ctx, *, search_terms : str):
"""Creates a lmgtfy link.
[Options]
search_terms: The things you want it to show you how to look up...
[Example]
+<COMMAND> How do I add owo! bot to my server?
"""
search_terms = escape_mass_mentions(search_terms.replace(" ", "+"))
await ctx.send("https://lmgtfy.com/?q={}".format(search_terms))
@commands.cooldown(1, 1, commands.BucketType.user)
@commands.command(aliases=["sw"])
async def stopwatch(self, ctx):
"""Stars/stops a stopwatch
[Example]
+<COMMAND>
"""
author = ctx.message.author
if not author.id in self.stopwatches:
self.stopwatches[author.id] = int(time.perf_counter())
await ctx.send(author.mention + " Stopwatch started!")
else:
tmp = abs(self.stopwatches[author.id] - int(time.perf_counter()))
tmp = str(datetime.timedelta(seconds=tmp))
await ctx.send(author.mention + " Stopwatch stopped! Time: **" + tmp + "**")
self.stopwatches.pop(author.id, None)
@commands.cooldown(1, 30, commands.BucketType.user)
@commands.command(aliases=["tr"])
    async def translate(self, ctx, *, text):
        """Translates text into something else. To english by default.
        [Options]
        From (-f): Language you're starting from.
        To (-t): Language you want it in.
        [Example]
        +<COMMAND> わたしは にほんごがすこししか はなせません。
        """
        # Feature kill-switch: translation is currently disabled, so we bail
        # out immediately. Everything below is intentionally dead code, kept
        # for when the feature is re-enabled.
        return await ctx.send(":red_circle: **Sorry, currently unavailable.**")
        user = ctx.message.author
        text, options = self._get_translate_options(text)
        translator = Translator()
        tr_text = translator.translate(text)
        await ctx.send("**{}**: `{}`".format(user.display_name, tr_text.text))
def _get_translate_options(self, text):
return text, None
@commands.cooldown(1, 10, commands.BucketType.user)
@commands.command(aliases=["ud"])
async def urban(self, ctx, *, search_terms : str, definition_number : int=1):
"""Get an urban dictionary definition of a word. I'm sure this will be good.
[Options]
search_terms: The words you want a definition to.
definition_number: The definition number (int)
[Example]
+<COMMAND> cookiezi 1
"""
user = ctx.message.author
def encode(s):
return quote_plus(s, encoding='utf-8', errors='replace')
original_search = search_terms
search_terms = search_terms.split(" ")
try:
if len(search_terms) > 1:
pos = int(search_terms[-1]) - 1
search_terms = search_terms[:-1]
else:
pos = 0
if pos not in range(0, 11): # API only provides the
pos = 0 # top 10 definitions
except ValueError:
pos = 0
search_terms = "+".join([encode(s) for s in search_terms])
url = "http://api.urbandictionary.com/v0/define?term=" + search_terms
try:
async with aiohttp.ClientSession() as session:
async with session.get(url) as r:
result = await r.json()
if result["list"]:
definition = result['list'][pos]['definition']
example = result['list'][pos]['example']
defs = len(result['list'])
msg = ("**__Definition #{} of {}:__\n**{}\n\n"
"**__Example:__\n**{}".format(pos+1, defs, definition,
example))
msg = pagify(msg, ["\n"])
urban_icon = "http://i.imgur.com/nWfKsAS.png"
counter = 1
for page in msg:
em = discord.Embed(description=page, colour=user.colour)
em.set_author(name="{}".format(original_search).capitalize(), icon_url = urban_icon)
em.set_footer(text="Page {}".format(str(counter)))
await ctx.send(embed = em)
if counter >= 3:
break
counter += 1
else:
await ctx.send("Your search terms gave no results.")
except IndexError:
await ctx.send("There is no definition #{}".format(pos+1))
except:
await ctx.send("Error.")
@commands.cooldown(1, 10, commands.BucketType.user)
@commands.command(pass_context=True)
async def flip(self, ctx, user : discord.Member=None):
"""Flip a coin or a user.
[Options]
user: The user you would like to flip.
[Example]
+<COMMAND> <USER>
"""
if user != None:
msg = ""
if user.id == self.bot.user.id:
user = ctx.message.author
msg = "Nice try. You think this is funny? How about *this* instead:\n\n"
char = "abcdefghijklmnopqrstuvwxyz"
tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz"
table = str.maketrans(char, tran)
name = user.display_name.translate(table)
char = char.upper()
tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z"
table = str.maketrans(char, tran)
name = name.translate(table)
await ctx.send("(╯°□°)╯︵ {}".format(name[::-1]))
else:
msg = await ctx.send("**Flips a coin and...**")
await asyncio.sleep(1)
await msg.edit(content="_**Flips a coin and... " + choice(["HEADS!**_", "TAILS!**_"]))
@commands.cooldown(1, 10, commands.BucketType.user)
@commands.command()
async def choose(self, ctx, *choices):
"""Chooses between multiple choices.
[Options]
choises: The different choices. To denote multiple choices, you should use double quotes.
[Example]
+<COMMAND> Pizza Banana "Apple Pie" "Something else"
"""
if len(choices) < 2:
await ctx.send('Not enough choices to pick from.')
else:
await ctx.send(escape_mass_mentions(choice(choices)))
@commands.cooldown(1, 30, commands.BucketType.user)
@commands.command(pass_context=True, no_pm=True)
    async def poll(self, ctx, *choices):
        """Start/stop a poll between multiple choices
        [Options]
        choises: The different choices. Separate using semi-colons.
        [Example]
        +<COMMAND> Question?;Banana;Apple Pie;Something else
        """
        message = ctx.message
        # Single-argument "stop" ends the channel's active poll instead of starting one.
        if len(choices) == 1:
            if choices[0].lower() == "stop":
                await self.endpoll(message)
                return
        # Only one poll per channel at a time.
        if not self.getPollByChannel(message):
            # Refuse polls that would ping everyone.
            check = " ".join(choices).lower()
            if "@everyone" in check or "@here" in check:
                await ctx.send("Nice try.")
                return
            p = NewPoll(message, self)
            if p.valid:
                self.poll_sessions.append(p)
                await p.start()
            else:
                await self.bot.send_cmd_help(ctx)
        else:
            await ctx.send("**A poll is already ongoing in this channel.**")
async def endpoll(self, message):
if self.getPollByChannel(message):
p = self.getPollByChannel(message)
if p.author == message.author.id: # or isMemberAdmin(message)
await self.getPollByChannel(message).endPoll()
else:
await ctx.send("Only admins and the author can stop the poll.")
else:
await ctx.send("There's no poll ongoing in this channel.")
def getPollByChannel(self, message):
for poll in self.poll_sessions:
if poll.channel == message.channel:
return poll
return False
    async def check_poll_votes(self, message):
        # Ignore the bot's own messages; forward everything else to the
        # channel's active poll (if any) as a potential vote.
        if message.author.id != self.bot.user.id:
            if self.getPollByChannel(message):
                self.getPollByChannel(message).checkAnswer(message)
@commands.cooldown(1, 30, commands.BucketType.user)
@commands.command(no_pm=True)
    async def anime(self, ctx, *media_name):
        """Find an anime, manga, whatever you like.
        [Options]
        Manga (-m): If the media is a manga
        User (-u): If the search is a user
        [Example]
        +<COMMAND> Made in Abyss
        """
        user = ctx.message.author
        # -m / -u flags switch the Jikan search endpoint; default is anime.
        option_parser = OptionParser()
        option_parser.add_option('m','manga', opt_type=None, default=False)
        option_parser.add_option('u','user', opt_type=None, default=False)
        media_name, options = option_parser.parse(media_name)
        media_name = str(media_name)
        media_type = "anime"
        if options["manga"]:
            media_type = "manga"
        elif options["user"]:
            media_type = "user"
        try:
            top_result = await self._get_anime_search(media_type, media_name)
            if top_result:
                em = await self._create_anime_embed(media_type, top_result, user.colour)
                await ctx.send(embed = em)
            else:
                await ctx.send(f":red_circle: **{media_name} {media_type} was not found!**")
        except:
            # Any API/parsing failure is reported as "no results".
            return await ctx.send(f":red_circle: **No results!**")
    async def _create_anime_embed(self, media_type, result, color):
        """Build a Discord embed (title, synopsis, thumbnail) from one Jikan
        search-result dict."""
        # print(result)
        em = discord.Embed(colour=color)
        em.set_author(name="{}".format(result['title']), url=result['url'])
        em.add_field(name="Synopsis", value=result["synopsis"])
        #misc = ""
        #misc += "Rated: {}".format(result['rated'])
        #em.add_field(name="Misc", value=misc)
        em.set_thumbnail(url=result["image_url"])
        return em
async def _get_anime_search(self, media_type, media_name):
    """Query the Jikan (MyAnimeList) v3 search API and return the top result.

    Returns the first result dict, or None when the API reports no matches.
    BUG FIX: previously an empty/missing "results" list raised
    KeyError/IndexError and the trailing `return None` was unreachable.
    """
    query = urllib.parse.quote_plus(media_name, encoding='utf-8', errors='replace')
    uri = f"https://api.jikan.moe/v3/search/{media_type}?q={query}"
    async with aiohttp.ClientSession() as session:
        async with session.get(uri) as resp:
            data = await resp.json()
            results = data.get("results")
            if results:
                return results[0]
    return None
@commands.cooldown(1, 30, commands.BucketType.user)
@commands.command(pass_context=True, name='wolfram', aliases=['w','ask'])
async def wolfram(self, ctx, *, arguments : str):
    """Ask the wolfram god a question.
    [Options]
    arguments: The things you want to ask it.
    [Example]
    +<COMMAND> What is the airspeed velocity of an unladen swallow?
    """
    # Fetch a rendered answer image from the Wolfram|Alpha "simple" API,
    # trim excess height from it, then post it as an embed attachment.
    user = ctx.message.author
    channel = ctx.message.channel
    api_key = self.WOLFRAM_API_KEY
    # Rendering parameters passed in the request URL.
    width = 800
    max_height = 800  # crop the image if it ends up taller than this
    font_size = 30
    layout = 'labelbar'  # NOTE(review): unused — never placed in the URL below
    background = '193555'
    foreground = 'white'
    # Temp-file name; random 0-50 so concurrent invocations rarely collide.
    rand_num = randint(0, 50)
    if not api_key:
        await ctx.send('Missing Api Key.')
        return
    try:
        query = urllib.parse.quote_plus(arguments, encoding='utf-8', errors='replace')
        url = 'http://api.wolframalpha.com/v1/simple?appid={}&i={}%3F&width={}&fontsize={}&background={}&foreground={}'.format(
            api_key, query, width, font_size, background, foreground)
        file = '{}.png'.format(rand_num)
        filename = 'cogs/utility/temp/{}.png'.format(rand_num)
        #filename = '{}.png'.format(user.id)
        # Download the rendered PNG to disk.
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as r:
                image = await r.content.read()
                with open(filename,'wb') as f:
                    f.write(image)
        # crop image
        image = Image.open(filename)
        width = image.size[0]
        height = image.size[1]
        # if too big
        if height > max_height:
            # Sample a one-pixel-wide column 100px from the right edge and
            # scan down it; remember the last row where the color changed
            # before max_height, then crop just below that row.
            offset = 100
            size_det_img = image.crop((width-offset, 0, width - offset + 1, height))
            size_det_img = size_det_img.convert('RGB')
            current_color = size_det_img.getpixel((0, 0))
            change_height = 0
            for i in range(height):
                new_pixel_color = size_det_img.getpixel((0, i))
                if current_color != new_pixel_color:
                    if i > max_height:
                        break
                    change_height = i
            img2 = image.crop((0, 0, width, change_height + 3))
            image = img2
        image.save(filename)
        wolfram_file = discord.File(filename)
        em = discord.Embed(colour=user.colour)
        # Embed shows the attached image plus a link to the full web result.
        em.set_image(url='attachment://{}'.format(file))
        full_url = "http://www.wolframalpha.com/input/?i={}".format(query)
        em.description = "{} Click [here]({}) for full result".format(user.mention, full_url)
        await channel.send(embed = em, file = wolfram_file)
        os.remove(filename)
    except:
        # NOTE(review): bare except hides programming errors too; the user
        # just sees this generic failure message.
        await ctx.send('**Error. Try another search term.**')
        return
'''
@commands.group(pass_context=True)
async def stream(self, ctx):
"""Get stream alerts from your favorite users"""
if ctx.invoked_subcommand is None:
await self.bot.send_cmd_help(ctx)
return
@commands.has_permissions(manage_guild = True)
@stream.command(name = "add", no_pm=True)
async def add_stream(self, ctx, toggle=None):
pass
@commands.has_permissions(manage_guild = True)
@stream.command(name = "remove", no_pm=True)
async def remove_stream(self, ctx, toggle=None):
pass
@commands.has_permissions(manage_guild = True)
@stream.command(name = "check", no_pm=True)
async def stream_check(self, ctx, toggle=None):
pass'''
@commands.command(pass_context=True, no_pm=True, aliases = ['games'])
async def whoplays(self, ctx, *game:str):
    """Shows a list of all the people playing a game.
    [Options]
    game: Name of the game you want to get a list of users for. (optional)
    [Example]
    +<COMMAND> osu!
    """
    user = ctx.message.author
    server = ctx.message.guild
    members = server.members
    game, options = OptionParser().parse(game)
    if game and len(game) <= 2:
        await ctx.send("You need at least 3 characters.")
        return
    if game:
        # Collect (member name, activity name) pairs matching the search term.
        players = []
        for member in members:
            if member != None and member.activity != None and member.activity.name != None and not member.bot:
                if game.lower() in member.activity.name.lower():
                    players.append((member.name, member.activity.name))
        players = sorted(players, key=operator.itemgetter(0))
        if len(players) == 0:
            await ctx.send("No one is playing that game.")
        else:
            # NOTE(review): assumes OptionParser always supplies a numeric
            # "page" option by default — confirm against OptionParser.
            user_page = int(options["page"])
            per_page = 15
            total_pages = math.ceil(len(players)/per_page)
            embed_list = []
            for page in range(total_pages):
                start_ind = per_page*page
                end_ind = per_page*page + per_page
                msg = "```"
                # BUG FIX: the loop variable here used to be named `game`,
                # shadowing the search term, so the embed title below showed
                # the last player's activity instead of the queried game.
                for player, activity in players[start_ind:end_ind]:
                    msg += u"▸ {:<25} {:<30}\n".format(player, activity)
                msg += "```"
                em = discord.Embed(description=msg, colour=user.colour)
                showing = "({})".format(len(players))
                em.set_author(name="These are the people who are playing {} {}: \n".format(game, showing))
                em.set_footer(text="Page {}/{}".format(page+1, total_pages))
                embed_list.append(em)
            await self.bot.menu(ctx, embed_list, message=None, page=user_page-1, timeout=15)
    else:
        # No search term: tally every activity currently being played.
        freq_list = {}
        for member in members:
            if member != None and member.activity != None and member.activity.name != None and not member.bot:
                if member.activity.name not in freq_list:
                    freq_list[member.activity.name] = 0
                freq_list[member.activity.name] += 1
        sorted_list = sorted(freq_list.items(), key=operator.itemgetter(1), reverse = True)
        if not freq_list:
            await ctx.send("Surprisingly, no one is playing anything.")
        else:
            # Show the top 15 games by player count.
            msg = "```"
            games_per_page = 15
            max_games = min(len(sorted_list), games_per_page)
            for i in range(max_games):
                game_name, freq = sorted_list[i]
                msg += "▸ {:<25} {:<30}\n".format(game_name[:25], freq)
            msg += "```"
            em = discord.Embed(description=msg, colour=user.colour)
            em.set_author(name="These are the server's most played games at the moment:")
            await ctx.send(embed = em)
class NewPoll():
    """One timed poll bound to a single channel.

    Parses "+poll question;choice1;choice2[;...]" from the triggering
    message, accepts numeric votes typed in the channel for 60 seconds,
    then announces the totals and removes itself from the session list.
    """

    def __init__(self, message, main):
        self.channel = message.channel
        self.author = message.author.id
        self.client = main.bot
        self.poll_sessions = main.poll_sessions
        msg = message.content[6:]  # drop the "+poll " command prefix
        msg = msg.split(";")
        # BUG FIX: a poll needs one question plus at least 2 choices, i.e.
        # three ";"-separated parts; the old check (`len(msg) < 2`) accepted
        # a poll with a single choice, contradicting its own comment.
        if len(msg) < 3:
            self.valid = False
            return None
        else:
            self.valid = True
            self.already_voted = []  # author ids that have already voted
            self.question = msg[0]
            msg.remove(self.question)
            self.answers = {}
            i = 1
            for answer in msg:  # {id : {answer, votes}}
                self.answers[i] = {"ANSWER" : answer, "VOTES" : 0}
                i += 1

    async def start(self):
        """Announce the poll, wait 60 seconds, then end it if still running."""
        msg = "**POLL STARTED!**\n\n{}\n\n".format(self.question)
        for id, data in self.answers.items():
            msg += "{}. *{}*\n".format(id, data["ANSWER"])
        msg += "\nType the number to vote!"
        await self.channel.send(msg)
        await asyncio.sleep(60)
        if self.valid:
            await self.endPoll()

    async def endPoll(self):
        """Post the vote totals and deregister this poll session."""
        self.valid = False
        msg = "**POLL ENDED!**\n\n{}\n\n".format(self.question)
        for data in self.answers.values():
            msg += "*{}* - {} votes\n".format(data["ANSWER"], str(data["VOTES"]))
        await self.channel.send(msg)
        self.poll_sessions.remove(self)

    def checkAnswer(self, message):
        """Count the message as a vote when it is a valid choice number and
        its author has not voted yet; silently ignore non-numeric content."""
        try:
            i = int(message.content)
            if i in self.answers.keys():
                if message.author.id not in self.already_voted:
                    data = self.answers[i]
                    data["VOTES"] += 1
                    self.answers[i] = data
                    self.already_voted.append(message.author.id)
        except ValueError:
            pass
def setup(bot):
    """Register the Utility cog and hook its poll vote checker into on_message."""
    cog = Utility(bot)
    bot.add_cog(cog)
    bot.add_listener(cog.check_poll_votes, "on_message")
|
11502731
|
import os
import unittest
from . import GeorefMockTest
EXAMPLE_CONFIG = 'config/georef.example.cfg'


class RoutesTest(GeorefMockTest):
    """Route-level checks for the georef API."""

    def test_v1_0_endpoints(self):
        """Endpoints under the /api/v1.0 prefix must keep working even though
        only one API version exists, because early releases of the API were
        published under the /v1.0 prefix."""
        resources = [
            'provincias',
            'departamentos',
            'municipios',
            'localidades',
            'direcciones',
            'calles',
            'ubicacion',
        ]
        self.assertTrue(all(
            self.app.options('/api/v1.0/' + resource).status_code == 200
            for resource in resources
        ))

    @unittest.skipIf(os.environ['GEOREF_CONFIG'] != EXAMPLE_CONFIG,
                     'No se está utilizando la config de ejemplo')
    def test_complete_download_redirect(self):
        """Full-dataset downloads are implemented as an HTTP 302 redirect to
        the URL hosting the data. The example config wires a placeholder URL
        for /provincias.json."""
        response = self.app.get('/api/provincias.json')
        self.assertTrue(response.status_code == 302 and
                        response.headers['Location'] == 'https://www.example.org')

    @unittest.skipIf(os.environ['GEOREF_CONFIG'] != EXAMPLE_CONFIG,
                     'No se está utilizando la config de ejemplo')
    def test_complete_download_redirect_unset(self):
        """Accessing a full-download resource the config leaves unset must
        yield HTTP 404. The example config only sets /provincias.json; the
        rest stay unconfigured."""
        response = self.app.get('/api/departamentos.json')
        self.assertTrue(response.status_code == 404)
|
11502736
|
# Auto-generated IDA-style batch annotation script: each del_items/SetType
# pair clears any existing definition at an address and re-applies the
# recovered C signature (C++-mangled names suggest a decompiled PSX game
# binary — presumably Diablo PSX, judging by ItemStruct/PlayerStruct;
# unverified). The same mechanical pattern repeats for every address below.
del_items(0x80082CF0)
SetType(0x80082CF0, "int GetTpY__FUs(unsigned short tpage)")
del_items(0x80082D0C)
SetType(0x80082D0C, "int GetTpX__FUs(unsigned short tpage)")
del_items(0x80082D18)
SetType(0x80082D18, "void Remove96__Fv()")
del_items(0x80082D50)
SetType(0x80082D50, "void AppMain()")
del_items(0x80082E1C)
SetType(0x80082E1C, "void MAIN_RestartGameTask__Fv()")
del_items(0x80082E48)
SetType(0x80082E48, "void GameTask__FP4TASK(struct TASK *T)")
del_items(0x80082F54)
SetType(0x80082F54, "void MAIN_MainLoop__Fv()")
del_items(0x80082FA8)
SetType(0x80082FA8, "void CheckMaxArgs__Fv()")
del_items(0x80082FDC)
SetType(0x80082FDC, "unsigned char GPUQ_InitModule__Fv()")
del_items(0x80082FE8)
SetType(0x80082FE8, "void GPUQ_FlushQ__Fv()")
del_items(0x8008315C)
SetType(0x8008315C, "void GPUQ_LoadImage__FP4RECTli(struct RECT *Rect, long ImgHandle, int Offset)")
del_items(0x80083210)
SetType(0x80083210, "void GPUQ_DiscardHandle__Fl(long hnd)")
del_items(0x800832B0)
SetType(0x800832B0, "void GPUQ_LoadClutAddr__FiiiPv(int X, int Y, int Cols, void *Addr)")
del_items(0x8008334C)
SetType(0x8008334C, "void GPUQ_MoveImage__FP4RECTii(struct RECT *R, int x, int y)")
del_items(0x800833EC)
SetType(0x800833EC, "unsigned char PRIM_Open__FiiiP10SCREEN_ENVUl(int Prims, int OtSize, int Depth, struct SCREEN_ENV *Scr, unsigned long MemType)")
del_items(0x80083508)
SetType(0x80083508, "unsigned char InitPrimBuffer__FP11PRIM_BUFFERii(struct PRIM_BUFFER *Pr, int Prims, int OtSize)")
del_items(0x800835E4)
SetType(0x800835E4, "void PRIM_Clip__FP4RECTi(struct RECT *R, int Depth)")
del_items(0x8008370C)
SetType(0x8008370C, "unsigned char PRIM_GetCurrentScreen__Fv()")
del_items(0x80083718)
SetType(0x80083718, "void PRIM_FullScreen__Fi(int Depth)")
del_items(0x80083754)
SetType(0x80083754, "void PRIM_Flush__Fv()")
del_items(0x80083984)
SetType(0x80083984, "unsigned long *PRIM_GetCurrentOtList__Fv()")
del_items(0x80083990)
SetType(0x80083990, "void ClearPbOnDrawSync(struct PRIM_BUFFER *Pb)")
del_items(0x800839CC)
SetType(0x800839CC, "unsigned char ClearedYet__Fv()")
del_items(0x800839D8)
SetType(0x800839D8, "void PrimDrawSycnCallBack()")
del_items(0x800839F8)
SetType(0x800839F8, "void SendDispEnv__Fv()")
del_items(0x80083A1C)
SetType(0x80083A1C, "struct POLY_F4 *PRIM_GetNextPolyF4__Fv()")
del_items(0x80083A34)
SetType(0x80083A34, "struct POLY_FT4 *PRIM_GetNextPolyFt4__Fv()")
del_items(0x80083A4C)
SetType(0x80083A4C, "struct POLY_GT4 *PRIM_GetNextPolyGt4__Fv()")
del_items(0x80083A64)
SetType(0x80083A64, "struct POLY_G4 *PRIM_GetNextPolyG4__Fv()")
del_items(0x80083A7C)
SetType(0x80083A7C, "struct POLY_F3 *PRIM_GetNextPolyF3__Fv()")
del_items(0x80083A94)
SetType(0x80083A94, "struct DR_MODE *PRIM_GetNextDrArea__Fv()")
del_items(0x80083AAC)
SetType(0x80083AAC, "bool ClipRect__FRC4RECTR4RECT(struct RECT *ClipRect, struct RECT *RectToClip)")
del_items(0x80083BC0)
SetType(0x80083BC0, "bool IsColiding__FRC4RECTT0(struct RECT *ClipRect, struct RECT *NewRect)")
del_items(0x80083C28)
SetType(0x80083C28, "void VID_AfterDisplay__Fv()")
del_items(0x80083C50)
SetType(0x80083C50, "void VID_ScrOn__Fv()")
del_items(0x80083C8C)
SetType(0x80083C8C, "void VID_DoThisNextSync__FPFv_v(void (*Func)())")
del_items(0x80083CE4)
SetType(0x80083CE4, "unsigned char VID_NextSyncRoutHasExecuted__Fv()")
del_items(0x80083CF0)
SetType(0x80083CF0, "unsigned long VID_GetTick__Fv()")
del_items(0x80083CFC)
SetType(0x80083CFC, "void VID_DispEnvSend()")
del_items(0x80083D54)
SetType(0x80083D54, "void VID_SetXYOff__Fii(int x, int y)")
del_items(0x80083D64)
SetType(0x80083D64, "int VID_GetXOff__Fv()")
del_items(0x80083D70)
SetType(0x80083D70, "int VID_GetYOff__Fv()")
del_items(0x80083D7C)
SetType(0x80083D7C, "bool VID_IsDbuffer__Fv()")
del_items(0x80083D88)
SetType(0x80083D88, "void VID_SetDBuffer__Fb(bool DBuf)")
del_items(0x8008401C)
SetType(0x8008401C, "void MyFilter__FUlUlPCc(unsigned long MemType, unsigned long Size, char *Name)")
del_items(0x80084024)
SetType(0x80084024, "void SlowMemMove__FPvT0Ul(void *Dest, void *Source, unsigned long size)")
del_items(0x80084044)
SetType(0x80084044, "int GetTpY__FUs_addr_80084044(unsigned short tpage)")
del_items(0x80084060)
SetType(0x80084060, "int GetTpX__FUs_addr_80084060(unsigned short tpage)")
del_items(0x8008406C)
SetType(0x8008406C, "struct FileIO *SYSI_GetFs__Fv()")
del_items(0x80084078)
SetType(0x80084078, "struct FileIO *SYSI_GetOverlayFs__Fv()")
del_items(0x80084084)
SetType(0x80084084, "void SortOutFileSystem__Fv()")
del_items(0x800841B4)
SetType(0x800841B4, "void MemCb__FlPvUlPCcii(long hnd, void *Addr, unsigned long Size, char *Name, int Users, int TimeStamp)")
del_items(0x800841D8)
SetType(0x800841D8, "void Spanker__Fv()")
del_items(0x8008422C)
SetType(0x8008422C, "void GaryLiddon__Fv()")
del_items(0x80084234)
SetType(0x80084234, "void ReadPad__Fi(int NoDeb)")
del_items(0x800843BC)
SetType(0x800843BC, "void DummyPoll__Fv()")
del_items(0x800843C4)
SetType(0x800843C4, "void DaveOwens__Fv()")
del_items(0x800843EC)
SetType(0x800843EC, "void DaveCentreStuff__Fv()")
del_items(0x80084534)
SetType(0x80084534, "void PlaceStoreGold2__Fil(int myplr, long v)")
del_items(0x8008475C)
SetType(0x8008475C, "void GivePlayerDosh__Fil(int PlayerNo, long cost)")
del_items(0x80084910)
SetType(0x80084910, "int CalcItemVal__FP10ItemStruct(struct ItemStruct *Item)")
del_items(0x8008496C)
SetType(0x8008496C, "void RemoveDupInvItem__Fii(int pnum, int iv)")
del_items(0x80084B5C)
SetType(0x80084B5C, "long DetectDup__FP10ItemStructi(struct ItemStruct *Item, int PlayerNo)")
del_items(0x80084DD8)
SetType(0x80084DD8, "void WinterSales__Fi(int PlayerNo)")
del_items(0x80085014)
SetType(0x80085014, "void KeefDaFeef__Fi(int PlayerNo)")
del_items(0x80085460)
SetType(0x80085460, "unsigned short GetCur__C4CPad(struct CPad *this)")
del_items(0x80085488)
SetType(0x80085488, "unsigned char CheckActive__4CPad(struct CPad *this)")
del_items(0x80085494)
SetType(0x80085494, "int GetTpY__FUs_addr_80085494(unsigned short tpage)")
del_items(0x800854B0)
SetType(0x800854B0, "int GetTpX__FUs_addr_800854B0(unsigned short tpage)")
del_items(0x800854BC)
SetType(0x800854BC, "void TimSwann__Fv()")
del_items(0x800854C4)
SetType(0x800854C4, "struct FileIO *__6FileIOUl(struct FileIO *this, unsigned long OurMemId)")
del_items(0x80085514)
SetType(0x80085514, "void ___6FileIO(struct FileIO *this, int __in_chrg)")
del_items(0x80085568)
SetType(0x80085568, "long Read__6FileIOPCcUl(struct FileIO *this, char *Name, unsigned long RamId)")
del_items(0x800856D8)
SetType(0x800856D8, "int FileLen__6FileIOPCc(struct FileIO *this, char *Name)")
del_items(0x8008573C)
SetType(0x8008573C, "void FileNotFound__6FileIOPCc(struct FileIO *this, char *Name)")
del_items(0x8008575C)
SetType(0x8008575C, "bool StreamFile__6FileIOPCciPFPUciib_bii(struct FileIO *this, char *Name, int Slice, bool (*Func)(), int Offset, int Size)")
del_items(0x8008583C)
SetType(0x8008583C, "bool ReadAtAddr__6FileIOPCcPUci(struct FileIO *this, char *Name, unsigned char *Dest, int Len)")
del_items(0x80085900)
SetType(0x80085900, "void DumpOldPath__6FileIO(struct FileIO *this)")
del_items(0x80085964)
SetType(0x80085964, "void SetSearchPath__6FileIOPCc(struct FileIO *this, char *Path)")
del_items(0x80085A40)
SetType(0x80085A40, "bool FindFile__6FileIOPCcPc(struct FileIO *this, char *Name, char *Buffa)")
del_items(0x80085B54)
SetType(0x80085B54, "char *CopyPathItem__6FileIOPcPCc(struct FileIO *this, char *Dst, char *Src)")
del_items(0x80085BFC)
SetType(0x80085BFC, "void LockSearchPath__6FileIO(struct FileIO *this)")
del_items(0x80085C54)
SetType(0x80085C54, "void UnlockSearchPath__6FileIO(struct FileIO *this)")
del_items(0x80085CAC)
SetType(0x80085CAC, "bool SearchPathExists__6FileIO(struct FileIO *this)")
del_items(0x80085CC0)
SetType(0x80085CC0, "bool Save__6FileIOPCcPUci(struct FileIO *this, char *Name, unsigned char *Addr, int Len)")
del_items(0x80085CFC)
SetType(0x80085CFC, "struct PCIO *__4PCIOUl(struct PCIO *this, unsigned long OurMemId)")
del_items(0x80085D64)
SetType(0x80085D64, "void ___4PCIO(struct PCIO *this, int __in_chrg)")
del_items(0x80085DBC)
SetType(0x80085DBC, "bool FileExists__4PCIOPCc(struct PCIO *this, char *Name)")
del_items(0x80085E00)
SetType(0x80085E00, "bool LoReadFileAtAddr__4PCIOPCcPUci(struct PCIO *this, char *Name, unsigned char *Dest, int Len)")
del_items(0x80085EC4)
SetType(0x80085EC4, "int GetFileLength__4PCIOPCc(struct PCIO *this, char *Name)")
del_items(0x80085F7C)
SetType(0x80085F7C, "bool LoSave__4PCIOPCcPUci(struct PCIO *this, char *Name, unsigned char *Addr, int Len)")
del_items(0x80086050)
SetType(0x80086050, "bool LoStreamFile__4PCIOPCciPFPUciib_bii(struct PCIO *this, char *Name, int Slice, bool (*Func)(), int Offset, int Size)")
del_items(0x80086260)
SetType(0x80086260, "struct SysObj *__6SysObj(struct SysObj *this)")
del_items(0x80086278)
SetType(0x80086278, "void *__nw__6SysObji(int Amount)")
del_items(0x800862A4)
SetType(0x800862A4, "void *__nw__6SysObjiUl(int Amount, unsigned long RamID)")
del_items(0x80086320)
SetType(0x80086320, "void __dl__6SysObjPv(void *ptr)")
del_items(0x8008638C)
SetType(0x8008638C, "struct DatIO *__5DatIOUl(struct DatIO *this, unsigned long OurMemId)")
del_items(0x800863C8)
SetType(0x800863C8, "void ___5DatIO(struct DatIO *this, int __in_chrg)")
del_items(0x80086420)
SetType(0x80086420, "bool FileExists__5DatIOPCc(struct DatIO *this, char *Name)")
del_items(0x80086460)
SetType(0x80086460, "bool LoReadFileAtAddr__5DatIOPCcPUci(struct DatIO *this, char *Name, unsigned char *Dest, int Len)")
del_items(0x80086520)
SetType(0x80086520, "int GetFileLength__5DatIOPCc(struct DatIO *this, char *Name)")
del_items(0x800865D4)
SetType(0x800865D4, "bool LoSave__5DatIOPCcPUci(struct DatIO *this, char *Name, unsigned char *Addr, int Len)")
del_items(0x8008667C)
SetType(0x8008667C, "bool LoStreamFile__5DatIOPCciPFPUciib_bii(struct DatIO *this, char *Name, int Slice, bool (*Func)(), int Offset, int Size)")
del_items(0x80086888)
SetType(0x80086888, "struct CdIO *__4CdIOUl(struct CdIO *this, unsigned long OurMemId)")
del_items(0x800868CC)
SetType(0x800868CC, "void ___4CdIO(struct CdIO *this, int __in_chrg)")
del_items(0x80086924)
SetType(0x80086924, "bool FileExists__4CdIOPCc(struct CdIO *this, char *Name)")
del_items(0x80086948)
SetType(0x80086948, "bool LoReadFileAtAddr__4CdIOPCcPUci(struct CdIO *this, char *Name, unsigned char *Dest, int Len)")
del_items(0x800869E4)
SetType(0x800869E4, "int GetFileLength__4CdIOPCc(struct CdIO *this, char *Name)")
del_items(0x80086A08)
SetType(0x80086A08, "bool LoSave__4CdIOPCcPUci(struct CdIO *this, char *Name, unsigned char *Addr, int Len)")
del_items(0x80086ADC)
SetType(0x80086ADC, "bool CD_GetCdlFILE__FPCcP7CdlFILE(char *Name, struct CdlFILE *RetFile)")
del_items(0x80086B2C)
SetType(0x80086B2C, "bool LoStreamFile__4CdIOPCciPFPUciib_bii(struct CdIO *this, char *Name, int Slice, bool (*Func)(), int Offset, int Size)")
del_items(0x80086D54)
SetType(0x80086D54, "bool LoAsyncStreamFile__4CdIOPCciPFPUciib_bii(struct CdIO *this, char *Name, int Slice, bool (*Func)(), int Offset, int Size)")
del_items(0x80086EA4)
SetType(0x80086EA4, "void BL_InitEAC__Fv()")
del_items(0x80086F9C)
SetType(0x80086F9C, "long BL_ReadFile__FPcUl(char *Name, unsigned long RamId)")
del_items(0x800870B4)
SetType(0x800870B4, "long BL_AsyncReadFile__FPcUl(char *Name, unsigned long RamId)")
del_items(0x80087214)
SetType(0x80087214, "void BL_LoadDirectory__Fv()")
del_items(0x8008733C)
SetType(0x8008733C, "void BL_LoadStreamDir__Fv()")
del_items(0x800875CC)
SetType(0x800875CC, "struct STRHDR *BL_MakeFilePosTab__FPUcUl(unsigned char *BL_DirPtr, unsigned long NoStreamFiles)")
del_items(0x800876B4)
SetType(0x800876B4, "struct STRHDR *BL_FindStreamFile__FPcc(char *Name, char LumpID)")
del_items(0x80087840)
SetType(0x80087840, "bool BL_FileExists__FPcc(char *Name, char LumpID)")
del_items(0x8008787C)
SetType(0x8008787C, "int BL_FileLength__FPcc(char *Name, char LumpID)")
del_items(0x800878FC)
SetType(0x800878FC, "bool BL_LoadFileAtAddr__FPcPUcc(char *Name, unsigned char *Dest, char LumpID)")
del_items(0x80087A64)
SetType(0x80087A64, "bool BL_AsyncLoadDone__Fv()")
del_items(0x80087A70)
SetType(0x80087A70, "void BL_WaitForAsyncFinish__Fv()")
del_items(0x80087AB4)
SetType(0x80087AB4, "void BL_AsyncLoadCallBack__Fi(int ah)")
del_items(0x80087B18)
SetType(0x80087B18, "long BL_LoadFileAsync__FPcc(char *Name, char LumpID)")
del_items(0x80087CCC)
SetType(0x80087CCC, "bool BL_AsyncLoadFileAtAddr__FPcPUcc(char *Name, unsigned char *Dest, char LumpID)")
del_items(0x80087DE8)
SetType(0x80087DE8, "struct STRHDR *BL_OpenStreamFile__FPcc(char *Name, char LumpID)")
del_items(0x80087E14)
SetType(0x80087E14, "bool BL_CloseStreamFile__FP6STRHDR(struct STRHDR *StreamHDR)")
del_items(0x80087E1C)
SetType(0x80087E1C, "int LZNP_Decode__FPUcT0(unsigned char *in, unsigned char *out)")
del_items(0x80087EF0)
SetType(0x80087EF0, "void *Tmalloc__Fi(int MemSize)")
del_items(0x80087FE4)
SetType(0x80087FE4, "void Tfree__FPv(void *Addr)")
del_items(0x80088094)
SetType(0x80088094, "void InitTmalloc__Fv()")
del_items(0x800880BC)
SetType(0x800880BC, "void strupr__FPc(char *Buffa)")
del_items(0x80088110)
SetType(0x80088110, "void PauseTask__FP4TASK(struct TASK *T)")
del_items(0x80088160)
SetType(0x80088160, "int GetPausePad__Fv()")
del_items(0x80088288)
SetType(0x80088288, "bool TryPadForPause__Fi(int PadNum)")
del_items(0x800882B4)
SetType(0x800882B4, "void DoPause__14CPauseMessagesi(struct CPauseMessages *this, int nPadNum)")
del_items(0x800884C4)
SetType(0x800884C4, "bool DoPausedMessage__14CPauseMessages(struct CPauseMessages *this)")
del_items(0x800885FC)
SetType(0x800885FC, "int DoQuitMessage__14CPauseMessages(struct CPauseMessages *this)")
del_items(0x8008871C)
SetType(0x8008871C, "bool AreYouSureMessage__14CPauseMessages(struct CPauseMessages *this)")
del_items(0x8008883C)
SetType(0x8008883C, "bool PA_SetPauseOk__Fb(bool NewPause)")
del_items(0x8008884C)
SetType(0x8008884C, "bool PA_GetPauseOk__Fv()")
del_items(0x80088858)
SetType(0x80088858, "void MY_PausePrint__17CTempPauseMessageiiiP4RECT(struct CTempPauseMessage *this, int s, int Txt, int Menu, struct RECT *PRect)")
del_items(0x80088A98)
SetType(0x80088A98, "void InitPrintQuitMessage__17CTempPauseMessage(struct CTempPauseMessage *this)")
del_items(0x80088AA0)
SetType(0x80088AA0, "void PrintQuitMessage__17CTempPauseMessagei(struct CTempPauseMessage *this, int Menu)")
del_items(0x80088C18)
SetType(0x80088C18, "void LeavePrintQuitMessage__17CTempPauseMessagei(struct CTempPauseMessage *this, int Menu)")
del_items(0x80088C20)
SetType(0x80088C20, "void InitPrintAreYouSure__17CTempPauseMessage(struct CTempPauseMessage *this)")
del_items(0x80088C28)
SetType(0x80088C28, "void PrintAreYouSure__17CTempPauseMessagei(struct CTempPauseMessage *this, int Menu)")
del_items(0x80088DA0)
SetType(0x80088DA0, "void LeavePrintAreYouSure__17CTempPauseMessagei(struct CTempPauseMessage *this, int Menu)")
del_items(0x80088DA8)
SetType(0x80088DA8, "void InitPrintPaused__17CTempPauseMessage(struct CTempPauseMessage *this)")
del_items(0x80088DB0)
SetType(0x80088DB0, "void PrintPaused__17CTempPauseMessage(struct CTempPauseMessage *this)")
del_items(0x80088F00)
SetType(0x80088F00, "void LeavePrintPaused__17CTempPauseMessage(struct CTempPauseMessage *this)")
del_items(0x80088F08)
SetType(0x80088F08, "void ___17CTempPauseMessage(struct CTempPauseMessage *this, int __in_chrg)")
del_items(0x80088F30)
SetType(0x80088F30, "void _GLOBAL__D_DoPause__14CPauseMessagesi()")
del_items(0x80088F58)
SetType(0x80088F58, "void _GLOBAL__I_DoPause__14CPauseMessagesi()")
del_items(0x80088F80)
SetType(0x80088F80, "struct CTempPauseMessage *__17CTempPauseMessage(struct CTempPauseMessage *this)")
del_items(0x80088FC4)
SetType(0x80088FC4, "void ___14CPauseMessages(struct CPauseMessages *this, int __in_chrg)")
del_items(0x80088FF8)
SetType(0x80088FF8, "struct CPauseMessages *__14CPauseMessages(struct CPauseMessages *this)")
del_items(0x8008900C)
SetType(0x8008900C, "void SetRGB__6DialogUcUcUc(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x8008902C)
SetType(0x8008902C, "void SetBack__6Dialogi(struct Dialog *this, int Type)")
del_items(0x80089034)
SetType(0x80089034, "void SetBorder__6Dialogi(struct Dialog *this, int Type)")
del_items(0x8008903C)
SetType(0x8008903C, "void ___6Dialog(struct Dialog *this, int __in_chrg)")
del_items(0x80089064)
SetType(0x80089064, "struct Dialog *__6Dialog(struct Dialog *this)")
del_items(0x800890E4)
SetType(0x800890E4, "int GetOverlayOtBase__7CBlocks()")
del_items(0x800890EC)
SetType(0x800890EC, "int GetMaxOtPos__7CBlocks()")
del_items(0x800890F4)
SetType(0x800890F4, "unsigned short GetDown__C4CPad(struct CPad *this)")
del_items(0x8008911C)
SetType(0x8008911C, "unsigned char CheckActive__4CPad_addr_8008911C(struct CPad *this)")
del_items(0x80089128)
SetType(0x80089128, "unsigned long ReadPadStream__Fv()")
del_items(0x80089240)
SetType(0x80089240, "void PAD_Handler__Fv()")
del_items(0x8008943C)
SetType(0x8008943C, "struct CPad *PAD_GetPad__FiUc(int PadNum, unsigned char both)")
del_items(0x800894EC)
SetType(0x800894EC, "void NewVal__4CPadUs(struct CPad *this, unsigned short New)")
del_items(0x80089560)
SetType(0x80089560, "void BothNewVal__4CPadUsUs(struct CPad *this, unsigned short New, unsigned short New2)")
del_items(0x800895F4)
SetType(0x800895F4, "unsigned short Trans__4CPadUs(struct CPad *this, unsigned short PadVal)")
del_items(0x80089718)
SetType(0x80089718, "void Flush__4CPad(struct CPad *this)")
del_items(0x8008976C)
SetType(0x8008976C, "void InitClickBits__FPUs(unsigned short *CountArray)")
del_items(0x8008978C)
SetType(0x8008978C, "unsigned short MakeClickBits__FiiiPUs(int Switch, int Closed, int Speed, unsigned short *CountArray)")
del_items(0x80089818)
SetType(0x80089818, "void _GLOBAL__I_Pad0()")
del_items(0x80089850)
SetType(0x80089850, "void SetPadType__4CPadUc(struct CPad *this, unsigned char val)")
del_items(0x80089858)
SetType(0x80089858, "unsigned char CheckActive__4CPad_addr_80089858(struct CPad *this)")
del_items(0x80089864)
SetType(0x80089864, "void SetActive__4CPadUc(struct CPad *this, unsigned char a)")
del_items(0x8008986C)
SetType(0x8008986C, "void SetBothFlag__4CPadUc(struct CPad *this, unsigned char fl)")
del_items(0x80089874)
SetType(0x80089874, "struct CPad *__4CPadi(struct CPad *this, int PhysStick)")
del_items(0x800898A8)
SetType(0x800898A8, "void Set__7FontTab(struct FontTab *this)")
del_items(0x80089944)
SetType(0x80089944, "void InitPrinty__Fv()")
del_items(0x800899F4)
SetType(0x800899F4, "void SetTextDat__5CFontP7TextDat(struct CFont *this, struct TextDat *NewDat)")
del_items(0x800899FC)
SetType(0x800899FC, "int KanjiPrintChar__5CFontUsUsUsUcUcUc(struct CFont *this, unsigned short Cx, unsigned short Cy, unsigned short kan, int R, int G, int B)")
del_items(0x80089B90)
SetType(0x80089B90, "int PrintChar__5CFontUsUsUcUcUcUc(struct CFont *this, unsigned short Cx, unsigned short Cy, unsigned char C, int R, int G, int B)")
del_items(0x80089D44)
SetType(0x80089D44, "int Print__5CFontiiPc8TXT_JUSTP4RECTUcUcUc(struct CFont *this, int X, int Y, char *Str, enum TXT_JUST Justify, struct RECT *TextWindow, int R, int G, int B)")
del_items(0x8008A37C)
SetType(0x8008A37C, "int GetWrap__5CFontPcP4RECT(struct CFont *this, char *Str, struct RECT *TextWindow)")
del_items(0x8008A5EC)
SetType(0x8008A5EC, "int GetWrapWidth__5CFontPcP4RECT(struct CFont *this, char *Str, struct RECT *TextWindow)")
del_items(0x8008A758)
SetType(0x8008A758, "int GetStrWidth__5CFontPc(struct CFont *this, char *Str)")
del_items(0x8008A7D4)
SetType(0x8008A7D4, "void SetChar__5CFontiUs(struct CFont *this, int ch, unsigned short Frm)")
del_items(0x8008A854)
SetType(0x8008A854, "int SetOTpos__5CFonti(struct CFont *this, int OT)")
del_items(0x8008A860)
SetType(0x8008A860, "int GetCharWidth__5CFontUc(struct CFont *this, unsigned char ch)")
del_items(0x8008A910)
SetType(0x8008A910, "void _GLOBAL__I_WHITER()")
del_items(0x8008A96C)
SetType(0x8008A96C, "int GetOverlayOtBase__7CBlocks_addr_8008A96C()")
del_items(0x8008A974)
SetType(0x8008A974, "void ClearFont__5CFont(struct CFont *this)")
del_items(0x8008A998)
SetType(0x8008A998, "bool IsDefined__5CFontUc(struct CFont *this, unsigned char C)")
del_items(0x8008A9B8)
SetType(0x8008A9B8, "int GetCharFrameNum__5CFontUc(struct CFont *this, unsigned char ch)")
del_items(0x8008A9D0)
SetType(0x8008A9D0, "void Init__5CFont(struct CFont *this)")
del_items(0x8008AA04)
SetType(0x8008AA04, "struct FRAME_HDR *GetFr__7TextDati(struct TextDat *this, int FrNum)")
del_items(0x8008AA20)
SetType(0x8008AA20, "unsigned char TrimCol__Fs(short col)")
del_items(0x8008AA58)
SetType(0x8008AA58, "struct POLY_GT4 *DialogPrint__Fiiiiiiiiii(int Frm, int X, int Y, int SW, int SH, int UW, int UH, int UOfs, int VOfs, int Trans)")
del_items(0x8008B3D8)
SetType(0x8008B3D8, "struct POLY_G4 *GetDropShadowG4__FUcUcUcUcUcUcUcUcUcUcUcUc(unsigned char r0, unsigned char g0, unsigned char b0, unsigned char r1, int g1, int b1, int r2, int g2, int b2, int r3, int g3, int b3)")
del_items(0x8008B510)
SetType(0x8008B510, "void DropShadows__Fiiii(int x, int y, int w, int h)")
del_items(0x8008B7B4)
SetType(0x8008B7B4, "void InitDialog__Fv()")
del_items(0x8008B8EC)
SetType(0x8008B8EC, "void GetSizes__6Dialog(struct Dialog *this)")
del_items(0x8008BB70)
SetType(0x8008BB70, "void Back__6Dialogiiii(struct Dialog *this, int DX, int DY, int DW, int DH)")
del_items(0x8008CC88)
SetType(0x8008CC88, "void Line__6Dialogiii(struct Dialog *this, int DX, int DY, int DW)")
del_items(0x8008CEB8)
SetType(0x8008CEB8, "int SetOTpos__6Dialogi(struct Dialog *this, int OT)")
del_items(0x8008CECC)
SetType(0x8008CECC, "struct PAL *GetPal__7TextDati(struct TextDat *this, int PalNum)")
del_items(0x8008CEE8)
SetType(0x8008CEE8, "struct FRAME_HDR *GetFr__7TextDati_addr_8008CEE8(struct TextDat *this, int FrNum)")
del_items(0x8008CF04)
SetType(0x8008CF04, "void ATT_DoAttract__Fv()")
del_items(0x8008CFCC)
SetType(0x8008CFCC, "void CustomPlayerInit__FR12PlayerStruct(struct PlayerStruct *P)")
del_items(0x8008CFD4)
SetType(0x8008CFD4, "void CreatePlayersFromFeData__FR9FE_CREATE(struct FE_CREATE *CStruct)")
del_items(0x8008D0AC)
SetType(0x8008D0AC, "void UpdateSel__FPUsUsPUc(unsigned short *Col, unsigned short Add, unsigned char *Count)")
del_items(0x8008D0EC)
SetType(0x8008D0EC, "void CycleSelCols__Fv()")
del_items(0x8008D2A4)
SetType(0x8008D2A4, "int FindTownCreature__7CBlocksi(struct CBlocks *this, int GameEqu)")
del_items(0x8008D318)
SetType(0x8008D318, "int FindCreature__7CBlocksi(struct CBlocks *this, int MgNum)")
del_items(0x8008D38C)
SetType(0x8008D38C, "struct CBlocks *__7CBlocksiiiii(struct CBlocks *this, int BgId, int ObjId, int ItemId, int Level, int List)")
del_items(0x8008D4F0)
SetType(0x8008D4F0, "void SetTownersGraphics__7CBlocks(struct CBlocks *this)")
del_items(0x8008D528)
SetType(0x8008D528, "void SetMonsterGraphics__7CBlocksii(struct CBlocks *this, int Level, int List)")
del_items(0x8008D5F0)
SetType(0x8008D5F0, "void ___7CBlocks(struct CBlocks *this, int __in_chrg)")
del_items(0x8008D678)
SetType(0x8008D678, "void DumpGt4s__7CBlocks(struct CBlocks *this)")
del_items(0x8008D6E0)
SetType(0x8008D6E0, "void DumpRects__7CBlocks(struct CBlocks *this)")
del_items(0x8008D748)
SetType(0x8008D748, "void SetGraphics__7CBlocksPP7TextDatPii(struct CBlocks *this, struct TextDat **TDat, int *pId, int Id)")
del_items(0x8008D7A4)
SetType(0x8008D7A4, "void DumpGraphics__7CBlocksPP7TextDatPi(struct CBlocks *this, struct TextDat **TDat, int *Id)")
del_items(0x8008D7F4)
SetType(0x8008D7F4, "void Load__7CBlocksi(struct CBlocks *this, int Id)")
del_items(0x8008D8AC)
SetType(0x8008D8AC, "void MakeRectTable__7CBlocks(struct CBlocks *this)")
del_items(0x8008DA00)
SetType(0x8008DA00, "void MakeGt4Table__7CBlocks(struct CBlocks *this)")
del_items(0x8008DBE4)
SetType(0x8008DBE4, "void MakeGt4__7CBlocksP8POLY_GT4P9FRAME_HDR(struct CBlocks *this, struct POLY_GT4 *GT4, struct FRAME_HDR *Fr)")
del_items(0x8008DD0C)
SetType(0x8008DD0C, "void MyRoutine__FR7CBlocksii(struct CBlocks *B, int x, int y)")
del_items(0x8008DD74)
SetType(0x8008DD74, "void SetRandOffset__7CBlocksi(struct CBlocks *this, int QuakeAmount)")
del_items(0x8008DDD0)
SetType(0x8008DDD0, "void Print__7CBlocks(struct CBlocks *this)")
del_items(0x8008DEEC)
SetType(0x8008DEEC, "void SetXY__7CBlocksii(struct CBlocks *this, int nx, int ny)")
del_items(0x8008DF14)
SetType(0x8008DF14, "void GetXY__7CBlocksPiT1(struct CBlocks *this, int *nx, int *ny)")
del_items(0x8008DF2C)
SetType(0x8008DF2C, "void InitColourCycling__7CBlocks(struct CBlocks *this)")
del_items(0x8008E078)
SetType(0x8008E078, "void GetGCol__7CBlocksiiPUcP7RGBData(struct CBlocks *this, int x, int y, unsigned char *Rgb, struct RGBData *Data)")
del_items(0x8008E1B8)
SetType(0x8008E1B8, "void PrintMap__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x8008ED28)
SetType(0x8008ED28, "void IterateVisibleMap__7CBlocksiiPFP9CacheInfoP8map_infoii_ib(struct CBlocks *this, int x, int y, int (*Func)(), bool VisCheck)")
del_items(0x8008F1A0)
SetType(0x8008F1A0, "int AddMonst__FP9CacheInfoP8map_infoii(struct CacheInfo *Info, struct map_info *p0, int bx, int by)")
del_items(0x8008F280)
SetType(0x8008F280, "void PrintMonsters__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x8008FD24)
SetType(0x8008FD24, "int AddTowners__FP9CacheInfoP8map_infoii(struct CacheInfo *Info, struct map_info *p0, int bx, int by)")
del_items(0x8008FD80)
SetType(0x8008FD80, "void PrintTowners__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x80090138)
SetType(0x80090138, "int AddObject__FP9CacheInfoP8map_infoii(struct CacheInfo *Info, struct map_info *p0, int bx, int by)")
del_items(0x80090194)
SetType(0x80090194, "void PrintObjects__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x800905F0)
SetType(0x800905F0, "int AddDead__FP9CacheInfoP8map_infoii(struct CacheInfo *Info, struct map_info *p0, int bx, int by)")
del_items(0x8009067C)
SetType(0x8009067C, "void PrintDead__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x80090940)
SetType(0x80090940, "int AddItem__FP9CacheInfoP8map_infoii(struct CacheInfo *Info, struct map_info *p0, int bx, int by)")
del_items(0x8009099C)
SetType(0x8009099C, "void PrintItems__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x80090F5C)
SetType(0x80090F5C, "int AddMissile__FP9CacheInfoP8map_infoii(struct CacheInfo *Info, struct map_info *p0, int bx, int by)")
del_items(0x80091064)
SetType(0x80091064, "void PrintMissiles__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x8009125C)
SetType(0x8009125C, "int ScrToWorldX__7CBlocksii(struct CBlocks *this, int sx, int sy)")
del_items(0x80091270)
SetType(0x80091270, "int ScrToWorldY__7CBlocksii(struct CBlocks *this, int sx, int sy)")
del_items(0x80091284)
SetType(0x80091284, "void SetScrollTarget__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x80091348)
SetType(0x80091348, "void DoScroll__7CBlocks(struct CBlocks *this)")
del_items(0x80091434)
SetType(0x80091434, "void SetPlayerPosBlocks__7CBlocksiii(struct CBlocks *this, int PlayerNum, int bx, int by)")
del_items(0x800914D4)
SetType(0x800914D4, "void GetScrXY__7CBlocksR4RECTiiii(struct CBlocks *this, struct RECT *R, int x, int y, int sxoff, int syoff)")
del_items(0x800915A8)
SetType(0x800915A8, "void ShadScaleSkew__7CBlocksP8POLY_FT4(struct POLY_FT4 *Ft4)")
del_items(0x80091648)
SetType(0x80091648, "int WorldToScrX__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x80091650)
SetType(0x80091650, "int WorldToScrY__7CBlocksii(struct CBlocks *this, int x, int y)")
del_items(0x80091664)
SetType(0x80091664, "struct CBlocks *BL_GetCurrentBlocks__Fv()")
del_items(0x80091670)
SetType(0x80091670, "int GetHighlightCol__FiPcUsUsUs(int Index, char *SelList, unsigned short P1Col, unsigned short P2Col, int P12Col)")
del_items(0x800916B8)
SetType(0x800916B8, "void PRIM_GetPrim__FPP8POLY_FT4(struct POLY_FT4 **Prim)")
del_items(0x80091734)
SetType(0x80091734, "int GetHighlightCol__FiPiUsUsUs(int Index, int *SelList, unsigned short P1Col, unsigned short P2Col, int P12Col)")
del_items(0x8009177C)
SetType(0x8009177C, "struct POLY_FT4 *PRIM_GetCopy__FP8POLY_FT4(struct POLY_FT4 *Prim)")
del_items(0x800917B8)
SetType(0x800917B8, "void PRIM_GetPrim__FPP8POLY_GT4(struct POLY_GT4 **Prim)")
del_items(0x80091834)
SetType(0x80091834, "void PRIM_CopyPrim__FP8POLY_FT4T0(struct POLY_FT4 *Dest, struct POLY_FT4 *Source)")
del_items(0x8009185C)
SetType(0x8009185C, "int GetCreature__14TownToCreaturei(struct TownToCreature *this, int GameCreature)")
del_items(0x80091878)
SetType(0x80091878, "void SetItemGraphics__7CBlocksi(struct CBlocks *this, int Id)")
del_items(0x800918A0)
SetType(0x800918A0, "void SetObjGraphics__7CBlocksi(struct CBlocks *this, int Id)")
del_items(0x800918C8)
SetType(0x800918C8, "void DumpItems__7CBlocks(struct CBlocks *this)")
del_items(0x800918EC)
SetType(0x800918EC, "void DumpObjs__7CBlocks(struct CBlocks *this)")
del_items(0x80091910)
SetType(0x80091910, "void DumpMonsters__7CBlocks(struct CBlocks *this)")
del_items(0x80091938)
SetType(0x80091938, "int GetOtPos__7CBlocksi(struct CBlocks *this, int LogicalY)")
del_items(0x80091970)
SetType(0x80091970, "void InitFromGt4__9LittleGt4P8POLY_GT4ii(struct LittleGt4 *this, struct POLY_GT4 *Gt4, int nw, int nh)")
del_items(0x800919FC)
SetType(0x800919FC, "int GetNumOfFrames__7TextDatii(struct TextDat *this, int Creature, int Action)")
del_items(0x80091A34)
SetType(0x80091A34, "int GetNumOfActions__7TextDati(struct TextDat *this, int Creature)")
del_items(0x80091A58)
SetType(0x80091A58, "struct CCreatureHdr *GetCreature__7TextDati(struct TextDat *this, int Creature)")
del_items(0x80091A74)
SetType(0x80091A74, "void SetFileInfo__7TextDatPC13CTextFileInfoi(struct TextDat *this, struct CTextFileInfo *NewInfo, int NewTexNum)")
del_items(0x80091A80)
SetType(0x80091A80, "int GetNumOfFrames__7TextDat(struct TextDat *this)")
del_items(0x80091A94)
SetType(0x80091A94, "struct PAL *GetPal__7TextDati_addr_80091A94(struct TextDat *this, int PalNum)")
del_items(0x80091AB0)
SetType(0x80091AB0, "struct FRAME_HDR *GetFr__7TextDati_addr_80091AB0(struct TextDat *this, int FrNum)")
del_items(0x80091ACC)
SetType(0x80091ACC, "struct TextDat *__7TextDat(struct TextDat *this)")
del_items(0x80091B00)
SetType(0x80091B00, "void OnceOnlyInit__7TextDat(struct TextDat *this)")
del_items(0x80091B20)
SetType(0x80091B20, "void ___7TextDat(struct TextDat *this, int __in_chrg)")
del_items(0x80091B68)
SetType(0x80091B68, "void ReloadTP__7TextDat(struct TextDat *this)")
del_items(0x80091BA8)
SetType(0x80091BA8, "void Use__7TextDatlbi(struct TextDat *this, long NewHndDat, bool DatLoaded, int size)")
del_items(0x80091DE8)
SetType(0x80091DE8, "bool TpLoadCallBack__FPUciib(unsigned char *Mem, int ReadSoFar, int Size, bool LastChunk)")
del_items(0x80091E90)
SetType(0x80091E90, "void StreamLoadTP__7TextDat(struct TextDat *this)")
del_items(0x80091F48)
SetType(0x80091F48, "void FinishedUsing__7TextDat(struct TextDat *this)")
del_items(0x80091FE0)
SetType(0x80091FE0, "void MakeBlockOffsetTab__7TextDat(struct TextDat *this)")
del_items(0x8009202C)
SetType(0x8009202C, "long MakeOffsetTab__C9CBlockHdr(struct CBlockHdr *this)")
del_items(0x80092158)
SetType(0x80092158, "void SetUVTp__7TextDatP9FRAME_HDRP8POLY_FT4ii(struct TextDat *this, struct FRAME_HDR *Fr, struct POLY_FT4 *FT4, int XFlip, int YFlip)")
del_items(0x80092258)
SetType(0x80092258, "bool IsCompressed__7TextDatiiii(struct TextDat *this, int Creature, int Action, int Dir, int Frame)")
del_items(0x800922A4)
SetType(0x800922A4, "struct POLY_FT4 *PrintMonster__7TextDatiiiiiii(struct TextDat *this, int Creature, int Action, int Dir, int Frame, int x, int y, int OtPos)")
del_items(0x80092350)
SetType(0x80092350, "struct POLY_FT4 *PrintMonsterA__7TextDatiiibi(struct TextDat *this, int Frm, int X, int Y, bool XFlip, int OtPos)")
del_items(0x800926F8)
SetType(0x800926F8, "void PrepareFt4__7TextDatP8POLY_FT4iiiii(struct TextDat *this, struct POLY_FT4 *FT4, int Frm, int X, int Y, int XFlip, int YFlip)")
del_items(0x8009298C)
SetType(0x8009298C, "unsigned char *GetDecompBufffer__7TextDati(struct TextDat *this, int Size)")
del_items(0x80092AEC)
SetType(0x80092AEC, "void SetUVTpGT4__7TextDatP9FRAME_HDRP8POLY_GT4ii(struct TextDat *this, struct FRAME_HDR *Fr, struct POLY_GT4 *FT4, int XFlip, int YFlip)")
del_items(0x80092BEC)
SetType(0x80092BEC, "void PrepareGt4__7TextDatP8POLY_GT4iiiii(struct TextDat *this, struct POLY_GT4 *GT4, int Frm, int X, int Y, int XFlip, int YFlip)")
del_items(0x80092E44)
SetType(0x80092E44, "void SetUVTpGT3__7TextDatP9FRAME_HDRP8POLY_GT3(struct TextDat *this, struct FRAME_HDR *Fr, struct POLY_GT3 *GT3)")
del_items(0x80092EC8)
SetType(0x80092EC8, "void PrepareGt3__7TextDatP8POLY_GT3iii(struct TextDat *this, struct POLY_GT3 *GT3, int Frm, int X, int Y)")
del_items(0x80093090)
SetType(0x80093090, "struct POLY_FT4 *PrintFt4__7TextDatiiiiii(struct TextDat *this, int Frm, int X, int Y, int XFlip, int OtPos, int YFlip)")
del_items(0x800931E4)
SetType(0x800931E4, "struct POLY_GT4 *PrintGt4__7TextDatiiiiii(struct TextDat *this, int Frm, int X, int Y, int XFlip, int OtPos, int YFlip)")
del_items(0x80093338)
SetType(0x80093338, "void DecompFrame__7TextDatP9FRAME_HDR(struct TextDat *this, struct FRAME_HDR *Fr)")
del_items(0x80093490)
SetType(0x80093490, "void MakeCreatureOffsetTab__7TextDat(struct TextDat *this)")
del_items(0x800935D0)
SetType(0x800935D0, "void MakePalOffsetTab__7TextDat(struct TextDat *this)")
del_items(0x800936CC)
SetType(0x800936CC, "void InitData__7TextDat(struct TextDat *this)")
del_items(0x800936FC)
SetType(0x800936FC, "void DumpData__7TextDat(struct TextDat *this)")
del_items(0x80093824)
SetType(0x80093824, "void DumpHdr__7TextDat(struct TextDat *this)")
del_items(0x80093888)
SetType(0x80093888, "struct TextDat *GM_UseTexData__Fi(int Id)")
del_items(0x800939BC)
SetType(0x800939BC, "void GM_ForceTpLoad__Fi(int Id)")
del_items(0x800939F8)
SetType(0x800939F8, "void GM_FinishedUsing__FP7TextDat(struct TextDat *Fin)")
del_items(0x80093A4C)
SetType(0x80093A4C, "void SetPal__7TextDatP9FRAME_HDRP8POLY_FT4(struct TextDat *this, struct FRAME_HDR *Fr, struct POLY_FT4 *FT4)")
del_items(0x80093B10)
SetType(0x80093B10, "int GetFrNum__7TextDatiiii(struct TextDat *this, int Creature, int Action, int Direction, int Frame)")
del_items(0x80093B64)
SetType(0x80093B64, "bool IsDirAliased__7TextDatiii(struct TextDat *this, int Creature, int Action, int Direction)")
del_items(0x80093BBC)
SetType(0x80093BBC, "void DoDecompRequests__7TextDat(struct TextDat *this)")
del_items(0x80093CE0)
SetType(0x80093CE0, "void FindDecompArea__7TextDatR4RECT(struct TextDat *this, struct RECT *R)")
del_items(0x80093DB8)
SetType(0x80093DB8, "struct CTextFileInfo *GetFileInfo__7TextDati(int Id)")
del_items(0x80093E08)
SetType(0x80093E08, "int GetSize__C15CCreatureAction(struct CCreatureAction *this)")
del_items(0x80093E30)
SetType(0x80093E30, "int GetFrNum__C15CCreatureActionii(struct CCreatureAction *this, int Direction, int Frame)")
del_items(0x80093E60)
SetType(0x80093E60, "void InitDirRemap__15CCreatureAction(struct CCreatureAction *this)")
del_items(0x80093F20)
SetType(0x80093F20, "int GetFrNum__C12CCreatureHdriii(struct CCreatureHdr *this, int Action, int Direction, int Frame)")
del_items(0x80093F64)
SetType(0x80093F64, "struct CCreatureAction *GetAction__C12CCreatureHdri(struct CCreatureHdr *this, int ActNum)")
del_items(0x80093FF4)
SetType(0x80093FF4, "void InitActionDirRemaps__12CCreatureHdr(struct CCreatureHdr *this)")
del_items(0x80094064)
SetType(0x80094064, "int GetSize__C12CCreatureHdr(struct CCreatureHdr *this)")
del_items(0x800940D0)
SetType(0x800940D0, "void LoadDat__C13CTextFileInfoli(struct CTextFileInfo *this, long hnd, int size)")
del_items(0x80094204)
SetType(0x80094204, "long LoadDat__C13CTextFileInfo(struct CTextFileInfo *this)")
del_items(0x8009425C)
SetType(0x8009425C, "long LoadHdr__C13CTextFileInfo(struct CTextFileInfo *this)")
del_items(0x80094284)
SetType(0x80094284, "void MakeFname__C13CTextFileInfoPcPCc(struct CTextFileInfo *this, char *Dest, char *Ext)")
del_items(0x800942CC)
SetType(0x800942CC, "long GetFile__C13CTextFileInfoPcUl(struct CTextFileInfo *this, char *Ext, unsigned long RamId)")
del_items(0x8009436C)
SetType(0x8009436C, "bool HasFile__C13CTextFileInfoPc(struct CTextFileInfo *this, char *Ext)")
del_items(0x80094400)
SetType(0x80094400, "void Un64__FPUcT0l(unsigned char *Src, unsigned char *Dest, long SizeBytes)")
del_items(0x800944D4)
SetType(0x800944D4, "struct CScreen *__7CScreen(struct CScreen *this)")
del_items(0x80094508)
SetType(0x80094508, "void Load__7CScreeniii(struct CScreen *this, int Id, int tpx, int tpy)")
del_items(0x8009481C)
SetType(0x8009481C, "void Unload__7CScreen(struct CScreen *this)")
del_items(0x80094840)
SetType(0x80094840, "void Display__7CScreeniiii(struct CScreen *this, int Id, int tpx, int tpy, int fadeval)")
del_items(0x80094B20)
SetType(0x80094B20, "void SetRect__5CPartR7TextDatR4RECT(struct CPart *this, struct TextDat *TDat, struct RECT *R)")
del_items(0x80094B9C)
SetType(0x80094B9C, "void GetBoundingBox__6CBlockR7TextDatR4RECT(struct CBlock *this, struct TextDat *TDat, struct RECT *R)")
del_items(0x80094CF8)
SetType(0x80094CF8, "void _GLOBAL__D_DatPool()")
del_items(0x80094D50)
SetType(0x80094D50, "void _GLOBAL__I_DatPool()")
del_items(0x80094DA4)
SetType(0x80094DA4, "void PRIM_GetPrim__FPP8POLY_GT4_addr_80094DA4(struct POLY_GT4 **Prim)")
del_items(0x80094E20)
SetType(0x80094E20, "void PRIM_GetPrim__FPP8POLY_FT4_addr_80094E20(struct POLY_FT4 **Prim)")
del_items(0x80094E9C)
SetType(0x80094E9C, "void DumpDatFile__7TextDat(struct TextDat *this)")
del_items(0x80094F10)
SetType(0x80094F10, "bool CanXferFrame__C7TextDat(struct TextDat *this)")
del_items(0x80094F38)
SetType(0x80094F38, "bool CanXferPal__C7TextDat(struct TextDat *this)")
del_items(0x80094F60)
SetType(0x80094F60, "bool IsLoaded__C7TextDat(struct TextDat *this)")
del_items(0x80094F6C)
SetType(0x80094F6C, "int GetTexNum__C7TextDat(struct TextDat *this)")
del_items(0x80094F78)
SetType(0x80094F78, "struct CCreatureHdr *GetCreature__7TextDati_addr_80094F78(struct TextDat *this, int Creature)")
del_items(0x80094F94)
SetType(0x80094F94, "int GetNumOfCreatures__7TextDat(struct TextDat *this)")
del_items(0x80094FA8)
SetType(0x80094FA8, "void SetFileInfo__7TextDatPC13CTextFileInfoi_addr_80094FA8(struct TextDat *this, struct CTextFileInfo *NewInfo, int NewTexNum)")
del_items(0x80094FB4)
SetType(0x80094FB4, "int GetNumOfFrames__7TextDat_addr_80094FB4(struct TextDat *this)")
del_items(0x80094FC8)
SetType(0x80094FC8, "struct PAL *GetPal__7TextDati_addr_80094FC8(struct TextDat *this, int PalNum)")
del_items(0x80094FE4)
SetType(0x80094FE4, "struct FRAME_HDR *GetFr__7TextDati_addr_80094FE4(struct TextDat *this, int FrNum)")
del_items(0x80095000)
SetType(0x80095000, "char *GetName__C13CTextFileInfo(struct CTextFileInfo *this)")
del_items(0x8009500C)
SetType(0x8009500C, "bool HasDat__C13CTextFileInfo(struct CTextFileInfo *this)")
del_items(0x80095034)
SetType(0x80095034, "bool HasTp__C13CTextFileInfo(struct CTextFileInfo *this)")
del_items(0x8009505C)
SetType(0x8009505C, "int GetSize__C6CBlock(struct CBlock *this)")
del_items(0x80095070)
SetType(0x80095070, "bool OVR_IsMemcardOverlayBlank__Fv()")
del_items(0x8009509C)
SetType(0x8009509C, "void OVR_LoadPregame__Fv()")
del_items(0x800950C4)
SetType(0x800950C4, "void OVR_LoadFrontend__Fv()")
del_items(0x800950EC)
SetType(0x800950EC, "void OVR_LoadGame__Fv()")
del_items(0x80095114)
SetType(0x80095114, "void OVR_LoadFmv__Fv()")
del_items(0x8009513C)
SetType(0x8009513C, "void OVR_LoadMemcard__Fv()")
del_items(0x80095168)
SetType(0x80095168, "void ClearOutOverlays__Fv()")
del_items(0x800951C0)
SetType(0x800951C0, "void ClearOut__7Overlay(struct Overlay *this)")
del_items(0x80095284)
SetType(0x80095284, "void Load__7Overlay(struct Overlay *this)")
del_items(0x800952E0)
SetType(0x800952E0, "enum OVER_TYPE OVR_GetCurrentOverlay__Fv()")
del_items(0x800952EC)
SetType(0x800952EC, "void LoadOver__FR7Overlay(struct Overlay *Ovr)")
del_items(0x80095340)
SetType(0x80095340, "void _GLOBAL__I_OVR_Open__Fv()")
del_items(0x800954B0)
SetType(0x800954B0, "enum OVER_TYPE GetOverType__7Overlay(struct Overlay *this)")
del_items(0x800954BC)
SetType(0x800954BC, "void StevesDummyPoll__Fv()")
del_items(0x800954C4)
SetType(0x800954C4, "void Lambo__Fv()")
del_items(0x800954CC)
SetType(0x800954CC, "struct CPlayer *__7CPlayerbii(struct CPlayer *this, bool Town, int mPlayerNum, int NewNumOfPlayers)")
del_items(0x80095624)
SetType(0x80095624, "void ___7CPlayer(struct CPlayer *this, int __in_chrg)")
del_items(0x800956B4)
SetType(0x800956B4, "void Load__7CPlayeri(struct CPlayer *this, int Id)")
del_items(0x80095720)
SetType(0x80095720, "void SetScrollTarget__7CPlayerR12PlayerStructR7CBlocks(struct CPlayer *this, struct PlayerStruct *Plr, struct CBlocks *Bg)")
del_items(0x80095B04)
SetType(0x80095B04, "void Print__7CPlayerR12PlayerStructR7CBlocks(struct CPlayer *this, struct PlayerStruct *Plr, struct CBlocks *Bg)")
del_items(0x8009603C)
SetType(0x8009603C, "int FindAction__7CPlayerR12PlayerStruct(struct CPlayer *this, struct PlayerStruct *Plr)")
del_items(0x800960C0)
SetType(0x800960C0, "enum PACTION FindActionEnum__7CPlayerR12PlayerStruct(struct CPlayer *this, struct PlayerStruct *Plr)")
del_items(0x80096144)
SetType(0x80096144, "void Init__7CPlayer(struct CPlayer *this)")
del_items(0x8009614C)
SetType(0x8009614C, "void Dump__7CPlayer(struct CPlayer *this)")
del_items(0x80096154)
SetType(0x80096154, "void LoadThis__7CPlayeri(struct CPlayer *this, int Id)")
del_items(0x800961C4)
SetType(0x800961C4, "void NonBlockingLoadNewGFX__7CPlayeri(struct CPlayer *this, int Id)")
del_items(0x80096230)
SetType(0x80096230, "void FilthyTask__FP4TASK(struct TASK *T)")
del_items(0x800962B8)
SetType(0x800962B8, "void PRIM_GetPrim__FPP8POLY_FT4_addr_800962B8(struct POLY_FT4 **Prim)")
del_items(0x80096334)
SetType(0x80096334, "struct POLY_FT4 *PRIM_GetCopy__FP8POLY_FT4_addr_80096334(struct POLY_FT4 *Prim)")
del_items(0x80096370)
SetType(0x80096370, "void PRIM_CopyPrim__FP8POLY_FT4T0_addr_80096370(struct POLY_FT4 *Dest, struct POLY_FT4 *Source)")
del_items(0x80096398)
SetType(0x80096398, "int GetDatMaxSize__7CPlayer(struct CPlayer *this)")
del_items(0x800963D8)
SetType(0x800963D8, "int GetOtPos__7CBlocksi_addr_800963D8(struct CBlocks *this, int LogicalY)")
del_items(0x80096414)
SetType(0x80096414, "void SetDecompArea__7TextDatiiii(struct TextDat *this, int nDecX, int nDecY, int nPalX, int nPalY)")
del_items(0x8009642C)
SetType(0x8009642C, "int GetNumOfFrames__7TextDatii_addr_8009642C(struct TextDat *this, int Creature, int Action)")
del_items(0x80096464)
SetType(0x80096464, "int GetNumOfActions__7TextDati_addr_80096464(struct TextDat *this, int Creature)")
del_items(0x80096488)
SetType(0x80096488, "struct CCreatureHdr *GetCreature__7TextDati_addr_80096488(struct TextDat *this, int Creature)")
del_items(0x800964A4)
SetType(0x800964A4, "void SetFileInfo__7TextDatPC13CTextFileInfoi_addr_800964A4(struct TextDat *this, struct CTextFileInfo *NewInfo, int NewTexNum)")
del_items(0x800964B0)
SetType(0x800964B0, "void PROF_Open__Fv()")
del_items(0x800964F0)
SetType(0x800964F0, "bool PROF_State__Fv()")
del_items(0x800964FC)
SetType(0x800964FC, "void PROF_On__Fv()")
del_items(0x8009650C)
SetType(0x8009650C, "void PROF_Off__Fv()")
del_items(0x80096518)
SetType(0x80096518, "void PROF_CpuEnd__Fv()")
del_items(0x80096548)
SetType(0x80096548, "void PROF_CpuStart__Fv()")
del_items(0x8009656C)
SetType(0x8009656C, "void PROF_DrawStart__Fv()")
del_items(0x80096590)
SetType(0x80096590, "void PROF_DrawEnd__Fv()")
del_items(0x800965C0)
SetType(0x800965C0, "void PROF_Draw__FPUl(unsigned long *Ot)")
del_items(0x800967B4)
SetType(0x800967B4, "void PROF_Restart__Fv()")
del_items(0x800967D4)
SetType(0x800967D4, "void PSX_WndProc__FUilUl(unsigned int Msg, long wParam, unsigned long lParam)")
del_items(0x80096B58)
SetType(0x80096B58, "void PSX_PostWndProc__FUilUl(unsigned int Msg, long wParam, unsigned long lParam)")
del_items(0x80096C10)
SetType(0x80096C10, "void GoSetLevel__Fv()")
del_items(0x80096CA4)
SetType(0x80096CA4, "void GoBackLevel__Fv()")
del_items(0x80096D00)
SetType(0x80096D00, "void GoWarpLevel__Fv()")
del_items(0x80096D2C)
SetType(0x80096D2C, "void PostLoadGame__Fv()")
del_items(0x80096DA4)
SetType(0x80096DA4, "void GoLoadGame__Fv()")
del_items(0x80096EFC)
SetType(0x80096EFC, "void PostNewLevel__Fv()")
del_items(0x80096FB0)
SetType(0x80096FB0, "void GoNewLevel__Fv()")
del_items(0x80096FF8)
SetType(0x80096FF8, "void PostGoBackLevel__Fv()")
del_items(0x800970A4)
SetType(0x800970A4, "void GoForwardLevel__Fv()")
del_items(0x800970F8)
SetType(0x800970F8, "void PostGoForwardLevel__Fv()")
del_items(0x800971A4)
SetType(0x800971A4, "void GoNewGame__Fv()")
del_items(0x800971C8)
SetType(0x800971C8, "void PostNewGame__Fv()")
del_items(0x800971F0)
SetType(0x800971F0, "void LevelToLevelInit__Fv()")
del_items(0x80097240)
SetType(0x80097240, "unsigned int GetPal__6GPaneli(struct GPanel *this, int Frm)")
del_items(0x80097284)
SetType(0x80097284, "struct GPanel *__6GPaneli(struct GPanel *this, int Ofs)")
del_items(0x800972E8)
SetType(0x800972E8, "void DrawFlask__6GPanelP7PanelXYP12PlayerStruct(struct GPanel *this, struct PanelXY *XY, struct PlayerStruct *Plr)")
del_items(0x8009775C)
SetType(0x8009775C, "unsigned char SpdTrimCol__Fs(short col)")
del_items(0x80097794)
SetType(0x80097794, "void DrawSpeedBar__6GPanelP7PanelXYP12PlayerStruct(struct GPanel *this, struct PanelXY *XY, struct PlayerStruct *Plr)")
del_items(0x80097EC0)
SetType(0x80097EC0, "void DrawSpell__6GPanelP7PanelXYP12PlayerStruct(struct GPanel *this, struct PanelXY *XY, struct PlayerStruct *Plr)")
del_items(0x8009805C)
SetType(0x8009805C, "void DrawMsgWindow__6GPanelP7PanelXYP12PlayerStruct(struct GPanel *this, struct PanelXY *XY, struct PlayerStruct *Plr)")
del_items(0x800980AC)
SetType(0x800980AC, "int DrawDurThingy__6GPaneliiP10ItemStructi(struct GPanel *this, int X, int Y, struct ItemStruct *Item, int ItemType)")
del_items(0x80098378)
SetType(0x80098378, "void DrawDurIcon__6GPanelP7PanelXYP12PlayerStruct(struct GPanel *this, struct PanelXY *XY, struct PlayerStruct *Plr)")
del_items(0x800984A4)
SetType(0x800984A4, "void Print__6GPanelP7PanelXYP12PlayerStruct(struct GPanel *this, struct PanelXY *XY, struct PlayerStruct *Plr)")
del_items(0x800985BC)
SetType(0x800985BC, "int GetMaxOtPos__7CBlocks_addr_800985BC()")
del_items(0x800985C4)
SetType(0x800985C4, "struct PAL *GetPal__7TextDati_addr_800985C4(struct TextDat *this, int PalNum)")
del_items(0x800985E0)
SetType(0x800985E0, "struct FRAME_HDR *GetFr__7TextDati_addr_800985E0(struct TextDat *this, int FrNum)")
del_items(0x800985FC)
SetType(0x800985FC, "void PrintCDWaitTask__FP4TASK(struct TASK *T)")
del_items(0x80098738)
SetType(0x80098738, "void InitCDWaitIcon__Fv()")
del_items(0x8009876C)
SetType(0x8009876C, "void STR_Debug__FP6SFXHDRPce(struct SFXHDR *sfh, char *e)")
del_items(0x80098780)
SetType(0x80098780, "void STR_SystemTask__FP4TASK(struct TASK *T)")
del_items(0x800987B0)
SetType(0x800987B0, "void STR_AllocBuffer__Fv()")
del_items(0x800987E8)
SetType(0x800987E8, "void STR_Init__Fv()")
del_items(0x80098914)
SetType(0x80098914, "struct SFXHDR *STR_InitStream__Fc(char flag)")
del_items(0x80098A3C)
SetType(0x80098A3C, "struct SFXHDR *STR_PlaySound__FUscic(unsigned short Name, char flag, int volume, char loop)")
del_items(0x80098C84)
SetType(0x80098C84, "void STR_setvolume__FP6SFXHDR(struct SFXHDR *sfh)")
del_items(0x80098D50)
SetType(0x80098D50, "void STR_setpitch__FP6SFXHDR(struct SFXHDR *sfh)")
del_items(0x80098D9C)
SetType(0x80098D9C, "void STR_PlaySFX__FP6SFXHDR(struct SFXHDR *sfh)")
del_items(0x80098EA8)
SetType(0x80098EA8, "void STR_pauseall__Fv()")
del_items(0x80098F1C)
SetType(0x80098F1C, "void STR_resumeall__Fv()")
del_items(0x80098F90)
SetType(0x80098F90, "void STR_CloseStream__FP6SFXHDR(struct SFXHDR *sfh)")
del_items(0x80098FFC)
SetType(0x80098FFC, "void STR_SoundCommand__FP6SFXHDRi(struct SFXHDR *sfh, int Command)")
del_items(0x800990E8)
SetType(0x800990E8, "char STR_Command__FP6SFXHDR(struct SFXHDR *sfh)")
del_items(0x800992D8)
SetType(0x800992D8, "void STR_DMAControl__FP6SFXHDR(struct SFXHDR *sfh)")
del_items(0x800993A0)
SetType(0x800993A0, "void STR_PlayStream__FP6SFXHDRPUci(struct SFXHDR *sfh, unsigned char *Src, int size)")
del_items(0x80099620)
SetType(0x80099620, "void STR_AsyncWeeTASK__FP4TASK(struct TASK *T)")
del_items(0x800998F8)
SetType(0x800998F8, "void STR_AsyncTASK__FP4TASK(struct TASK *T)")
del_items(0x80099CE0)
SetType(0x80099CE0, "void STR_StreamMainTask__FP6SFXHDRc(struct SFXHDR *sfh, char FileType)")
del_items(0x80099E0C)
SetType(0x80099E0C, "void SND_Monitor__FP4TASK(struct TASK *T)")
del_items(0x80099E98)
SetType(0x80099E98, "void SPU_OnceOnlyInit__Fv()")
del_items(0x80099ED0)
SetType(0x80099ED0, "void SPU_Init__Fv()")
del_items(0x80099FD8)
SetType(0x80099FD8, "int SND_FindChannel__Fv()")
del_items(0x8009A044)
SetType(0x8009A044, "void SND_ClearBank__Fv()")
del_items(0x8009A0B4)
SetType(0x8009A0B4, "bool SndLoadCallBack__FPUciib(unsigned char *Mem, int ReadSoFar, int Size, bool LastChunk)")
del_items(0x8009A12C)
SetType(0x8009A12C, "void SND_LoadBank__Fi(int lvlnum)")
del_items(0x8009A250)
SetType(0x8009A250, "int SND_FindSFX__FUs(unsigned short Name)")
del_items(0x8009A32C)
SetType(0x8009A32C, "void SND_StopSnd__Fi(int voice)")
del_items(0x8009A360)
SetType(0x8009A360, "bool SND_IsSfxPlaying__Fi(int SFXNo)")
del_items(0x8009A39C)
SetType(0x8009A39C, "int SND_RemapSnd__Fi(int SFXNo)")
del_items(0x8009A410)
SetType(0x8009A410, "int SND_PlaySnd__FUsiii(unsigned short Name, int vol, int pan, int pitchadj)")
del_items(0x8009A628)
SetType(0x8009A628, "void AS_CallBack0__Fi(int ah)")
del_items(0x8009A694)
SetType(0x8009A694, "void AS_CallBack1__Fi(int ah)")
del_items(0x8009A700)
SetType(0x8009A700, "void AS_WasLastBlock__FiP6STRHDRP6SFXHDR(int ah, struct STRHDR *sh, struct SFXHDR *sfh)")
del_items(0x8009A7C8)
SetType(0x8009A7C8, "int AS_OpenStream__FP6STRHDRP6SFXHDR(struct STRHDR *sh, struct SFXHDR *sfh)")
del_items(0x8009A868)
SetType(0x8009A868, "char AS_GetBlock__FP6SFXHDR(struct SFXHDR *sfh)")
del_items(0x8009A898)
SetType(0x8009A898, "void AS_CloseStream__FP6STRHDRP6SFXHDR(struct STRHDR *sh, struct SFXHDR *sfh)")
del_items(0x8009A8EC)
SetType(0x8009A8EC, "unsigned short SCR_GetBlackClut__Fv()")
del_items(0x8009A8F8)
SetType(0x8009A8F8, "void SCR_Open__Fv()")
del_items(0x8009A930)
SetType(0x8009A930, "void SCR_DumpClut__Fv()")
del_items(0x8009A9A4)
SetType(0x8009A9A4, "unsigned short SCR_NeedHighlightPal__FUsUsi(unsigned short Clut, unsigned short PixVal, int NumOfCols)")
del_items(0x8009A9D8)
SetType(0x8009A9D8, "void Init__13PalCollectionPC7InitPos(struct PalCollection *this, struct InitPos *IPos)")
del_items(0x8009AA68)
SetType(0x8009AA68, "struct PalEntry *FindPal__13PalCollectionUsUsi(struct PalCollection *this, unsigned short SourceClut, unsigned short PixVal, int NumOfCols)")
del_items(0x8009AB44)
SetType(0x8009AB44, "struct PalEntry *NewPal__13PalCollectionUsUsi(struct PalCollection *this, unsigned short SourceClut, unsigned short PixVal, int NumOfCols)")
del_items(0x8009ABC4)
SetType(0x8009ABC4, "void MakePal__8PalEntryUsUsi(struct PalEntry *this, unsigned short _SourceClut, unsigned short _PixVal, int _NumOfCols)")
del_items(0x8009AC64)
SetType(0x8009AC64, "unsigned short GetHighlightPal__13PalCollectionUsUsi(struct PalCollection *this, unsigned short SourceClut, unsigned short PixVal, int NumOfCols)")
del_items(0x8009ACAC)
SetType(0x8009ACAC, "void UpdatePals__13PalCollection(struct PalCollection *this)")
del_items(0x8009AD20)
SetType(0x8009AD20, "void SCR_Handler__Fv()")
del_items(0x8009AD48)
SetType(0x8009AD48, "int GetNumOfObjs__t10Collection2Z8PalEntryi20(struct t10Collection2Z8PalEntryi20 *this)")
del_items(0x8009AD50)
SetType(0x8009AD50, "struct PalEntry *GetObj__t10Collection2Z8PalEntryi20(struct t10Collection2Z8PalEntryi20 *this)")
del_items(0x8009AD8C)
SetType(0x8009AD8C, "void Init__t10Collection2Z8PalEntryi20(struct t10Collection2Z8PalEntryi20 *this)")
del_items(0x8009ADF0)
SetType(0x8009ADF0, "void MoveFromUsedToUnused__t10Collection2Z8PalEntryi20P8PalEntry(struct t10Collection2Z8PalEntryi20 *this, struct PalEntry *RetObj)")
del_items(0x8009AE48)
SetType(0x8009AE48, "void MoveFromUnusedToUsed__t10Collection2Z8PalEntryi20P8PalEntry(struct t10Collection2Z8PalEntryi20 *this, struct PalEntry *RetObj)")
del_items(0x8009AEA0)
SetType(0x8009AEA0, "void Set__8PalEntryUsUsi(struct PalEntry *this, unsigned short _SourceClut, unsigned short _PixVal, int _NumOfCols)")
del_items(0x8009AEB4)
SetType(0x8009AEB4, "void Set__8PalEntryRC7InitPos(struct PalEntry *this, struct InitPos *NewPos)")
del_items(0x8009AEE0)
SetType(0x8009AEE0, "bool SetJustUsed__8PalEntryb(struct PalEntry *this, bool NewVal)")
del_items(0x8009AEE8)
SetType(0x8009AEE8, "void Init__8PalEntry(struct PalEntry *this)")
del_items(0x8009AEF0)
SetType(0x8009AEF0, "unsigned short GetClut__C8PalEntry(struct PalEntry *this)")
del_items(0x8009AEFC)
SetType(0x8009AEFC, "bool IsEqual__C8PalEntryUsUsi(struct PalEntry *this, unsigned short _SourceClut, unsigned short _PixVal, int _NumOfCols)")
del_items(0x8009AF34)
SetType(0x8009AF34, "struct PalEntry *GetNext__Ct11TLinkedList1Z8PalEntry(struct t11TLinkedList1Z8PalEntry *this)")
del_items(0x8009AF40)
SetType(0x8009AF40, "void AddToList__t11TLinkedList1Z8PalEntryPP8PalEntry(struct t11TLinkedList1Z8PalEntry *this, struct PalEntry **Head)")
del_items(0x8009AF60)
SetType(0x8009AF60, "void DetachFromList__t11TLinkedList1Z8PalEntryPP8PalEntry(struct t11TLinkedList1Z8PalEntry *this, struct PalEntry **Head)")
del_items(0x8009AFAC)
SetType(0x8009AFAC, "void stub__FPcPv(char *e, void *argptr)")
del_items(0x8009AFB4)
SetType(0x8009AFB4, "void new_eprint__FPcT0i(char *Text, char *File, int Line)")
del_items(0x8009AFE8)
SetType(0x8009AFE8, "void TonysGameTask__FP4TASK(struct TASK *T)")
del_items(0x8009B070)
SetType(0x8009B070, "void SetAmbientLight__Fv()")
del_items(0x8009B130)
SetType(0x8009B130, "void SetDemoPlayer__Fv()")
del_items(0x8009B160)
SetType(0x8009B160, "void print_demo_task__FP4TASK(struct TASK *T)")
del_items(0x8009B4A0)
SetType(0x8009B4A0, "void TonysDummyPoll__Fv()")
del_items(0x8009B4CC)
SetType(0x8009B4CC, "void SetTonyPoll__Fv()")
del_items(0x8009B4D8)
SetType(0x8009B4D8, "void ClearTonyPoll__Fv()")
del_items(0x8009B4E4)
SetType(0x8009B4E4, "void load_demo_pad_data__FUl(unsigned long demo_num)")
del_items(0x8009B544)
SetType(0x8009B544, "void save_demo_pad_data__FUl(unsigned long demo_num)")
del_items(0x8009B5A4)
SetType(0x8009B5A4, "void set_pad_record_play__Fi(int level)")
del_items(0x8009B618)
SetType(0x8009B618, "void start_demo__Fv()")
del_items(0x8009B628)
SetType(0x8009B628, "void SetQuest__Fv()")
del_items(0x8009B630)
SetType(0x8009B630, "void DrawManaShield__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8009B638)
SetType(0x8009B638, "void ManaTask__FP4TASK(struct TASK *T)")
del_items(0x8009B640)
SetType(0x8009B640, "void tony__Fv()")
del_items(0x8009B680)
SetType(0x8009B680, "void GLUE_SetMonsterList__Fi(int List)")
del_items(0x8009B68C)
SetType(0x8009B68C, "int GLUE_GetMonsterList__Fv()")
del_items(0x8009B698)
SetType(0x8009B698, "void GLUE_SuspendGame__Fv()")
del_items(0x8009B6EC)
SetType(0x8009B6EC, "void GLUE_ResumeGame__Fv()")
del_items(0x8009B740)
SetType(0x8009B740, "void GLUE_PreTown__Fv()")
del_items(0x8009B770)
SetType(0x8009B770, "void GLUE_PreDun__Fv()")
del_items(0x8009B778)
SetType(0x8009B778, "bool GLUE_Finished__Fv()")
del_items(0x8009B784)
SetType(0x8009B784, "void GLUE_SetFinished__Fb(bool NewFinished)")
del_items(0x8009B790)
SetType(0x8009B790, "void GLUE_StartBg__Fibi(int TextId, bool IsTown, int Level)")
del_items(0x8009B7F8)
SetType(0x8009B7F8, "bool GLUE_SetShowGameScreenFlag__Fb(bool NewFlag)")
del_items(0x8009B808)
SetType(0x8009B808, "bool GLUE_GetShowGameScreenFlag__Fv()")
del_items(0x8009B814)
SetType(0x8009B814, "bool GLUE_SetHomingScrollFlag__Fb(bool NewFlag)")
del_items(0x8009B824)
SetType(0x8009B824, "bool GLUE_SetShowPanelFlag__Fb(bool NewFlag)")
del_items(0x8009B834)
SetType(0x8009B834, "bool GLUE_HasGameStarted__Fv()")
del_items(0x8009B840)
SetType(0x8009B840, "void DoShowPanelGFX__FP6GPanelT0(struct GPanel *P1, struct GPanel *P2)")
del_items(0x8009B918)
SetType(0x8009B918, "void GLUE_DoQuake__Fii(int Time, int Amount)")
del_items(0x8009B928)
SetType(0x8009B928, "void BgTask__FP4TASK(struct TASK *T)")
del_items(0x8009BDD4)
SetType(0x8009BDD4, "struct PInf *FindPlayerChar__FPc(char *Id)")
del_items(0x8009BE6C)
SetType(0x8009BE6C, "struct PInf *FindPlayerChar__Fiii(int Char, int Wep, int Arm)")
del_items(0x8009BEC8)
SetType(0x8009BEC8, "struct PInf *FindPlayerChar__FP12PlayerStruct(struct PlayerStruct *P)")
del_items(0x8009BEF8)
SetType(0x8009BEF8, "int FindPlayerChar__FP12PlayerStructb(struct PlayerStruct *P, bool InTown)")
del_items(0x8009BFC4)
SetType(0x8009BFC4, "void MakeSurePlayerDressedProperly__FR7CPlayerR12PlayerStructbT2(struct CPlayer *Player, struct PlayerStruct *Plr, bool InTown, bool Blocking)")
del_items(0x8009C03C)
SetType(0x8009C03C, "struct MonstList *GLUE_GetCurrentList__Fi(int Level)")
del_items(0x8009C0E8)
SetType(0x8009C0E8, "void GLUE_StartGameExit__Fv()")
del_items(0x8009C154)
SetType(0x8009C154, "void GLUE_Init__Fv()")
del_items(0x8009C15C)
SetType(0x8009C15C, "int GetTexId__7CPlayer(struct CPlayer *this)")
del_items(0x8009C168)
SetType(0x8009C168, "void SetTown__7CBlocksb(struct CBlocks *this, bool Val)")
del_items(0x8009C170)
SetType(0x8009C170, "void MoveToScrollTarget__7CBlocks(struct CBlocks *this)")
del_items(0x8009C184)
SetType(0x8009C184, "void SetDemoKeys__FPi(int *buffer)")
del_items(0x8009C25C)
SetType(0x8009C25C, "void RestoreDemoKeys__FPi(int *buffer)")
del_items(0x8009C2EC)
SetType(0x8009C2EC, "char *get_action_str__Fii(int pval, int combo)")
del_items(0x8009C364)
SetType(0x8009C364, "int get_key_pad__Fi(int n)")
del_items(0x8009C39C)
SetType(0x8009C39C, "bool checkvalid__Fv()")
del_items(0x8009C400)
SetType(0x8009C400, "bool RemoveCtrlScreen__Fv()")
del_items(0x8009C45C)
SetType(0x8009C45C, "unsigned char Init_ctrl_pos__Fv()")
del_items(0x8009C514)
SetType(0x8009C514, "int remove_padval__Fi(int p)")
del_items(0x8009C554)
SetType(0x8009C554, "int remove_comboval__Fib(int p, bool all)")
del_items(0x8009C59C)
SetType(0x8009C59C, "unsigned char set_buttons__Fii(int cline, int n)")
del_items(0x8009C714)
SetType(0x8009C714, "void restore_controller_settings__F8CTRL_SET(enum CTRL_SET s)")
del_items(0x8009C7B8)
SetType(0x8009C7B8, "bool only_one_button__Fi(int p)")
del_items(0x8009C7E4)
SetType(0x8009C7E4, "int SwapJap__Fi(int p)")
del_items(0x8009C7EC)
SetType(0x8009C7EC, "unsigned char main_ctrl_setup__Fv()")
del_items(0x8009CCD0)
SetType(0x8009CCD0, "void PrintCtrlString__FiiUcic(int x, int y, unsigned char cjustflag, int str_num, int col)")
del_items(0x8009D224)
SetType(0x8009D224, "void DrawCtrlSetup__Fv()")
del_items(0x8009D724)
SetType(0x8009D724, "void _GLOBAL__D_ctrlflag()")
del_items(0x8009D74C)
SetType(0x8009D74C, "void _GLOBAL__I_ctrlflag()")
del_items(0x8009D774)
SetType(0x8009D774, "unsigned short GetTick__C4CPad(struct CPad *this)")
del_items(0x8009D79C)
SetType(0x8009D79C, "unsigned short GetDown__C4CPad_addr_8009D79C(struct CPad *this)")
del_items(0x8009D7C4)
SetType(0x8009D7C4, "unsigned short GetUp__C4CPad(struct CPad *this)")
del_items(0x8009D7EC)
SetType(0x8009D7EC, "unsigned short GetCur__C4CPad_addr_8009D7EC(struct CPad *this)")
del_items(0x8009D814)
SetType(0x8009D814, "void SetPadTickMask__4CPadUs(struct CPad *this, unsigned short mask)")
del_items(0x8009D81C)
SetType(0x8009D81C, "void SetPadTick__4CPadUs(struct CPad *this, unsigned short tick)")
del_items(0x8009D824)
SetType(0x8009D824, "void SetRGB__6DialogUcUcUc_addr_8009D824(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x8009D844)
SetType(0x8009D844, "void SetBorder__6Dialogi_addr_8009D844(struct Dialog *this, int Type)")
del_items(0x8009D84C)
SetType(0x8009D84C, "void ___6Dialog_addr_8009D84C(struct Dialog *this, int __in_chrg)")
del_items(0x8009D874)
SetType(0x8009D874, "struct Dialog *__6Dialog_addr_8009D874(struct Dialog *this)")
del_items(0x8009D8F4)
SetType(0x8009D8F4, "int GetOverlayOtBase__7CBlocks_addr_8009D8F4()")
del_items(0x8009D8FC)
SetType(0x8009D8FC, "void color_cycle__FP4TASK(struct TASK *T)")
del_items(0x8009DCBC)
SetType(0x8009DCBC, "void penta_cycle_task__FP4TASK(struct TASK *T)")
del_items(0x8009DE3C)
SetType(0x8009DE3C, "void DrawFlameLogo__Fv()")
del_items(0x8009DFEC)
SetType(0x8009DFEC, "void TitleScreen__FP7CScreen(struct CScreen *FeScreen)")
del_items(0x8009E040)
SetType(0x8009E040, "void DaveLDummyPoll__Fv()")
del_items(0x8009E048)
SetType(0x8009E048, "void DaveL__Fv()")
del_items(0x8009E070)
SetType(0x8009E070, "void DoReflection__FP8POLY_FT4iii(struct POLY_FT4 *Ft4, int R, int G, int B)")
del_items(0x8009E3B0)
SetType(0x8009E3B0, "void mteleportfx__Fv()")
del_items(0x8009E6C4)
SetType(0x8009E6C4, "void invistimer__Fv()")
del_items(0x8009E79C)
SetType(0x8009E79C, "void setUVparams__FP8POLY_FT4P9FRAME_HDR(struct POLY_FT4 *Ft4, struct FRAME_HDR *Fr)")
del_items(0x8009E82C)
SetType(0x8009E82C, "void drawparticle__Fiiiiii(int x, int y, int scale, int anim, int colour, int OtPos)")
del_items(0x8009EA24)
SetType(0x8009EA24, "void drawpolyF4__Fiiiiii(int x, int y, int w, int h, int colour, int OtPos)")
del_items(0x8009EB58)
SetType(0x8009EB58, "void drawpolyG4__Fiiiiiiii(int x, int y, int w, int h1, int h2, int colour0, int colour1, int OtPos)")
del_items(0x8009ED28)
SetType(0x8009ED28, "void particlejump__Fii(int ScrX, int ScrY)")
del_items(0x8009EEF8)
SetType(0x8009EEF8, "void doparticlejump__Fv()")
del_items(0x8009F08C)
SetType(0x8009F08C, "void StartPartJump__Fiiiii(int mi, int height, int scale, int colour, int OtPos)")
del_items(0x8009F1E0)
SetType(0x8009F1E0, "void MonstPartJump__Fi(int m)")
del_items(0x8009F300)
SetType(0x8009F300, "void doparticlechain__Fiiiiiiiiiiii(int sx, int sy, int dx, int dy, int count, int scale, int scaledec, int semitrans, int randomize, int colour, int OtPos, int source)")
del_items(0x8009F650)
SetType(0x8009F650, "void ParticleMissile__FP13MissileStructiiii(struct MissileStruct *Ms, int ScrX, int ScrY, int colour, int OtPos)")
del_items(0x8009F70C)
SetType(0x8009F70C, "void Teleportfx__Fiiiiiiii(int scrnx, int scrny, int width, int height, int scale, int colmask, int numpart, int OtPos)")
del_items(0x8009FA0C)
SetType(0x8009FA0C, "void ResurrectFX__Fiiii(int x, int height, int scale, int OtPos)")
del_items(0x8009FC34)
SetType(0x8009FC34, "void ParticleExp__FP13MissileStructiiii(struct MissileStruct *Ms, int ScrX, int ScrY, int colour, int OtPos)")
del_items(0x8009FCCC)
SetType(0x8009FCCC, "void GetPlrPos__11SPELLFX_DATP12PlayerStruct(struct SPELLFX_DAT *this, struct PlayerStruct *ptrplr)")
del_items(0x8009FDF0)
SetType(0x8009FDF0, "void healFX__Fv()")
del_items(0x8009FF2C)
SetType(0x8009FF2C, "void HealStart__Fi(int plr)")
del_items(0x8009FF60)
SetType(0x8009FF60, "void HealotherStart__Fi(int plr)")
del_items(0x8009FF98)
SetType(0x8009FF98, "void TeleStart__Fi(int plr)")
del_items(0x800A0058)
SetType(0x800A0058, "void TeleStop__Fi(int plr)")
del_items(0x800A0084)
SetType(0x800A0084, "void PhaseStart__Fi(int plr)")
del_items(0x800A00B8)
SetType(0x800A00B8, "void PhaseEnd__Fi(int plr)")
del_items(0x800A00E4)
SetType(0x800A00E4, "void ApocInit__11SPELLFX_DATP12PlayerStruct(struct SPELLFX_DAT *this, struct PlayerStruct *ptrplr)")
del_items(0x800A02CC)
SetType(0x800A02CC, "void ApocaStart__Fi(int plr)")
del_items(0x800A0330)
SetType(0x800A0330, "void DaveLTask__FP4TASK(struct TASK *T)")
del_items(0x800A0400)
SetType(0x800A0400, "void PRIM_GetPrim__FPP7POLY_G4(struct POLY_G4 **Prim)")
del_items(0x800A047C)
SetType(0x800A047C, "void PRIM_GetPrim__FPP7POLY_F4(struct POLY_F4 **Prim)")
del_items(0x800A04F8)
SetType(0x800A04F8, "void PRIM_GetPrim__FPP8POLY_FT4_addr_800A04F8(struct POLY_FT4 **Prim)")
del_items(0x800A0574)
SetType(0x800A0574, "struct CPlayer *GetPlayer__7CPlayeri(int PNum)")
del_items(0x800A05C4)
SetType(0x800A05C4, "int GetLastOtPos__C7CPlayer(struct CPlayer *this)")
del_items(0x800A05D0)
SetType(0x800A05D0, "int GetOtPos__7CBlocksi_addr_800A05D0(struct CBlocks *this, int LogicalY)")
del_items(0x800A060C)
SetType(0x800A060C, "struct FRAME_HDR *GetFr__7TextDati_addr_800A060C(struct TextDat *this, int FrNum)")
del_items(0x800A0628)
SetType(0x800A0628, "void SetQSpell__Fiii(int pnum, int Spell, int type)")
del_items(0x800A0648)
SetType(0x800A0648, "void release_spell__Fi(int pnum)")
del_items(0x800A06AC)
SetType(0x800A06AC, "void select_belt_item__Fi(int pnum)")
del_items(0x800A06B4)
SetType(0x800A06B4, "unsigned char any_belt_items__Fv()")
del_items(0x800A071C)
SetType(0x800A071C, "void get_last_inv__Fv()")
del_items(0x800A0848)
SetType(0x800A0848, "void get_next_inv__Fv()")
del_items(0x800A097C)
SetType(0x800A097C, "void pad_func_up__Fi(int pnum)")
del_items(0x800A09A8)
SetType(0x800A09A8, "void pad_func_down__Fi(int pnum)")
del_items(0x800A09D4)
SetType(0x800A09D4, "void pad_func_left__Fi(int pnum)")
del_items(0x800A09DC)
SetType(0x800A09DC, "void pad_func_right__Fi(int pnum)")
del_items(0x800A09E4)
SetType(0x800A09E4, "void pad_func_select__Fi(int pnum)")
del_items(0x800A0AA8)
SetType(0x800A0AA8, "void SetFindMonsterXY__FP12PlayerStructi(struct PlayerStruct *p, int i)")
del_items(0x800A0B38)
SetType(0x800A0B38, "void pad_func_Attack__Fi(int pnum)")
del_items(0x800A0FEC)
SetType(0x800A0FEC, "void pad_func_Action__Fi(int pnum)")
del_items(0x800A13A4)
SetType(0x800A13A4, "void InitTargetCursor__Fi(int pnum)")
del_items(0x800A13D8)
SetType(0x800A13D8, "void RemoveTargetCursor__Fi(int pnum)")
del_items(0x800A1420)
SetType(0x800A1420, "bool TargetingSpell__Fi(int sp)")
del_items(0x800A1468)
SetType(0x800A1468, "void pad_func_Cast_Spell__Fi(int pnum)")
del_items(0x800A185C)
SetType(0x800A185C, "void pad_func_Use_Item__Fi(int pnum)")
del_items(0x800A1A90)
SetType(0x800A1A90, "void pad_func_BeltList__Fi(int pnum)")
del_items(0x800A1BF8)
SetType(0x800A1BF8, "void pad_func_Chr__Fi(int pnum)")
del_items(0x800A1D2C)
SetType(0x800A1D2C, "void pad_func_Inv__Fi(int pnum)")
del_items(0x800A1E5C)
SetType(0x800A1E5C, "void pad_func_SplBook__Fi(int pnum)")
del_items(0x800A1F8C)
SetType(0x800A1F8C, "void pad_func_QLog__Fi(int pnum)")
del_items(0x800A2080)
SetType(0x800A2080, "void pad_func_SpellBook__Fi(int pnum)")
del_items(0x800A2158)
SetType(0x800A2158, "void pad_func_AutoMap__Fi(int pnum)")
del_items(0x800A2214)
SetType(0x800A2214, "void pad_func_Quick_Spell__Fi(int pnum)")
del_items(0x800A2388)
SetType(0x800A2388, "void check_inv__FiPci(int pnum, char *ilist, int entries)")
del_items(0x800A2608)
SetType(0x800A2608, "void pad_func_Quick_Use_Health__Fi(int pnum)")
del_items(0x800A2630)
SetType(0x800A2630, "void pad_func_Quick_Use_Mana__Fi(int pnum)")
del_items(0x800A2658)
SetType(0x800A2658, "bool sort_gold__Fi(int pnum)")
del_items(0x800A2760)
SetType(0x800A2760, "void DrawObjSelector__FiP12PlayerStruct(int pnum, struct PlayerStruct *player)")
del_items(0x800A2F68)
SetType(0x800A2F68, "bool SelectorActive__Fv()")
del_items(0x800A2F74)
SetType(0x800A2F74, "void DrawObjTask__FP4TASK(struct TASK *T)")
del_items(0x800A32B0)
SetType(0x800A32B0, "void add_area_find_object__Fiii(int index, int x, int y)")
del_items(0x800A3320)
SetType(0x800A3320, "unsigned char CheckRangeObject__Fiii(int x, int y, int distance)")
del_items(0x800A3698)
SetType(0x800A3698, "unsigned char CheckArea__FiiiUci(int xx, int yy, int range, unsigned char allflag, int pnum)")
del_items(0x800A3C80)
SetType(0x800A3C80, "void PlacePlayer__FiiiUc(int pnum, int x, int y, unsigned char do_current)")
del_items(0x800A3DF8)
SetType(0x800A3DF8, "void _GLOBAL__D_gplayer()")
del_items(0x800A3E20)
SetType(0x800A3E20, "void _GLOBAL__I_gplayer()")
del_items(0x800A3E48)
SetType(0x800A3E48, "void SetRGB__6DialogUcUcUc_addr_800A3E48(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x800A3E68)
SetType(0x800A3E68, "void SetBack__6Dialogi_addr_800A3E68(struct Dialog *this, int Type)")
del_items(0x800A3E70)
SetType(0x800A3E70, "void SetBorder__6Dialogi_addr_800A3E70(struct Dialog *this, int Type)")
del_items(0x800A3E78)
SetType(0x800A3E78, "void ___6Dialog_addr_800A3E78(struct Dialog *this, int __in_chrg)")
del_items(0x800A3EA0)
SetType(0x800A3EA0, "struct Dialog *__6Dialog_addr_800A3EA0(struct Dialog *this)")
del_items(0x800A3F20)
SetType(0x800A3F20, "bool Active__11SpellTarget(struct SpellTarget *this)")
del_items(0x800A3F2C)
SetType(0x800A3F2C, "int GetOverlayOtBase__7CBlocks_addr_800A3F2C()")
del_items(0x800A3F34)
SetType(0x800A3F34, "unsigned short GetDown__C4CPad_addr_800A3F34(struct CPad *this)")
del_items(0x800A3F5C)
SetType(0x800A3F5C, "unsigned short GetCur__C4CPad_addr_800A3F5C(struct CPad *this)")
del_items(0x800A3F84)
SetType(0x800A3F84, "void DEC_AddAsDecRequestor__FP7TextDat(struct TextDat *Td)")
del_items(0x800A4000)
SetType(0x800A4000, "void DEC_RemoveAsDecRequestor__FP7TextDat(struct TextDat *Td)")
del_items(0x800A4058)
SetType(0x800A4058, "void DEC_DoDecompRequests__Fv()")
del_items(0x800A40B4)
SetType(0x800A40B4, "int FindThisTd__FP7TextDat(struct TextDat *Td)")
del_items(0x800A40EC)
SetType(0x800A40EC, "int FindEmptyIndex__Fv()")
del_items(0x800A4124)
SetType(0x800A4124, "void MY_TSK_Sleep__Fi(int time)")
del_items(0x800A417C)
SetType(0x800A417C, "void UPDATEPROGRESS__Fi(int inc)")
del_items(0x800A4248)
SetType(0x800A4248, "bool IsGameLoading__Fv()")
del_items(0x800A4254)
SetType(0x800A4254, "void DrawCutScreen__Fi(int lev)")
del_items(0x800A4690)
SetType(0x800A4690, "void PutUpCutScreenTSK__FP4TASK(struct TASK *T)")
del_items(0x800A4758)
SetType(0x800A4758, "void PutUpCutScreen__Fi(int lev)")
del_items(0x800A48A8)
SetType(0x800A48A8, "void TakeDownCutScreen__Fv()")
del_items(0x800A494C)
SetType(0x800A494C, "void FinishBootProgress__Fv()")
del_items(0x800A49D8)
SetType(0x800A49D8, "void FinishProgress__Fv()")
del_items(0x800A4A38)
SetType(0x800A4A38, "void PRIM_GetPrim__FPP7POLY_G4_addr_800A4A38(struct POLY_G4 **Prim)")
del_items(0x800A4AB4)
SetType(0x800A4AB4, "void _GLOBAL__D_CutScr()")
del_items(0x800A4ADC)
SetType(0x800A4ADC, "void _GLOBAL__I_CutScr()")
del_items(0x800A4B04)
SetType(0x800A4B04, "void SetRGB__6DialogUcUcUc_addr_800A4B04(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x800A4B24)
SetType(0x800A4B24, "void SetBack__6Dialogi_addr_800A4B24(struct Dialog *this, int Type)")
del_items(0x800A4B2C)
SetType(0x800A4B2C, "void SetBorder__6Dialogi_addr_800A4B2C(struct Dialog *this, int Type)")
del_items(0x800A4B34)
SetType(0x800A4B34, "void ___6Dialog_addr_800A4B34(struct Dialog *this, int __in_chrg)")
del_items(0x800A4B5C)
SetType(0x800A4B5C, "struct Dialog *__6Dialog_addr_800A4B5C(struct Dialog *this)")
del_items(0x800A4BDC)
SetType(0x800A4BDC, "int GetOverlayOtBase__7CBlocks_addr_800A4BDC()")
del_items(0x800A4BE4)
SetType(0x800A4BE4, "void ___7CScreen(struct CScreen *this, int __in_chrg)")
del_items(0x800A4C04)
SetType(0x800A4C04, "void init_mem_card__FPFii_vUc(void (*handler)(), unsigned char read_dir)")
del_items(0x800A4E3C)
SetType(0x800A4E3C, "void memcard_event__Fii(int evt, int side)")
del_items(0x800A4E74)
SetType(0x800A4E74, "void init_card__Fib(int card_number, bool read_dir)")
del_items(0x800A4F40)
SetType(0x800A4F40, "int ping_card__Fi(int card_number)")
del_items(0x800A4FD4)
SetType(0x800A4FD4, "void DealWithCard__Fi(int side)")
del_items(0x800A5098)
SetType(0x800A5098, "void CardUpdateTask__FP4TASK(struct TASK *T)")
del_items(0x800A50EC)
SetType(0x800A50EC, "void MemcardON__Fv()")
del_items(0x800A5158)
SetType(0x800A5158, "void MemcardOFF__Fv()")
del_items(0x800A5190)
SetType(0x800A5190, "void CheckSavedOptions__Fv()")
del_items(0x800A5290)
SetType(0x800A5290, "void card_removed__Fi(int card_number)")
del_items(0x800A52C8)
SetType(0x800A52C8, "int read_card_block__Fii(int card_number, int block)")
del_items(0x800A5310)
SetType(0x800A5310, "int test_hw_event__Fv()")
del_items(0x800A5390)
SetType(0x800A5390, "void ActivateMemcard__Fii(int card1, int card2)")
del_items(0x800A53CC)
SetType(0x800A53CC, "void ActivateCharacterMemcard__Fii(int card1, int card2)")
del_items(0x800A5488)
SetType(0x800A5488, "void ShowCardActionText__Fv()")
del_items(0x800A576C)
SetType(0x800A576C, "int CountdownLoad__Fi(int Counter)")
del_items(0x800A597C)
SetType(0x800A597C, "int CountdownSave__Fi(int Counter)")
del_items(0x800A5A5C)
SetType(0x800A5A5C, "void ShowLoadingBox__Fi(int Text)")
del_items(0x800A5CE8)
SetType(0x800A5CE8, "void KillItemDead__Fiii(int pnum, int InvPos, int Idx)")
del_items(0x800A632C)
SetType(0x800A632C, "void DoRemoveSpellItems__Fii(int plrno, int item)")
del_items(0x800A6464)
SetType(0x800A6464, "void ClearLoadCharItems__Fv()")
del_items(0x800A6504)
SetType(0x800A6504, "void PantsDelay__Fv()")
del_items(0x800A6540)
SetType(0x800A6540, "void SetRGB__6DialogUcUcUc_addr_800A6540(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x800A6560)
SetType(0x800A6560, "void SetBack__6Dialogi_addr_800A6560(struct Dialog *this, int Type)")
del_items(0x800A6568)
SetType(0x800A6568, "void SetBorder__6Dialogi_addr_800A6568(struct Dialog *this, int Type)")
del_items(0x800A6570)
SetType(0x800A6570, "void ___6Dialog_addr_800A6570(struct Dialog *this, int __in_chrg)")
del_items(0x800A6598)
SetType(0x800A6598, "struct Dialog *__6Dialog_addr_800A6598(struct Dialog *this)")
del_items(0x800A6618)
SetType(0x800A6618, "int GetOverlayOtBase__7CBlocks_addr_800A6618()")
del_items(0x800A6620)
SetType(0x800A6620, "void PrintSelectBack__FUs(unsigned short Str)")
del_items(0x800A66B0)
SetType(0x800A66B0, "void DrawDialogBox__FiiP4RECTiiii(int e, int f, struct RECT *DRect, int X, int Y, int W, int H)")
del_items(0x800A6794)
SetType(0x800A6794, "void DrawSpinner__FiiUcUcUciiibiT8T8Uc(int x, int y, unsigned char SpinR, unsigned char SpinG, int SpinB, int spinradius, int spinbright, int angle, bool Sparkle, int OtPos, bool cross, bool iso, int SinStep)")
del_items(0x800A6E10)
SetType(0x800A6E10, "void SetLoadedLang__F9LANG_TYPE(enum LANG_TYPE LoadLang)")
del_items(0x800A6EC0)
SetType(0x800A6EC0, "void ChangeLang__Fv()")
del_items(0x800A6F84)
SetType(0x800A6F84, "void DrawLeftRight__Fv()")
del_items(0x800A6F8C)
SetType(0x800A6F8C, "void PrintMono__Fi(int ypos)")
del_items(0x800A7044)
SetType(0x800A7044, "void DrawMenu__Fi(int MenuNo)")
del_items(0x800A8064)
SetType(0x800A8064, "int who_pressed__Fi(int pval)")
del_items(0x800A80EC)
SetType(0x800A80EC, "void CharacterLoadPad__Fv()")
del_items(0x800A8640)
SetType(0x800A8640, "void MemcardPad__Fv()")
del_items(0x800A8F24)
SetType(0x800A8F24, "void SwitchMONO__Fv()")
del_items(0x800A8F70)
SetType(0x800A8F70, "void SoundPad__Fv()")
del_items(0x800A9978)
SetType(0x800A9978, "void CentrePad__Fv()")
del_items(0x800A9BBC)
SetType(0x800A9BBC, "void CalcVolumes__Fv()")
del_items(0x800A9D18)
SetType(0x800A9D18, "void SetLoadedVolumes__Fv()")
del_items(0x800A9DC8)
SetType(0x800A9DC8, "void GetVolumes__Fv()")
del_items(0x800A9E64)
SetType(0x800A9E64, "void AlterSpeedMenu__F9GM_SPEEDS(enum GM_SPEEDS gs)")
del_items(0x800A9EB8)
SetType(0x800A9EB8, "void GameSpeedPad__Fv()")
del_items(0x800A9FE0)
SetType(0x800A9FE0, "void DrawOptions__FP4TASK(struct TASK *T)")
del_items(0x800AA6DC)
SetType(0x800AA6DC, "void ToggleOptions__Fv()")
del_items(0x800AA884)
SetType(0x800AA884, "void FormatPad__Fv()")
del_items(0x800AAB24)
SetType(0x800AAB24, "void SaveOverwritePad__Fv()")
del_items(0x800AACC8)
SetType(0x800AACC8, "void CharCardSelectMemcardPad__Fv()")
del_items(0x800AAF10)
SetType(0x800AAF10, "void LAMBO_MovePad__FP4CPad(struct CPad *P)")
del_items(0x800AB0C0)
SetType(0x800AB0C0, "void PRIM_GetPrim__FPP7POLY_G4_addr_800AB0C0(struct POLY_G4 **Prim)")
del_items(0x800AB13C)
SetType(0x800AB13C, "unsigned short GetTick__C4CPad_addr_800AB13C(struct CPad *this)")
del_items(0x800AB164)
SetType(0x800AB164, "unsigned short GetDown__C4CPad_addr_800AB164(struct CPad *this)")
del_items(0x800AB18C)
SetType(0x800AB18C, "unsigned short GetUp__C4CPad_addr_800AB18C(struct CPad *this)")
del_items(0x800AB1B4)
SetType(0x800AB1B4, "void SetPadTickMask__4CPadUs_addr_800AB1B4(struct CPad *this, unsigned short mask)")
del_items(0x800AB1BC)
SetType(0x800AB1BC, "void SetPadTick__4CPadUs_addr_800AB1BC(struct CPad *this, unsigned short tick)")
del_items(0x800AB1C4)
SetType(0x800AB1C4, "void SetRGB__6DialogUcUcUc_addr_800AB1C4(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x800AB1E4)
SetType(0x800AB1E4, "void SetBack__6Dialogi_addr_800AB1E4(struct Dialog *this, int Type)")
del_items(0x800AB1EC)
SetType(0x800AB1EC, "void SetBorder__6Dialogi_addr_800AB1EC(struct Dialog *this, int Type)")
del_items(0x800AB1F4)
SetType(0x800AB1F4, "void ___6Dialog_addr_800AB1F4(struct Dialog *this, int __in_chrg)")
del_items(0x800AB21C)
SetType(0x800AB21C, "struct Dialog *__6Dialog_addr_800AB21C(struct Dialog *this)")
del_items(0x800AB29C)
SetType(0x800AB29C, "int GetOverlayOtBase__7CBlocks_addr_800AB29C()")
del_items(0x800AB2A4)
SetType(0x800AB2A4, "struct FRAME_HDR *GetFr__7TextDati_addr_800AB2A4(struct TextDat *this, int FrNum)")
del_items(0x800AB2C0)
SetType(0x800AB2C0, "void SetBirdFrig__Fb(bool f)")
del_items(0x800AB2F4)
SetType(0x800AB2F4, "unsigned char BirdDistanceOK__Fiiii(int WorldXa, int WorldYa, int WorldXb, int WorldYb)")
del_items(0x800AB34C)
SetType(0x800AB34C, "void AlterBirdPos__FP10BIRDSTRUCTUc(struct BIRDSTRUCT *b, unsigned char rnd)")
del_items(0x800AB4AC)
SetType(0x800AB4AC, "void BirdWorld__FP10BIRDSTRUCTii(struct BIRDSTRUCT *b, int wx, int wy)")
del_items(0x800AB528)
SetType(0x800AB528, "bool CheckDist__Fii(int x, int y)")
del_items(0x800AB610)
SetType(0x800AB610, "int BirdScared__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800AB73C)
SetType(0x800AB73C, "int GetPerch__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800AB790)
SetType(0x800AB790, "void BIRD_StartHop__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800AB970)
SetType(0x800AB970, "void BIRD_DoHop__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800ABA74)
SetType(0x800ABA74, "void BIRD_StartPerch__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800ABAE0)
SetType(0x800ABAE0, "void BIRD_DoPerch__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800ABB64)
SetType(0x800ABB64, "void BIRD_DoScatter__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800ABC10)
SetType(0x800ABC10, "void CheckDirOk__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800ABD20)
SetType(0x800ABD20, "void BIRD_StartScatter__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800ABDCC)
SetType(0x800ABDCC, "void BIRD_StartFly__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800ABE58)
SetType(0x800ABE58, "void BIRD_DoFly__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800AC15C)
SetType(0x800AC15C, "void BIRD_StartLanding__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800AC168)
SetType(0x800AC168, "void BIRD_DoLanding__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800AC1D4)
SetType(0x800AC1D4, "void PlaceFlock__FP10BIRDSTRUCT(struct BIRDSTRUCT *leader)")
del_items(0x800AC2CC)
SetType(0x800AC2CC, "void ProcessFlock__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800AC3BC)
SetType(0x800AC3BC, "void InitBird__Fv()")
del_items(0x800AC494)
SetType(0x800AC494, "void ProcessBird__Fv()")
del_items(0x800AC5D8)
SetType(0x800AC5D8, "int GetBirdFrame__FP10BIRDSTRUCT(struct BIRDSTRUCT *b)")
del_items(0x800AC670)
SetType(0x800AC670, "void bscale__FP8POLY_FT4i(struct POLY_FT4 *Ft4, int height)")
del_items(0x800AC7A0)
SetType(0x800AC7A0, "void doshadow__FP10BIRDSTRUCTii(struct BIRDSTRUCT *b, int x, int y)")
del_items(0x800AC8C8)
SetType(0x800AC8C8, "void DrawLBird__Fv()")
del_items(0x800ACAFC)
SetType(0x800ACAFC, "void PRIM_GetPrim__FPP8POLY_FT4_addr_800ACAFC(struct POLY_FT4 **Prim)")
del_items(0x800ACB78)
SetType(0x800ACB78, "int GetOtPos__7CBlocksi_addr_800ACB78(struct CBlocks *this, int LogicalY)")
del_items(0x800ACBB4)
SetType(0x800ACBB4, "short PlayFMV__FPcii(char *str, int w, int h)")
del_items(0x800ACD84)
SetType(0x800ACD84, "void play_movie(char *pszMovie)")
del_items(0x800ACE4C)
SetType(0x800ACE4C, "int GetTpY__FUs_addr_800ACE4C(unsigned short tpage)")
del_items(0x800ACE68)
SetType(0x800ACE68, "int GetTpX__FUs_addr_800ACE68(unsigned short tpage)")
del_items(0x800ACE74)
SetType(0x800ACE74, "void LoadKanjiFont__FPc(char *name)")
del_items(0x800ACFB8)
SetType(0x800ACFB8, "void FreeKanji__Fv()")
del_items(0x800AD010)
SetType(0x800AD010, "void ClearKanjiCount__Fv()")
del_items(0x800AD048)
SetType(0x800AD048, "void ClearKanjiBuffer__Fv()")
del_items(0x800AD08C)
SetType(0x800AD08C, "void KANJI_SetCache__F10KANJI_FRMS(enum KANJI_FRMS ct)")
del_items(0x800AD318)
SetType(0x800AD318, "void LoadKanji__F10LANG_DB_NO(enum LANG_DB_NO NewLangDbNo)")
del_items(0x800AD448)
SetType(0x800AD448, "bool SetKanjiLoaded__Fb(bool loaded)")
del_items(0x800AD458)
SetType(0x800AD458, "bool IsKanjiLoaded__Fv()")
del_items(0x800AD464)
SetType(0x800AD464, "void KanjiSetTSK__FP4TASK(struct TASK *T)")
del_items(0x800AD4BC)
SetType(0x800AD4BC, "void KANJI_SetDb__F10LANG_DB_NO(enum LANG_DB_NO NewLangDbNo)")
del_items(0x800AD534)
SetType(0x800AD534, "int inmem__Fs(short k)")
del_items(0x800AD5BC)
SetType(0x800AD5BC, "unsigned short getb__FUs(unsigned short n)")
del_items(0x800AD5CC)
SetType(0x800AD5CC, "void ShadeBuff__FPUcii(unsigned char *b, int col, int border)")
del_items(0x800AD774)
SetType(0x800AD774, "void Crunch__FPUcT0(unsigned char *s, unsigned char *db)")
del_items(0x800AD7E8)
SetType(0x800AD7E8, "void _get_font__FPUcUsT0(unsigned char *d, unsigned short num, unsigned char *abuff)")
del_items(0x800AD8A8)
SetType(0x800AD8A8, "int getfreekan__Fv()")
del_items(0x800AD960)
SetType(0x800AD960, "enum KANJI_FRMS GetKanjiCacheFrm__Fv()")
del_items(0x800AD96C)
SetType(0x800AD96C, "struct POLY_FT4 *GetKanjiFrm__FUs(unsigned short kan)")
del_items(0x800ADC68)
SetType(0x800ADC68, "void PRIM_GetPrim__FPP8POLY_FT4_addr_800ADC68(struct POLY_FT4 **Prim)")
del_items(0x800ADCE4)
SetType(0x800ADCE4, "void DumpMonsters__7CBlocks_addr_800ADCE4(struct CBlocks *this)")
del_items(0x800ADD0C)
SetType(0x800ADD0C, "struct ALL_DECOMP_BUFFERS *GetDecompBuffers__7TextDat(struct TextDat *this)")
del_items(0x800ADD30)
SetType(0x800ADD30, "struct FRAME_HDR *GetFr__7TextDati_addr_800ADD30(struct TextDat *this, int FrNum)")
del_items(0x800ADD4C)
SetType(0x800ADD4C, "void writeblock__FP5block(struct block *theblock)")
del_items(0x800ADE34)
SetType(0x800ADE34, "int PAK_DoPak__FPUcPCUci(unsigned char *Dest, unsigned char *buffer, int insize)")
del_items(0x800AE074)
SetType(0x800AE074, "int PAK_DoUnpak__FPUcPCUc(unsigned char *Dest, unsigned char *Source)")
del_items(0x800AE114)
SetType(0x800AE114, "void fputc__5blockUc(struct block *this, unsigned char Val)")
del_items(0x800AE13C)
SetType(0x800AE13C, "void RemoveHelp__Fv()")
del_items(0x800AE150)
SetType(0x800AE150, "void HelpPad__Fv()")
del_items(0x800AE3F8)
SetType(0x800AE3F8, "int GetControlKey__FiPb(int str, bool *iscombo)")
del_items(0x800AE4A0)
SetType(0x800AE4A0, "void InitHelp__Fv()")
del_items(0x800AE4EC)
SetType(0x800AE4EC, "int DrawHelpLine__FiiPccccP10HelpStruct(int x, int y, char *txt, char R, int G, int B, struct HelpStruct *hp)")
del_items(0x800AE700)
SetType(0x800AE700, "void DisplayHelp__Fv()")
del_items(0x800AEA80)
SetType(0x800AEA80, "void DrawHelp__Fv()")
del_items(0x800AECF8)
SetType(0x800AECF8, "void _GLOBAL__D_DrawHelp__Fv()")
del_items(0x800AED38)
SetType(0x800AED38, "void _GLOBAL__I_DrawHelp__Fv()")
del_items(0x800AED60)
SetType(0x800AED60, "void SetRGB__6DialogUcUcUc_addr_800AED60(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x800AED80)
SetType(0x800AED80, "void SetBorder__6Dialogi_addr_800AED80(struct Dialog *this, int Type)")
del_items(0x800AED88)
SetType(0x800AED88, "void ___6Dialog_addr_800AED88(struct Dialog *this, int __in_chrg)")
del_items(0x800AEDB0)
SetType(0x800AEDB0, "struct Dialog *__6Dialog_addr_800AEDB0(struct Dialog *this)")
del_items(0x800AEE30)
SetType(0x800AEE30, "int GetOverlayOtBase__7CBlocks_addr_800AEE30()")
del_items(0x800AEE38)
SetType(0x800AEE38, "unsigned short GetTick__C4CPad_addr_800AEE38(struct CPad *this)")
del_items(0x800AEE60)
SetType(0x800AEE60, "unsigned short GetDown__C4CPad_addr_800AEE60(struct CPad *this)")
del_items(0x800AEE88)
SetType(0x800AEE88, "void SetPadTickMask__4CPadUs_addr_800AEE88(struct CPad *this, unsigned short mask)")
del_items(0x800AEE90)
SetType(0x800AEE90, "void SetPadTick__4CPadUs_addr_800AEE90(struct CPad *this, unsigned short tick)")
del_items(0x800AEE98)
SetType(0x800AEE98, "void DisplayMonsterTypes__Fv()")
del_items(0x800AEEA0)
SetType(0x800AEEA0, "bool IsAutoTarget__Fi(int Spell)")
del_items(0x800AEED8)
SetType(0x800AEED8, "int GetXOff__Fii(int wx, int wy)")
del_items(0x800AEF20)
SetType(0x800AEF20, "int GetYOff__Fii(int wx, int wy)")
del_items(0x800AEF6C)
SetType(0x800AEF6C, "void GetScrXY__FPiT0(int *wx, int *wy)")
del_items(0x800AF03C)
SetType(0x800AF03C, "void ClearTrails__11SpellTarget(struct SpellTarget *this)")
del_items(0x800AF064)
# Auto-generated reverse-engineering annotations (IDA/Ghidra-style script).
# Each pair of calls clears any existing items at a fixed PSX RAM address
# (del_items) and then applies a C function signature at that address
# (SetType).  The signature strings carry the GCC 2.x mangled name followed
# by the demangled C prototype; they are consumed by the disassembly tool
# and must not be edited by hand.
# NOTE(review): the addresses and prototypes appear to cover the PSX Diablo
# overlay code — grouped below by apparent subsystem, inferred from the
# function names only; confirm against the project's symbol map.

# --- Spell-targeting UI (SpellTarget class and arrow cursor) ---
SetType(0x800AF064, "void Init__11SpellTargeti(struct SpellTarget *this, int plrn)")
del_items(0x800AF2C8)
SetType(0x800AF2C8, "void Remove__11SpellTarget(struct SpellTarget *this)")
del_items(0x800AF32C)
SetType(0x800AF32C, "void DrawArrow__11SpellTargetii(struct SpellTarget *this, int x1, int y1)")
del_items(0x800AF5B0)
SetType(0x800AF5B0, "void Show__11SpellTarget(struct SpellTarget *this)")
del_items(0x800AFACC)
SetType(0x800AFACC, "void ForceTarget__11SpellTargetiii(struct SpellTarget *this, int monst, int x, int y)")
del_items(0x800AFC20)
SetType(0x800AFC20, "bool TargetActive__Fi(int pnum)")
del_items(0x800AFC48)
SetType(0x800AFC48, "struct SpellTarget *GetSpellTarget__Fi(int pnum)")
del_items(0x800AFC68)
SetType(0x800AFC68, "void ArrowTask__FP4TASK(struct TASK *T)")
del_items(0x800B000C)
SetType(0x800B000C, "void SPL_Arrow__F6TARGETiii(enum TARGET t, int pnum, int times, int size)")
del_items(0x800B008C)
SetType(0x800B008C, "bool Active__11SpellTarget_addr_800B008C(struct SpellTarget *this)")
del_items(0x800B0098)
SetType(0x800B0098, "int GetOverlayOtBase__7CBlocks_addr_800B0098()")
del_items(0x800B00A0)
SetType(0x800B00A0, "unsigned short GetCur__C4CPad_addr_800B00A0(struct CPad *this)")

# --- Control panel: spell list/book, info box, character screen ---
del_items(0x8003017C)
SetType(0x8003017C, "unsigned char TrimCol__Fs_addr_8003017C(short col)")
del_items(0x800301B4)
SetType(0x800301B4, "void DrawSpellCel__FllUclUcc(long xp, long yp, unsigned char Trans, long nCel, int w, int sel)")
del_items(0x80030D38)
SetType(0x80030D38, "void SetSpellTrans__Fc(char t)")
del_items(0x80030D44)
SetType(0x80030D44, "void DrawSpellBookTSK__FP4TASK(struct TASK *T)")
del_items(0x80030E9C)
SetType(0x80030E9C, "void DrawSpeedSpellTSK__FP4TASK(struct TASK *T)")
del_items(0x80030FCC)
SetType(0x80030FCC, "void ToggleSpell__Fi(int pnum)")
del_items(0x80031080)
SetType(0x80031080, "void DrawSpellList__Fv()")
del_items(0x80031D24)
SetType(0x80031D24, "void SetSpell__Fi(int pnum)")
del_items(0x80031E30)
SetType(0x80031E30, "void AddPanelString__FPCci(char *str, int just)")
del_items(0x80031EF0)
SetType(0x80031EF0, "void ClearPanel__Fv()")
del_items(0x80031F20)
SetType(0x80031F20, "void InitPanelStr__Fv()")
del_items(0x80031F40)
SetType(0x80031F40, "void InitControlPan__Fv()")
del_items(0x8003216C)
SetType(0x8003216C, "void DrawCtrlPan__Fv()")
del_items(0x80032198)
SetType(0x80032198, "void DoAutoMap__Fv()")
del_items(0x800321F8)
SetType(0x800321F8, "void CheckPanelInfo__Fv()")
del_items(0x80032918)
SetType(0x80032918, "void FreeControlPan__Fv()")
del_items(0x80032A28)
SetType(0x80032A28, "int CPrintString__FiPci(int No, char *pszStr, int Just)")
del_items(0x80032B44)
SetType(0x80032B44, "void PrintInfo__Fv()")
del_items(0x80032F74)
SetType(0x80032F74, "void DrawInfoBox__FP4RECT(struct RECT *InfoRect)")
del_items(0x800336A8)
SetType(0x800336A8, "void MY_PlrStringXY__Fv()")
del_items(0x80033DB8)
SetType(0x80033DB8, "void ADD_PlrStringXY__FPCcc(char *pszStr, char col)")
del_items(0x80033E60)
SetType(0x80033E60, "void DrawPlus__Fii(int n, int pnum)")
del_items(0x80033FF8)
SetType(0x80033FF8, "void ChrCheckValidButton__Fi(int move)")
del_items(0x80034304)
SetType(0x80034304, "void DrawArrows__Fv()")
del_items(0x80034404)
SetType(0x80034404, "void BuildChr__Fv()")
del_items(0x80035668)
SetType(0x80035668, "void DrawChr__Fv()")
del_items(0x80035B08)
SetType(0x80035B08, "void DrawChrTSK__FP4TASK(struct TASK *T)")
del_items(0x80035C18)
SetType(0x80035C18, "void DrawLevelUpIcon__Fi(int pnum)")
del_items(0x80035CAC)
SetType(0x80035CAC, "void CheckChrBtns__Fv()")
del_items(0x80036034)
SetType(0x80036034, "int DrawDurIcon4Item__FPC10ItemStructii(struct ItemStruct *pItem, int x, int c)")
del_items(0x800360B8)
SetType(0x800360B8, "void RedBack__Fv()")
del_items(0x800361B0)
SetType(0x800361B0, "void PrintSBookStr__FiiiPCcUcUc(int x, int y, int cspel, char *pszStr, int bright, int Staff)")
del_items(0x80036438)
SetType(0x80036438, "char GetSBookTrans__FiUc(int ii, unsigned char townok)")
del_items(0x80036698)
SetType(0x80036698, "void DrawSpellBook__Fb(bool DrawBg)")
del_items(0x80037208)
SetType(0x80037208, "void CheckSBook__Fv()")
del_items(0x800374A4)
SetType(0x800374A4, "char *get_pieces_str__Fi(int nGold)")
del_items(0x800374D8)
SetType(0x800374D8, "void _GLOBAL__D_DrawLevelUpFlag()")
del_items(0x80037500)
SetType(0x80037500, "void _GLOBAL__I_DrawLevelUpFlag()")
# Address-suffixed duplicates (per-overlay copies of shared inline helpers).
del_items(0x8003753C)
SetType(0x8003753C, "unsigned short GetTick__C4CPad_addr_8003753C(struct CPad *this)")
del_items(0x80037564)
SetType(0x80037564, "unsigned short GetDown__C4CPad_addr_80037564(struct CPad *this)")
del_items(0x8003758C)
SetType(0x8003758C, "void SetPadTickMask__4CPadUs_addr_8003758C(struct CPad *this, unsigned short mask)")
del_items(0x80037594)
SetType(0x80037594, "void SetPadTick__4CPadUs_addr_80037594(struct CPad *this, unsigned short tick)")
del_items(0x8003759C)
SetType(0x8003759C, "void SetRGB__6DialogUcUcUc_addr_8003759C(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x800375BC)
SetType(0x800375BC, "void SetBack__6Dialogi_addr_800375BC(struct Dialog *this, int Type)")
del_items(0x800375C4)
SetType(0x800375C4, "void SetBorder__6Dialogi_addr_800375C4(struct Dialog *this, int Type)")
del_items(0x800375CC)
SetType(0x800375CC, "void ___6Dialog_addr_800375CC(struct Dialog *this, int __in_chrg)")
del_items(0x800375F4)
SetType(0x800375F4, "struct Dialog *__6Dialog_addr_800375F4(struct Dialog *this)")
del_items(0x80037674)
SetType(0x80037674, "int GetOverlayOtBase__7CBlocks_addr_80037674()")
del_items(0x8003767C)
SetType(0x8003767C, "int GetMaxOtPos__7CBlocks_addr_8003767C()")
del_items(0x80037684)
SetType(0x80037684, "struct PAL *GetPal__7TextDati_addr_80037684(struct TextDat *this, int PalNum)")
del_items(0x800376A0)
SetType(0x800376A0, "struct FRAME_HDR *GetFr__7TextDati_addr_800376A0(struct TextDat *this, int FrNum)")

# --- Cursor handling and portal/town checks ---
del_items(0x800376BC)
SetType(0x800376BC, "void InitCursor__Fv()")
del_items(0x800376C4)
SetType(0x800376C4, "void FreeCursor__Fv()")
del_items(0x800376CC)
SetType(0x800376CC, "void SetICursor__Fi(int i)")
del_items(0x80037728)
SetType(0x80037728, "void SetCursor__Fi(int i)")
del_items(0x8003778C)
SetType(0x8003778C, "void NewCursor__Fi(int i)")
del_items(0x800377AC)
SetType(0x800377AC, "void InitLevelCursor__Fv()")
del_items(0x8003780C)
SetType(0x8003780C, "void CheckTown__Fv()")
del_items(0x80037A98)
SetType(0x80037A98, "void CheckRportal__Fv()")
del_items(0x80037CF8)
SetType(0x80037CF8, "void CheckCursMove__Fv()")

# --- Main game loop, level creation/loading, assertions, memory cards ---
del_items(0x80037D00)
SetType(0x80037D00, "void InitDead__Fv()")
del_items(0x80037F04)
SetType(0x80037F04, "void AddDead__Fiici(int dx, int dy, char dv, int ddir)")
del_items(0x80037F24)
SetType(0x80037F24, "void FreeGameMem__Fv()")
del_items(0x80037F5C)
SetType(0x80037F5C, "void start_game__FUi(unsigned int uMsg)")
del_items(0x8003804C)
SetType(0x8003804C, "void free_game__Fv()")
del_items(0x800380C0)
SetType(0x800380C0, "void LittleStart__FUcUc(unsigned char bNewGame, unsigned char bSinglePlayer)")
del_items(0x80038184)
SetType(0x80038184, "unsigned char StartGame__FUcUc(unsigned char bNewGame, unsigned char bSinglePlayer)")
del_items(0x80038384)
SetType(0x80038384, "void run_game_loop__FUi(unsigned int uMsg)")
del_items(0x800384EC)
SetType(0x800384EC, "unsigned char TryIconCurs__Fv()")
del_items(0x8003880C)
SetType(0x8003880C, "unsigned long DisableInputWndProc__FUlUilUl(unsigned long hWnd, unsigned int uMsg, long wParam, unsigned long lParam)")
del_items(0x80038814)
SetType(0x80038814, "unsigned long GM_Game__FUlUilUl(unsigned long hWnd, unsigned int uMsg, long wParam, unsigned long lParam)")
del_items(0x800388A8)
SetType(0x800388A8, "void LoadLvlGFX__Fv()")
del_items(0x80038960)
SetType(0x80038960, "void LoadMegaTiles__FPCc(char *LoadFile)")
del_items(0x800389F0)
SetType(0x800389F0, "void LoadAllGFX__Fv()")
del_items(0x80038A10)
SetType(0x80038A10, "void CreateLevel__Fi(int lvldir)")
del_items(0x80038B08)
SetType(0x80038B08, "void LoCreateLevel__FPv()")
del_items(0x80038C6C)
SetType(0x80038C6C, "void ClearOutDungeonMap__Fv()")
del_items(0x80038E6C)
SetType(0x80038E6C, "void AddQuestItems__Fv()")
del_items(0x80038F0C)
SetType(0x80038F0C, "void AllSolid__Fii(int x, int y)")
del_items(0x80038F4C)
SetType(0x80038F4C, "void FillCrapBits__Fv()")
del_items(0x800390EC)
SetType(0x800390EC, "void Lsaveplrpos__Fv()")
del_items(0x80039198)
SetType(0x80039198, "void Lrestoreplrpos__Fv()")
del_items(0x800391E8)
SetType(0x800391E8, "void LoadGameLevel__FUci(unsigned char firstflag, int lvldir)")
del_items(0x80039B20)
SetType(0x80039B20, "void SetSpeed__F9GM_SPEEDS(enum GM_SPEEDS Speed)")
del_items(0x80039B34)
SetType(0x80039B34, "enum GM_SPEEDS GetSpeed__Fv()")
del_items(0x80039B40)
SetType(0x80039B40, "void game_logic__Fv()")
del_items(0x80039D28)
SetType(0x80039D28, "void timeout_cursor__FUc(unsigned char bTimeout)")
del_items(0x80039DD0)
SetType(0x80039DD0, "void game_loop__FUc(unsigned char bStartup)")
del_items(0x80039E30)
SetType(0x80039E30, "void alloc_plr__Fv()")
del_items(0x80039E38)
SetType(0x80039E38, "void plr_encrypt__FUc(unsigned char bEncrypt)")
del_items(0x80039E40)
SetType(0x80039E40, "void assert_fail__FiPCcT1(int nLineNo, char *pszFile, char *pszFail)")
del_items(0x80039E60)
SetType(0x80039E60, "void assert_fail__FiPCc(int nLineNo, char *pszFile)")
del_items(0x80039E80)
SetType(0x80039E80, "void app_fatal(char *pszFile)")
del_items(0x80039EB0)
SetType(0x80039EB0, "void DoMemCardFromFrontEnd__Fv()")
del_items(0x80039ED8)
SetType(0x80039ED8, "void DoMemCardFromInGame__Fv()")

# --- Town NPCs ("towners"): init, per-frame processing, talk handlers ---
del_items(0x80039F00)
SetType(0x80039F00, "int GetActiveTowner__Fi(int t)")
del_items(0x80039F54)
SetType(0x80039F54, "void SetTownerGPtrs__FPUcPPUc(unsigned char *pData, unsigned char **pAnim)")
del_items(0x80039F74)
SetType(0x80039F74, "void NewTownerAnim__FiPUcii(int tnum, unsigned char *pAnim, int numFrames, int Delay)")
del_items(0x80039FC4)
SetType(0x80039FC4, "void InitTownerInfo__FilUciiici(int i, long w, unsigned char sel, int t, int x, int y, int ao, int tp)")
del_items(0x8003A11C)
SetType(0x8003A11C, "void InitQstSnds__Fi(int i)")
del_items(0x8003A1DC)
SetType(0x8003A1DC, "void InitSmith__Fv()")
del_items(0x8003A30C)
SetType(0x8003A30C, "void InitBarOwner__Fv()")
del_items(0x8003A444)
SetType(0x8003A444, "void InitTownDead__Fv()")
del_items(0x8003A578)
SetType(0x8003A578, "void InitWitch__Fv()")
del_items(0x8003A6AC)
SetType(0x8003A6AC, "void InitBarmaid__Fv()")
del_items(0x8003A7E0)
SetType(0x8003A7E0, "void InitBoy__Fv()")
del_items(0x8003A91C)
SetType(0x8003A91C, "void InitHealer__Fv()")
del_items(0x8003AA50)
SetType(0x8003AA50, "void InitTeller__Fv()")
del_items(0x8003AB84)
SetType(0x8003AB84, "void InitDrunk__Fv()")
del_items(0x8003ACB8)
SetType(0x8003ACB8, "void InitCows__Fv()")
del_items(0x8003AF54)
SetType(0x8003AF54, "void InitTowners__Fv()")
del_items(0x8003AFE0)
SetType(0x8003AFE0, "void FreeTownerGFX__Fv()")
del_items(0x8003B084)
SetType(0x8003B084, "void TownCtrlMsg__Fi(int i)")
del_items(0x8003B16C)
SetType(0x8003B16C, "void TownBlackSmith__Fv()")
del_items(0x8003B1F8)
SetType(0x8003B1F8, "void TownBarOwner__Fv()")
del_items(0x8003B294)
SetType(0x8003B294, "void TownDead__Fv()")
del_items(0x8003B37C)
SetType(0x8003B37C, "void TownHealer__Fv()")
del_items(0x8003B3A4)
SetType(0x8003B3A4, "void TownStory__Fv()")
del_items(0x8003B3CC)
SetType(0x8003B3CC, "void TownDrunk__Fv()")
del_items(0x8003B3F4)
SetType(0x8003B3F4, "void TownBoy__Fv()")
del_items(0x8003B41C)
SetType(0x8003B41C, "void TownWitch__Fv()")
del_items(0x8003B444)
SetType(0x8003B444, "void TownBarMaid__Fv()")
del_items(0x8003B46C)
SetType(0x8003B46C, "void TownCow__Fv()")
del_items(0x8003B494)
SetType(0x8003B494, "void ProcessTowners__Fv()")
del_items(0x8003B6E4)
SetType(0x8003B6E4, "struct ItemStruct *PlrHasItem__FiiRi(int pnum, int item, int *i)")
del_items(0x8003B7B8)
SetType(0x8003B7B8, "void CowSFX__Fi(int pnum)")
del_items(0x8003B8D4)
SetType(0x8003B8D4, "void TownerTalk__Fii(int first, int t)")
del_items(0x8003B914)
SetType(0x8003B914, "void TalkToTowner__Fii(int p, int t)")

# --- Sound effects and audio streaming ---
del_items(0x8003CE9C)
SetType(0x8003CE9C, "unsigned char effect_is_playing__Fi(int nSFX)")
del_items(0x8003CEC4)
SetType(0x8003CEC4, "void stream_stop__Fv()")
del_items(0x8003CF20)
SetType(0x8003CF20, "void stream_pause__Fv()")
del_items(0x8003CF84)
SetType(0x8003CF84, "void stream_resume__Fv()")
del_items(0x8003CFD4)
SetType(0x8003CFD4, "void stream_play__FP4TSFXll(struct TSFX *pSFX, long lVolume, long lPan)")
del_items(0x8003D0C0)
SetType(0x8003D0C0, "void stream_update__Fv()")
del_items(0x8003D0C8)
SetType(0x8003D0C8, "void sfx_stop__Fv()")
del_items(0x8003D0E4)
SetType(0x8003D0E4, "void InitMonsterSND__Fi(int monst)")
del_items(0x8003D13C)
SetType(0x8003D13C, "void FreeMonsterSnd__Fv()")
del_items(0x8003D144)
SetType(0x8003D144, "unsigned char calc_snd_position__FiiPlT2(int x, int y, long *plVolume, long *plPan)")
del_items(0x8003D32C)
SetType(0x8003D32C, "void PlaySFX_priv__FP4TSFXUcii(struct TSFX *pSFX, unsigned char loc, int x, int y)")
del_items(0x8003D490)
SetType(0x8003D490, "void PlayEffect__Fii(int i, int mode)")
del_items(0x8003D5DC)
SetType(0x8003D5DC, "int RndSFX__Fi(int psfx)")
del_items(0x8003D684)
SetType(0x8003D684, "void PlaySFX__Fi(int psfx)")
del_items(0x8003D6F0)
SetType(0x8003D6F0, "void PlaySfxLoc__Fiii(int psfx, int x, int y)")
del_items(0x8003D79C)
SetType(0x8003D79C, "void sound_stop__Fv()")
del_items(0x8003D834)
SetType(0x8003D834, "void sound_update__Fv()")
del_items(0x8003D868)
SetType(0x8003D868, "void priv_sound_init__FUc(unsigned char bLoadMask)")
del_items(0x8003D8AC)
SetType(0x8003D8AC, "void sound_init__Fv()")
del_items(0x8003D954)
SetType(0x8003D954, "void stream_fade__Fv()")

# --- Engine helpers: direction, RNG, allocation, in-game messages ---
del_items(0x8003D994)
SetType(0x8003D994, "int GetDirection__Fiiii(int x1, int y1, int x2, int y2)")
del_items(0x8003DA38)
SetType(0x8003DA38, "void SetRndSeed__Fl(long s)")
del_items(0x8003DA48)
SetType(0x8003DA48, "long GetRndSeed__Fv()")
del_items(0x8003DA90)
SetType(0x8003DA90, "long random__Fil(int idx, long v)")
del_items(0x8003DAFC)
SetType(0x8003DAFC, "unsigned char *DiabloAllocPtr__FUl(unsigned long dwBytes)")
del_items(0x8003DB48)
SetType(0x8003DB48, "void mem_free_dbg__FPv(void *p)")
del_items(0x8003DB98)
SetType(0x8003DB98, "unsigned char *LoadFileInMem__FPCcPUl(char *pszName, unsigned long *pdwFileLen)")
del_items(0x8003DBA0)
SetType(0x8003DBA0, "void PlayInGameMovie__FPCc(char *pszMovie)")
del_items(0x8003DBA8)
SetType(0x8003DBA8, "void Enter__9CCritSect(struct CCritSect *this)")
del_items(0x8003DBB0)
SetType(0x8003DBB0, "void InitDiabloMsg__Fc(char e)")
del_items(0x8003DC44)
SetType(0x8003DC44, "void ClrDiabloMsg__Fv()")
del_items(0x8003DC70)
SetType(0x8003DC70, "void DrawDiabloMsg__Fv()")
del_items(0x8003DDA4)
SetType(0x8003DDA4, "void interface_msg_pump__Fv()")
del_items(0x8003DDAC)
SetType(0x8003DDAC, "void ShowProgress__FUi(unsigned int uMsg)")

# --- Items: creation, attributes, stores, use, display ---
del_items(0x8003E180)
SetType(0x8003E180, "void InitAllItemsUseable__Fv()")
del_items(0x8003E1B8)
SetType(0x8003E1B8, "void InitItemGFX__Fv()")
del_items(0x8003E1E4)
SetType(0x8003E1E4, "unsigned char ItemPlace__Fii(int xp, int yp)")
del_items(0x8003E280)
SetType(0x8003E280, "void AddInitItems__Fv()")
del_items(0x8003E49C)
SetType(0x8003E49C, "void InitItems__Fb(bool re_init)")
del_items(0x8003E654)
SetType(0x8003E654, "void CalcPlrItemVals__FiUc(int p, unsigned char Loadgfx)")
del_items(0x8003F0CC)
SetType(0x8003F0CC, "void CalcPlrScrolls__Fi(int p)")
del_items(0x8003F44C)
SetType(0x8003F44C, "void CalcPlrStaff__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8003F508)
SetType(0x8003F508, "void CalcSelfItems__Fi(int pnum)")
del_items(0x8003F668)
SetType(0x8003F668, "unsigned char ItemMinStats__FPC12PlayerStructPC10ItemStruct(struct PlayerStruct *p, struct ItemStruct *x)")
del_items(0x8003F6B4)
SetType(0x8003F6B4, "void SetItemMinStats__FPC12PlayerStructP10ItemStruct(struct PlayerStruct *p, struct ItemStruct *x)")
del_items(0x8003F6E0)
SetType(0x8003F6E0, "void CalcPlrItemMin__Fi(int pnum)")
del_items(0x8003F7C0)
SetType(0x8003F7C0, "void CalcPlrBookVals__Fi(int p)")
del_items(0x8003FAA4)
SetType(0x8003FAA4, "void CalcPlrInv__FiUc(int p, unsigned char Loadgfx)")
del_items(0x8003FB54)
SetType(0x8003FB54, "void SetPlrHandItem__FP10ItemStructi(struct ItemStruct *h, int idata)")
del_items(0x8003FC6C)
SetType(0x8003FC6C, "void GetPlrHandSeed__FP10ItemStruct(struct ItemStruct *h)")
del_items(0x8003FC98)
SetType(0x8003FC98, "void GetGoldSeed__FiP10ItemStruct(int pnum, struct ItemStruct *h)")
del_items(0x8003FE00)
SetType(0x8003FE00, "void SetPlrHandSeed__FP10ItemStructi(struct ItemStruct *h, int iseed)")
del_items(0x8003FE08)
SetType(0x8003FE08, "void SetPlrHandGoldCurs__FP10ItemStruct(struct ItemStruct *h)")
del_items(0x8003FE38)
SetType(0x8003FE38, "void CreatePlrItems__Fi(int p)")
del_items(0x80040398)
SetType(0x80040398, "unsigned char ItemSpaceOk__Fii(int i, int j)")
del_items(0x80040618)
SetType(0x80040618, "unsigned char GetItemSpace__Fiic(int x, int y, char inum)")
del_items(0x80040834)
SetType(0x80040834, "void GetSuperItemSpace__Fiic(int x, int y, char inum)")
del_items(0x8004098C)
SetType(0x8004098C, "void GetSuperItemLoc__FiiRiT2(int x, int y, int *xx, int *yy)")
del_items(0x80040A54)
SetType(0x80040A54, "void CalcItemValue__Fi(int i)")
del_items(0x80040B0C)
SetType(0x80040B0C, "void GetBookSpell__Fii(int i, int lvl)")
del_items(0x80040D70)
SetType(0x80040D70, "void GetStaffPower__FiiiUc(int i, int lvl, int bs, unsigned char onlygood)")
del_items(0x80040F60)
SetType(0x80040F60, "void GetStaffSpell__FiiUc(int i, int lvl, unsigned char onlygood)")
del_items(0x8004123C)
SetType(0x8004123C, "void GetItemAttrs__Fiii(int i, int idata, int lvl)")
del_items(0x800417E8)
SetType(0x800417E8, "int RndPL__Fii(int param1, int param2)")
del_items(0x80041820)
SetType(0x80041820, "int PLVal__Fiiiii(int pv, int p1, int p2, int minv, int maxv)")
del_items(0x80041894)
SetType(0x80041894, "void SaveItemPower__Fiiiiiii(int i, int power, int param1, int param2, int minval, int maxval, int multval)")
del_items(0x80042FC4)
SetType(0x80042FC4, "void GetItemPower__FiiilUc(int i, int minlvl, int maxlvl, long flgs, int onlygood)")
del_items(0x8004342C)
SetType(0x8004342C, "void GetItemBonus__FiiiiUc(int i, int idata, int minlvl, int maxlvl, int onlygood)")
del_items(0x80043528)
SetType(0x80043528, "void SetupItem__Fi(int i)")
del_items(0x80043658)
SetType(0x80043658, "int RndItem__Fi(int m)")
del_items(0x80043898)
SetType(0x80043898, "int RndUItem__Fi(int m)")
del_items(0x80043ADC)
SetType(0x80043ADC, "int RndAllItems__Fv()")
del_items(0x80043C44)
SetType(0x80043C44, "int RndTypeItems__Fii(int itype, int imid)")
del_items(0x80043DB4)
SetType(0x80043DB4, "int CheckUnique__FiiiUc(int i, int lvl, int uper, unsigned char recreate)")
del_items(0x80043F64)
SetType(0x80043F64, "void GetUniqueItem__Fii(int i, int uid)")
del_items(0x8004421C)
SetType(0x8004421C, "void SpawnUnique__Fiii(int uid, int x, int y)")
del_items(0x8004435C)
SetType(0x8004435C, "void ItemRndDur__Fi(int ii)")
del_items(0x800443F8)
SetType(0x800443F8, "void SetupAllItems__FiiiiiUcUcUc(int ii, int idx, int iseed, int lvl, int uper, int onlygood, int recreate, int pregen)")
del_items(0x80044740)
SetType(0x80044740, "void SpawnItem__FiiiUc(int m, int x, int y, unsigned char sendmsg)")
del_items(0x80044998)
SetType(0x80044998, "void CreateItem__Fiii(int uid, int x, int y)")
del_items(0x80044AF4)
SetType(0x80044AF4, "void CreateRndItem__FiiUcUcUc(int x, int y, unsigned char onlygood, unsigned char sendmsg, int delta)")
del_items(0x80044C3C)
SetType(0x80044C3C, "void SetupAllUseful__Fiii(int ii, int iseed, int lvl)")
del_items(0x80044D28)
SetType(0x80044D28, "void CreateRndUseful__FiiiUc(int pnum, int x, int y, unsigned char sendmsg)")
del_items(0x80044DE8)
SetType(0x80044DE8, "void CreateTypeItem__FiiUciiUcUc(int x, int y, unsigned char onlygood, int itype, int imisc, int sendmsg, int delta)")
del_items(0x80044F2C)
SetType(0x80044F2C, "void RecreateEar__FiUsiUciiiiii(int ii, unsigned short ic, int iseed, unsigned char Id, int dur, int mdur, int ch, int mch, int ivalue, int ibuff)")
del_items(0x8004512C)
SetType(0x8004512C, "void SpawnQuestItem__Fiiiii(int itemid, int x, int y, int randarea, int selflag)")
del_items(0x80045380)
SetType(0x80045380, "void SpawnRock__Fv()")
del_items(0x8004552C)
SetType(0x8004552C, "void RespawnItem__FiUc(int i, unsigned char FlipFlag)")
del_items(0x800456E4)
SetType(0x800456E4, "void DeleteItem__Fii(int ii, int i)")
del_items(0x80045738)
SetType(0x80045738, "void ItemDoppel__Fv()")
del_items(0x800457F8)
SetType(0x800457F8, "void ProcessItems__Fv()")
del_items(0x80045A9C)
SetType(0x80045A9C, "void FreeItemGFX__Fv()")
del_items(0x80045AA4)
SetType(0x80045AA4, "void GetItemStr__Fi(int i)")
del_items(0x80045C4C)
SetType(0x80045C4C, "void CheckIdentify__Fii(int pnum, int cii)")
del_items(0x80045D48)
SetType(0x80045D48, "void RepairItem__FP10ItemStructi(struct ItemStruct *i, int lvl)")
del_items(0x80045E3C)
SetType(0x80045E3C, "void DoRepair__Fii(int pnum, int cii)")
del_items(0x80045F00)
SetType(0x80045F00, "void RechargeItem__FP10ItemStructi(struct ItemStruct *i, int r)")
del_items(0x80045F68)
SetType(0x80045F68, "void DoRecharge__Fii(int pnum, int cii)")
del_items(0x8004608C)
SetType(0x8004608C, "void PrintItemOil__Fc(char IDidx)")
del_items(0x80046188)
SetType(0x80046188, "void PrintItemPower__FcPC10ItemStruct(char plidx, struct ItemStruct *x)")
del_items(0x8004682C)
SetType(0x8004682C, "void PrintItemMisc__FPC10ItemStruct(struct ItemStruct *x)")
del_items(0x80046A8C)
SetType(0x80046A8C, "void PrintItemDetails__FPC10ItemStruct(struct ItemStruct *x)")
del_items(0x80046E8C)
SetType(0x80046E8C, "void PrintItemDur__FPC10ItemStruct(struct ItemStruct *x)")
del_items(0x8004719C)
SetType(0x8004719C, "void CastScroll__Fii(int pnum, int Spell)")
del_items(0x800473EC)
SetType(0x800473EC, "void UseItem__Fiii(int p, int Mid, int spl)")
del_items(0x80047A08)
SetType(0x80047A08, "unsigned char StoreStatOk__FP10ItemStruct(struct ItemStruct *h)")
del_items(0x80047A9C)
SetType(0x80047A9C, "unsigned char PremiumItemOk__Fi(int i)")
del_items(0x80047B18)
SetType(0x80047B18, "int RndPremiumItem__Fii(int minlvl, int maxlvl)")
del_items(0x80047C20)
SetType(0x80047C20, "void SpawnOnePremium__Fii(int i, int plvl)")
del_items(0x80047F14)
SetType(0x80047F14, "void SpawnPremium__Fi(int lvl)")
del_items(0x800482B4)
SetType(0x800482B4, "void WitchBookLevel__Fi(int ii)")
del_items(0x80048490)
SetType(0x80048490, "void SpawnStoreGold__Fv()")
del_items(0x80048560)
SetType(0x80048560, "void RecalcStoreStats__Fv()")
del_items(0x80048844)
SetType(0x80048844, "int ItemNoFlippy__Fv()")
del_items(0x800488A8)
SetType(0x800488A8, "void CreateSpellBook__FiiiUcUc(int x, int y, int ispell, unsigned char sendmsg, int delta)")
del_items(0x80048A38)
SetType(0x80048A38, "void CreateMagicArmor__FiiiiUcUc(int x, int y, int imisc, int icurs, int sendmsg, int delta)")
del_items(0x80048BB4)
SetType(0x80048BB4, "void CreateMagicWeapon__FiiiiUcUc(int x, int y, int imisc, int icurs, int sendmsg, int delta)")
del_items(0x80048D30)
SetType(0x80048D30, "void DrawUniqueInfo__Fv()")
del_items(0x80048EA0)
SetType(0x80048EA0, "char *MakeItemStr__FP10ItemStructUsUs(struct ItemStruct *ItemPtr, unsigned short ItemNo, unsigned short MaxLen)")
del_items(0x80049274)
SetType(0x80049274, "unsigned char SmithItemOk__Fi(int i)")
del_items(0x800492D8)
SetType(0x800492D8, "int RndSmithItem__Fi(int lvl)")
del_items(0x800493E4)
SetType(0x800493E4, "unsigned char WitchItemOk__Fi(int i)")
del_items(0x80049474)
SetType(0x80049474, "int RndWitchItem__Fi(int lvl)")
del_items(0x80049624)
SetType(0x80049624, "void BubbleSwapItem__FP10ItemStructT0(struct ItemStruct *a, struct ItemStruct *b)")
del_items(0x8004972C)
SetType(0x8004972C, "void SortWitch__Fv()")
del_items(0x800498BC)
SetType(0x800498BC, "int RndBoyItem__Fi(int lvl)")
del_items(0x800499E0)
SetType(0x800499E0, "unsigned char HealerItemOk__Fi(int i)")
del_items(0x80049B94)
SetType(0x80049B94, "int RndHealerItem__Fi(int lvl)")
del_items(0x80049C94)
SetType(0x80049C94, "void RecreatePremiumItem__Fiiii(int ii, int idx, int plvl, int iseed)")
del_items(0x80049D70)
SetType(0x80049D70, "void RecreateWitchItem__Fiiii(int ii, int idx, int lvl, int iseed)")
del_items(0x80049EDC)
SetType(0x80049EDC, "void RecreateSmithItem__Fiiii(int ii, int idx, int lvl, int iseed)")
del_items(0x80049F8C)
SetType(0x80049F8C, "void RecreateHealerItem__Fiiii(int ii, int idx, int lvl, int iseed)")
del_items(0x8004A060)
SetType(0x8004A060, "void RecreateBoyItem__Fiiii(int ii, int idx, int lvl, int iseed)")
del_items(0x8004A138)
SetType(0x8004A138, "void RecreateTownItem__FiiUsii(int ii, int idx, unsigned short icreateinfo, int iseed, int ivalue)")
del_items(0x8004A1C4)
SetType(0x8004A1C4, "void SpawnSmith__Fi(int lvl)")
del_items(0x8004A4F4)
SetType(0x8004A4F4, "void SpawnWitch__Fi(int lvl)")
del_items(0x8004AAEC)
SetType(0x8004AAEC, "void SpawnHealer__Fi(int lvl)")
del_items(0x8004B090)
SetType(0x8004B090, "void SpawnBoy__Fi(int lvl)")
del_items(0x8004B394)
SetType(0x8004B394, "void SortSmith__Fv()")
del_items(0x8004B518)
SetType(0x8004B518, "void SortHealer__Fv()")
del_items(0x8004B6A8)
SetType(0x8004B6A8, "void RecreateItem__FiiUsiii(int ii, int idx, unsigned short icreateinfo, int iseed, int ivalue, int PlrCreate)")

# --- Lighting and vision (line-of-sight) ---
del_items(0x8004B8EC)
SetType(0x8004B8EC, "int veclen2__Fii(int ix, int iy)")
del_items(0x8004B954)
SetType(0x8004B954, "void set_light_bands__Fv()")
del_items(0x8004B9C4)
SetType(0x8004B9C4, "void SetLightFX__FiisssUcUcUc(int x, int y, short s_r, short s_g, int s_b, int d_r, int d_g, int d_b)")
del_items(0x8004BA30)
SetType(0x8004BA30, "void SetWeirdFX__Fv()")
del_items(0x8004BAA4)
SetType(0x8004BAA4, "void DoLighting__Fiiii(int nXPos, int nYPos, int nRadius, int Lnum)")
del_items(0x8004C778)
SetType(0x8004C778, "void DoUnLight__Fv()")
del_items(0x8004C9BC)
SetType(0x8004C9BC, "void DoUnVision__Fiiii(int nXPos, int nYPos, int nRadius, int num)")
del_items(0x8004CAC4)
SetType(0x8004CAC4, "void DoVision__FiiiUcUc(int nXPos, int nYPos, int nRadius, unsigned char doautomap, int visible)")
del_items(0x8004CEEC)
SetType(0x8004CEEC, "void FreeLightTable__Fv()")
del_items(0x8004CEF4)
SetType(0x8004CEF4, "void InitLightTable__Fv()")
del_items(0x8004CEFC)
SetType(0x8004CEFC, "void MakeLightTable__Fv()")
del_items(0x8004CF04)
SetType(0x8004CF04, "void InitLightMax__Fv()")
del_items(0x8004CF28)
SetType(0x8004CF28, "void InitLighting__Fv()")
del_items(0x8004CF6C)
SetType(0x8004CF6C, "int AddLight__Fiii(int x, int y, int r)")
del_items(0x8004CFC4)
SetType(0x8004CFC4, "void AddUnLight__Fi(int i)")
del_items(0x8004CFE8)
SetType(0x8004CFE8, "void ChangeLightRadius__Fii(int i, int r)")
del_items(0x8004D008)
SetType(0x8004D008, "void ChangeLightXY__Fiii(int i, int x, int y)")
del_items(0x8004D034)
SetType(0x8004D034, "void light_fix__Fi(int i)")
del_items(0x8004D03C)
SetType(0x8004D03C, "void ChangeLightOff__Fiii(int i, int x, int y)")
del_items(0x8004D064)
SetType(0x8004D064, "void ChangeLight__Fiiii(int i, int x, int y, int r)")
del_items(0x8004D090)
SetType(0x8004D090, "void ChangeLightColour__Fii(int i, int c)")
del_items(0x8004D0B8)
SetType(0x8004D0B8, "void ProcessLightList__Fv()")
del_items(0x8004D1D0)
SetType(0x8004D1D0, "void SavePreLighting__Fv()")
del_items(0x8004D1D8)
SetType(0x8004D1D8, "void InitVision__Fv()")
del_items(0x8004D22C)
SetType(0x8004D22C, "int AddVision__FiiiUc(int x, int y, int r, unsigned char mine)")
del_items(0x8004D2A0)
SetType(0x8004D2A0, "void ChangeVisionRadius__Fii(int id, int r)")
del_items(0x8004D354)
SetType(0x8004D354, "void ChangeVisionXY__Fiii(int id, int x, int y)")
del_items(0x8004D3D8)
SetType(0x8004D3D8, "void ProcessVisionList__Fv()")

# --- Quest text rendering and Dialog helper duplicates ---
del_items(0x8004D5E0)
SetType(0x8004D5E0, "void FreeQuestText__Fv()")
del_items(0x8004D5E8)
SetType(0x8004D5E8, "void InitQuestText__Fv()")
del_items(0x8004D5F4)
SetType(0x8004D5F4, "void CalcTextSpeed__FPCc(char *Name)")
del_items(0x8004D7B0)
SetType(0x8004D7B0, "void FadeMusicTSK__FP4TASK(struct TASK *T)")
del_items(0x8004D8FC)
SetType(0x8004D8FC, "void InitQTextMsg__Fi(int m)")
del_items(0x8004DB50)
SetType(0x8004DB50, "void DrawQTextBack__Fv()")
del_items(0x8004DCEC)
SetType(0x8004DCEC, "void DrawQTextTSK__FP4TASK(struct TASK *T)")
del_items(0x8004DFD4)
SetType(0x8004DFD4, "int KANJI_strlen__FPc(char *str)")
del_items(0x8004E014)
SetType(0x8004E014, "void DrawQText__Fv()")
del_items(0x8004E5C0)
SetType(0x8004E5C0, "void _GLOBAL__D_QBack()")
del_items(0x8004E5E8)
SetType(0x8004E5E8, "void _GLOBAL__I_QBack()")
del_items(0x8004E610)
SetType(0x8004E610, "void SetRGB__6DialogUcUcUc_addr_8004E610(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x8004E630)
SetType(0x8004E630, "void SetBorder__6Dialogi_addr_8004E630(struct Dialog *this, int Type)")
del_items(0x8004E638)
SetType(0x8004E638, "void ___6Dialog_addr_8004E638(struct Dialog *this, int __in_chrg)")
del_items(0x8004E660)
SetType(0x8004E660, "struct Dialog *__6Dialog_addr_8004E660(struct Dialog *this)")
del_items(0x8004E6E0)
SetType(0x8004E6E0, "int GetOverlayOtBase__7CBlocks_addr_8004E6E0()")
del_items(0x8004E6E8)
SetType(0x8004E6E8, "unsigned short GetDown__C4CPad_addr_8004E6E8(struct CPad *this)")

# --- Multiplayer: delta state sync, NetSendCmd* senders, On_* handlers ---
del_items(0x8004E710)
SetType(0x8004E710, "void nullmissile__Fiiiiiicii(int mi, int sx, int sy, int dx, int dy, int midir, int mienemy, int id, int dam)")
del_items(0x8004E718)
SetType(0x8004E718, "void FuncNULL__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8004E720)
SetType(0x8004E720, "void delta_init__Fv()")
del_items(0x8004E778)
SetType(0x8004E778, "void delta_kill_monster__FiUcUcUc(int mi, unsigned char x, unsigned char y, unsigned char bLevel)")
del_items(0x8004E810)
SetType(0x8004E810, "void delta_monster_hp__FilUc(int mi, long hp, unsigned char bLevel)")
del_items(0x8004E88C)
SetType(0x8004E88C, "void delta_leave_sync__FUc(unsigned char bLevel)")
del_items(0x8004EBB4)
SetType(0x8004EBB4, "void delta_sync_object__FiUcUc(int oi, unsigned char bCmd, unsigned char bLevel)")
del_items(0x8004EC14)
SetType(0x8004EC14, "unsigned char delta_get_item__FPC9TCmdGItemUc(struct TCmdGItem *pI, unsigned char bLevel)")
del_items(0x8004EDE0)
SetType(0x8004EDE0, "void delta_put_item__FPC9TCmdPItemiiUc(struct TCmdPItem *pI, int x, int y, unsigned char bLevel)")
del_items(0x8004EF6C)
SetType(0x8004EF6C, "unsigned char delta_portal_inited__Fi(int i)")
del_items(0x8004EF90)
SetType(0x8004EF90, "unsigned char delta_quest_inited__Fi(int i)")
del_items(0x8004EFB4)
SetType(0x8004EFB4, "void DeltaAddItem__Fi(int ii)")
del_items(0x8004F1DC)
SetType(0x8004F1DC, "int DeltaExportData__FPc(char *Dst)")
del_items(0x8004F208)
SetType(0x8004F208, "int DeltaImportData__FPc(char *Src)")
del_items(0x8004F250)
SetType(0x8004F250, "void DeltaSaveLevel__Fv()")
del_items(0x8004F34C)
SetType(0x8004F34C, "void NetSendCmd__FUcUc(unsigned char bHiPri, unsigned char bCmd)")
del_items(0x8004F374)
SetType(0x8004F374, "void NetSendCmdGolem__FUcUcUcUclUc(unsigned char mx, unsigned char my, unsigned char dir, unsigned char menemy, long hp, int cl)")
del_items(0x8004F3C0)
SetType(0x8004F3C0, "void NetSendCmdLoc__FUcUcUcUc(unsigned char bHiPri, unsigned char bCmd, unsigned char x, unsigned char y)")
del_items(0x8004F3F0)
SetType(0x8004F3F0, "void NetSendCmdLocParam1__FUcUcUcUcUs(unsigned char bHiPri, unsigned char bCmd, unsigned char x, unsigned char y, int wParam1)")
del_items(0x8004F428)
SetType(0x8004F428, "void NetSendCmdLocParam2__FUcUcUcUcUsUs(unsigned char bHiPri, unsigned char bCmd, unsigned char x, unsigned char y, int wParam1, int wParam2)")
del_items(0x8004F468)
SetType(0x8004F468, "void NetSendCmdLocParam3__FUcUcUcUcUsUsUs(unsigned char bHiPri, unsigned char bCmd, unsigned char x, unsigned char y, int wParam1, int wParam2, int wParam3)")
del_items(0x8004F4B0)
SetType(0x8004F4B0, "void NetSendCmdParam1__FUcUcUs(unsigned char bHiPri, unsigned char bCmd, unsigned short wParam1)")
del_items(0x8004F4DC)
SetType(0x8004F4DC, "void NetSendCmdParam2__FUcUcUsUs(unsigned char bHiPri, unsigned char bCmd, unsigned short wParam1, unsigned short wParam2)")
del_items(0x8004F50C)
SetType(0x8004F50C, "void NetSendCmdParam3__FUcUcUsUsUs(unsigned char bHiPri, unsigned char bCmd, unsigned short wParam1, unsigned short wParam2, int wParam3)")
del_items(0x8004F544)
SetType(0x8004F544, "void NetSendCmdQuest__FUcUc(unsigned char bHiPri, unsigned char q)")
del_items(0x8004F5B8)
SetType(0x8004F5B8, "void NetSendCmdGItem__FUcUcUcUcUc(unsigned char bHiPri, unsigned char bCmd, unsigned char mast, unsigned char pnum, int ii)")
del_items(0x8004F700)
SetType(0x8004F700, "void NetSendCmdGItem2__FUcUcUcUcPC9TCmdGItem(unsigned char usonly, unsigned char bCmd, unsigned char mast, unsigned char pnum, struct TCmdGItem *p)")
del_items(0x8004F784)
SetType(0x8004F784, "unsigned char NetSendCmdReq2__FUcUcUcPC9TCmdGItem(unsigned char bCmd, unsigned char mast, unsigned char pnum, struct TCmdGItem *p)")
del_items(0x8004F7E4)
SetType(0x8004F7E4, "void NetSendCmdExtra__FPC9TCmdGItem(struct TCmdGItem *p)")
del_items(0x8004F854)
SetType(0x8004F854, "void NetSendCmdPItem__FUcUcUcUc(unsigned char bHiPri, unsigned char bCmd, unsigned char x, unsigned char y)")
del_items(0x8004F970)
SetType(0x8004F970, "void NetSendCmdChItem__FUcUc(unsigned char bHiPri, unsigned char bLoc)")
del_items(0x8004FA14)
SetType(0x8004FA14, "void NetSendCmdDelItem__FUcUc(unsigned char bHiPri, unsigned char bLoc)")
del_items(0x8004FA44)
SetType(0x8004FA44, "void NetSendCmdDItem__FUci(unsigned char bHiPri, int ii)")
del_items(0x8004FB6C)
SetType(0x8004FB6C, "unsigned char i_own_level__Fi(int nReqLevel)")
del_items(0x8004FB74)
SetType(0x8004FB74, "void NetSendCmdDamage__FUcUcUl(unsigned char bHiPri, unsigned char bPlr, unsigned long dwDam)")
del_items(0x8004FBA8)
SetType(0x8004FBA8, "void delta_close_portal__Fi(int pnum)")
del_items(0x8004FBE8)
SetType(0x8004FBE8, "void check_update_plr__Fi(int pnum)")
del_items(0x8004FBF0)
SetType(0x8004FBF0, "void On_WALKXY__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8004FC70)
SetType(0x8004FC70, "void On_ADDSTR__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8004FCA0)
SetType(0x8004FCA0, "void On_ADDMAG__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8004FCD0)
SetType(0x8004FCD0, "void On_ADDDEX__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8004FD00)
SetType(0x8004FD00, "void On_ADDVIT__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8004FD30)
SetType(0x8004FD30, "void On_SBSPELL__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8004FDA4)
SetType(0x8004FDA4, "void On_GOTOGETITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8004FE2C)
SetType(0x8004FE2C, "void On_REQUESTGITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8004FF6C)
SetType(0x8004FF6C, "void On_GETITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050140)
SetType(0x80050140, "void On_GOTOAGETITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800501C8)
SetType(0x800501C8, "void On_REQUESTAGITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800502FC)
SetType(0x800502FC, "void On_AGETITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800504C8)
SetType(0x800504C8, "void On_ITEMEXTRA__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050514)
SetType(0x80050514, "void On_PUTITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800505D4)
SetType(0x800505D4, "void On_SYNCPUTITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800506D8)
SetType(0x800506D8, "void On_RESPAWNITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800507F4)
SetType(0x800507F4, "void On_SATTACKXY__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050880)
SetType(0x80050880, "void On_SPELLXYD__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050968)
SetType(0x80050968, "void On_SPELLXY__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050A40)
SetType(0x80050A40, "void On_TSPELLXY__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050B1C)
SetType(0x80050B1C, "void On_OPOBJXY__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050BFC)
SetType(0x80050BFC, "void On_DISARMXY__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050CDC)
SetType(0x80050CDC, "void On_OPOBJT__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050D28)
SetType(0x80050D28, "void On_ATTACKID__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050E64)
SetType(0x80050E64, "void On_SPELLID__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050F2C)
SetType(0x80050F2C, "void On_SPELLPID__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80050FEC)
SetType(0x80050FEC, "void On_TSPELLID__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800510B0)
SetType(0x800510B0, "void On_TSPELLPID__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051174)
SetType(0x80051174, "void On_KNOCKBACK__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051230)
SetType(0x80051230, "void On_RESURRECT__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051268)
SetType(0x80051268, "void On_HEALOTHER__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051290)
SetType(0x80051290, "void On_TALKXY__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051318)
SetType(0x80051318, "void On_NEWLVL__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051348)
SetType(0x80051348, "void On_WARP__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8005145C)
SetType(0x8005145C, "void On_MONSTDEATH__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051510)
SetType(0x80051510, "void On_KILLGOLEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8005157C)
SetType(0x8005157C, "void On_AWAKEGOLEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051694)
SetType(0x80051694, "void On_MONSTDAMAGE__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051784)
SetType(0x80051784, "void On_PLRDEAD__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800517CC)
SetType(0x800517CC, "void On_PLRDAMAGE__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800518E0)
SetType(0x800518E0, "void On_OPENDOOR__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x8005195C)
SetType(0x8005195C, "void On_CLOSEDOOR__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800519D8)
SetType(0x800519D8, "void On_OPERATEOBJ__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051A54)
SetType(0x80051A54, "void On_PLROPOBJ__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051AD0)
SetType(0x80051AD0, "void On_BREAKOBJ__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051B48)
SetType(0x80051B48, "void On_CHANGEPLRITEMS__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051B50)
SetType(0x80051B50, "void On_DELPLRITEMS__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051B58)
SetType(0x80051B58, "void On_PLRLEVEL__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051B60)
SetType(0x80051B60, "void On_DROPITEM__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051BB8)
SetType(0x80051BB8, "void On_PLAYER_JOINLEVEL__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051DC0)
SetType(0x80051DC0, "void On_ACTIVATEPORTAL__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051E04)
SetType(0x80051E04, "void On_DEACTIVATEPORTAL__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051E64)
SetType(0x80051E64, "void On_RETOWN__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051E9C)
SetType(0x80051E9C, "void On_SETSTR__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051EDC)
SetType(0x80051EDC, "void On_SETDEX__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051F1C)
SetType(0x80051F1C, "void On_SETMAG__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051F5C)
SetType(0x80051F5C, "void On_SETVIT__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051F9C)
SetType(0x80051F9C, "void On_SYNCQUEST__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x80051FE4)
SetType(0x80051FE4, "void On_ENDSHIELD__FPC4TCmdi(struct TCmd *pCmd, int pnum)")
del_items(0x800520B4)
SetType(0x800520B4, "unsigned long ParseCmd__FiPC4TCmd(int pnum, struct TCmd *pCmd)")
del_items(0x800524D4)
SetType(0x800524D4, "struct DLevel *GetDLevel__Fib(int LevNum, bool SetLevel)")
del_items(0x8005251C)
SetType(0x8005251C, "void ReleaseDLevel__FP6DLevel(struct DLevel *Dl)")
del_items(0x80052548)
SetType(0x80052548, "void MSG_ClearOutCompMap__Fv()")
del_items(0x80052570)
SetType(0x80052570, "void _GLOBAL__D_deltaload()")
del_items(0x80052598)
SetType(0x80052598, "void _GLOBAL__I_deltaload()")
del_items(0x800525F8)
SetType(0x800525F8, "struct CrunchComp *__10CrunchComp(struct CrunchComp *this)")
del_items(0x80052630)
SetType(0x80052630, "struct PakComp *__7PakComp(struct PakComp *this)")
del_items(0x80052668)
SetType(0x80052668, "struct NoComp *__6NoComp(struct NoComp *this)")
del_items(0x800526A0)
SetType(0x800526A0, "int GetSize__14CompressedLevs(struct CompressedLevs *this)")
del_items(0x800526DC)
SetType(0x800526DC, "struct CompClass *__9CompClass(struct CompClass *this)")
del_items(0x800526F0)
SetType(0x800526F0, "void DoDecomp__C10CrunchCompPUcPCUcii(struct CrunchComp *this, unsigned char *Dest, unsigned char *Src, int DstLen, int SrcLen)")
del_items(0x80052718)
SetType(0x80052718, "int DoComp__C10CrunchCompPUcPCUci(struct CrunchComp *this, unsigned char *Dest, unsigned char *Src, int SrcLen)")
del_items(0x80052740)
SetType(0x80052740, "void DoDecomp__C7PakCompPUcPCUcii(struct PakComp *this, unsigned char *Dest, unsigned char *Src, int DstLen, int SrcLen)")
del_items(0x80052764)
SetType(0x80052764, "int DoComp__C7PakCompPUcPCUci(struct PakComp *this, unsigned char *Dest, unsigned char *Src, int SrcLen)")
del_items(0x8005278C)
SetType(0x8005278C, "void DoDecomp__C6NoCompPUcPCUcii(struct NoComp *this, unsigned char *Dest, unsigned char *Src, int DstLen, int SrcLen)")
del_items(0x800527B8)
SetType(0x800527B8, "int DoComp__C6NoCompPUcPCUci(struct NoComp *this, unsigned char *Dest, unsigned char *Src, int SrcLen)")
del_items(0x800527F0)
SetType(0x800527F0, "void NetSendLoPri__FPCUcUc(unsigned char *pbMsg, unsigned char bLen)")
del_items(0x8005281C)
SetType(0x8005281C, "int InitLevelType__Fi(int l)")
del_items(0x80052868)
SetType(0x80052868, "void SetupLocalCoords__Fv()")
del_items(0x800529C8)
SetType(0x800529C8, "void InitNewSeed__Fl(long newseed)")
del_items(0x80052A3C)
SetType(0x80052A3C, "unsigned char NetInit__FUcPUc(unsigned char bSinglePlayer, unsigned char *pfExitProgram)")
del_items(0x80052CCC)
SetType(0x80052CCC, "void PostAddL1Door__Fiiii(int i, int x, int y, int ot)")
del_items(0x80052DB4)
SetType(0x80052DB4, "void PostAddL2Door__Fiiii(int i, int x, int y, int ot)")
del_items(0x80052F00)
SetType(0x80052F00, "void PostAddArmorStand__Fi(int i)")
del_items(0x80052F88)
SetType(0x80052F88, "void PostAddObjLight__Fii(int i, int r)")
del_items(0x8005304C)
SetType(0x8005304C, "void PostAddWeaponRack__Fi(int i)")
del_items(0x800530D4)
SetType(0x800530D4, "void PostObjObjAddSwitch__Fiiii(int ot, int ox, int oy, int oi)")
del_items(0x80053170)
SetType(0x80053170, "void InitObjectGFX__Fv()")
del_items(0x8005338C)
SetType(0x8005338C, "void FreeObjectGFX__Fv()")
del_items(0x80053398)
SetType(0x80053398, "void DeleteObject__Fii(int oi, int i)")
del_items(0x8005343C)
SetType(0x8005343C, "void SetupObject__Fiiii(int i, int x, int y, int ot)")
del_items(0x800536C0)
SetType(0x800536C0, "void SetObjMapRange__Fiiiiii(int i, int x1, int y1, int x2, int y2, int v)")
del_items(0x80053720)
SetType(0x80053720, "void SetBookMsg__Fii(int i, int msg)")
del_items(0x80053748)
SetType(0x80053748, "void AddObject__Fiii(int ot, int ox, int oy)")
del_items(0x80053858)
SetType(0x80053858, "void PostAddObject__Fiii(int ot, int ox, int oy)")
del_items(0x80053CC0)
SetType(0x80053CC0, "void Obj_Light__Fii(int i, int lr)")
del_items(0x80053EE0)
SetType(0x80053EE0, "void Obj_Circle__Fi(int i)")
del_items(0x80054224)
SetType(0x80054224, "void Obj_StopAnim__Fi(int i)")
del_items(0x80054288)
SetType(0x80054288, "void DrawExpl__Fiiiiiccc(int sx, int sy, int f, int ot, int scale, int rtint, int gtint, int btint)")
del_items(0x80054580)
SetType(0x80054580, "void DrawObjExpl__FP12ObjectStructiii(struct ObjectStruct *obj, int ScrX, int ScrY, int ot)")
del_items(0x800545F0)
SetType(0x800545F0, "void Obj_Door__Fi(int i)")
del_items(0x80054760)
SetType(0x80054760, "void Obj_Sarc__Fi(int i)")
del_items(0x800547AC)
SetType(0x800547AC, "void ActivateTrapLine__Fii(int ttype, int tid)")
del_items(0x800548BC)
SetType(0x800548BC, "void Obj_FlameTrap__Fi(int i)")
del_items(0x80054BA0)
SetType(0x80054BA0, "void Obj_Trap__Fi(int i)")
del_items(0x80054EE4)
SetType(0x80054EE4, "void Obj_BCrossDamage__Fi(int i)")
del_items(0x8005512C)
SetType(0x8005512C, "void ProcessObjects__Fv()")
del_items(0x800553A4)
SetType(0x800553A4, "void ObjSetMicro__Fiii(int dx, int dy, int pn)")
del_items(0x80055514)
SetType(0x80055514, "void ObjSetMini__Fiii(int x, int y, int v)")
del_items(0x800555FC)
SetType(0x800555FC, "void ObjL1Special__Fiiii(int x1, int y1, int x2, int y2)")
del_items(0x80055604)
SetType(0x80055604, "void ObjL2Special__Fiiii(int x1, int y1, int x2, int y2)")
del_items(0x8005560C)
SetType(0x8005560C, "void DoorSet__Fiii(int oi, int dx, int dy)")
del_items(0x80055870)
SetType(0x80055870, "void RedoPlayerVision__Fv()")
del_items(0x80055914)
SetType(0x80055914, "void OperateL1RDoor__FiiUc(int pnum, int oi, unsigned char sendflag)")
del_items(0x80055C74)
SetType(0x80055C74, "void OperateL1LDoor__FiiUc(int pnum, int oi, unsigned char sendflag)")
del_items(0x8005600C)
SetType(0x8005600C, "void OperateL2RDoor__FiiUc(int pnum, int oi, unsigned char sendflag)")
del_items(0x80056378)
SetType(0x80056378, "void OperateL2LDoor__FiiUc(int pnum, int oi, unsigned char sendflag)")
del_items(0x800566E4)
SetType(0x800566E4, "void OperateL3RDoor__FiiUc(int pnum, int oi, unsigned char sendflag)")
del_items(0x800569C0)
SetType(0x800569C0, "void OperateL3LDoor__FiiUc(int pnum, int oi, unsigned char sendflag)")
del_items(0x80056C9C)
SetType(0x80056C9C, "void MonstCheckDoors__Fi(int m)")
del_items(0x80057170)
SetType(0x80057170, "void PostAddL1Objs__Fiiii(int x1, int y1, int x2, int y2)")
del_items(0x80057278)
SetType(0x80057278, "void PostAddL2Objs__Fiiii(int x1, int y1, int x2, int y2)")
del_items(0x80057374)
SetType(0x80057374, "void ObjChangeMap__Fiiii(int x1, int y1, int x2, int y2)")
del_items(0x8005752C)
SetType(0x8005752C, "void DRLG_MRectTrans__Fiiii(int x1, int y1, int x2, int y2)")
del_items(0x800575C8)
SetType(0x800575C8, "void ObjChangeMapResync__Fiiii(int x1, int y1, int x2, int y2)")
del_items(0x80057740)
SetType(0x80057740, "void OperateL1Door__FiiUc(int pnum, int i, unsigned char sendflag)")
del_items(0x8005789C)
SetType(0x8005789C, "void OperateLever__Fii(int pnum, int i)")
del_items(0x80057A80)
SetType(0x80057A80, "void OperateBook__Fii(int pnum, int i)")
del_items(0x80058138)
SetType(0x80058138, "void OperateBookLever__Fii(int pnum, int i)")
del_items(0x800585A8)
SetType(0x800585A8, "void OperateSChambBk__Fii(int pnum, int i)")
del_items(0x800587E4)
SetType(0x800587E4, "void OperateChest__FiiUc(int pnum, int i, unsigned char sendmsg)")
del_items(0x80058BA4)
SetType(0x80058BA4, "void OperateMushPatch__Fii(int pnum, int i)")
del_items(0x80058D98)
SetType(0x80058D98, "void OperateInnSignChest__Fii(int pnum, int i)")
del_items(0x80058F4C)
SetType(0x80058F4C, "void OperateSlainHero__FiiUc(int pnum, int i, unsigned char sendmsg)")
del_items(0x8005919C)
SetType(0x8005919C, "void OperateTrapLvr__Fi(int i)")
del_items(0x8005936C)
SetType(0x8005936C, "void OperateSarc__FiiUc(int pnum, int i, unsigned char sendmsg)")
del_items(0x80059524)
SetType(0x80059524, "void OperateL2Door__FiiUc(int pnum, int i, unsigned char sendflag)")
del_items(0x80059680)
SetType(0x80059680, "void OperateL3Door__FiiUc(int pnum, int i, unsigned char sendflag)")
del_items(0x800597DC)
SetType(0x800597DC, "void LoadMapObjs__FPUcii(unsigned char *pMap, int startx, int starty)")
del_items(0x800598E4)
SetType(0x800598E4, "void OperatePedistal__Fii(int pnum, int i)")
del_items(0x80059DFC)
SetType(0x80059DFC, "void TryDisarm__Fii(int pnum, int i)")
del_items(0x80059FB0)
SetType(0x80059FB0, "int ItemMiscIdIdx__Fi(int imiscid)")
del_items(0x8005A020)
SetType(0x8005A020, "void OperateShrine__Fiii(int pnum, int i, int sType)")
del_items(0x8005C414)
SetType(0x8005C414, "void OperateSkelBook__FiiUc(int pnum, int i, unsigned char sendmsg)")
del_items(0x8005C590)
SetType(0x8005C590, "void OperateBookCase__FiiUc(int pnum, int i, unsigned char sendmsg)")
del_items(0x8005C7A8)
SetType(0x8005C7A8, "void OperateDecap__FiiUc(int pnum, int i, unsigned char sendmsg)")
del_items(0x8005C890)
SetType(0x8005C890, "void OperateArmorStand__FiiUc(int pnum, int i, unsigned char sendmsg)")
del_items(0x8005CA00)
SetType(0x8005CA00, "int FindValidShrine__Fi(int i)")
del_items(0x8005CAF0)
SetType(0x8005CAF0, "void OperateGoatShrine__Fiii(int pnum, int i, int sType)")
del_items(0x8005CB98)
SetType(0x8005CB98, "void OperateCauldron__Fiii(int pnum, int i, int sType)")
del_items(0x8005CC3C)
SetType(0x8005CC3C, "unsigned char OperateFountains__Fii(int pnum, int i)")
del_items(0x8005D1E8)
SetType(0x8005D1E8, "void OperateWeaponRack__FiiUc(int pnum, int i, unsigned char sendmsg)")
del_items(0x8005D394)
SetType(0x8005D394, "void OperateStoryBook__Fii(int pnum, int i)")
del_items(0x8005D488)
SetType(0x8005D488, "void OperateLazStand__Fii(int pnum, int i)")
del_items(0x8005D5C0)
SetType(0x8005D5C0, "void OperateObject__FiiUc(int pnum, int i, unsigned char TeleFlag)")
del_items(0x8005D9F8)
SetType(0x8005D9F8, "void SyncOpL1Door__Fiii(int pnum, int cmd, int i)")
del_items(0x8005DB0C)
SetType(0x8005DB0C, "void SyncOpL2Door__Fiii(int pnum, int cmd, int i)")
del_items(0x8005DC20)
SetType(0x8005DC20, "void SyncOpL3Door__Fiii(int pnum, int cmd, int i)")
del_items(0x8005DD34)
SetType(0x8005DD34, "void SyncOpObject__Fiii(int pnum, int cmd, int i)")
del_items(0x8005DF44)
SetType(0x8005DF44, "void BreakCrux__Fii(int pnum, int i)")
del_items(0x8005E178)
SetType(0x8005E178, "void BreakBarrel__FiiiUcUc(int pnum, int i, int dam, unsigned char forcebreak, int sendmsg)")
del_items(0x8005E6D0)
SetType(0x8005E6D0, "void BreakObject__Fii(int pnum, int oi)")
del_items(0x8005E834)
SetType(0x8005E834, "void SyncBreakObj__Fii(int pnum, int oi)")
del_items(0x8005E8B0)
SetType(0x8005E8B0, "void SyncL1Doors__Fi(int i)")
del_items(0x8005E9C8)
SetType(0x8005E9C8, "void SyncCrux__Fi(int i)")
del_items(0x8005EB00)
SetType(0x8005EB00, "void SyncLever__Fi(int i)")
del_items(0x8005EB84)
SetType(0x8005EB84, "void SyncQSTLever__Fi(int i)")
del_items(0x8005EC7C)
SetType(0x8005EC7C, "void SyncPedistal__Fi(int i)")
del_items(0x8005EC84)
SetType(0x8005EC84, "void SyncL2Doors__Fi(int i)")
del_items(0x8005EDEC)
SetType(0x8005EDEC, "void SyncL3Doors__Fi(int i)")
del_items(0x8005EF18)
SetType(0x8005EF18, "void SyncObjectAnim__Fi(int o)")
del_items(0x8005F058)
SetType(0x8005F058, "void GetObjectStr__Fi(int i)")
del_items(0x8005F474)
SetType(0x8005F474, "void AddLamp__Fiii(int x, int y, int r)")
del_items(0x8005F4B4)
SetType(0x8005F4B4, "void RestoreObjectLight__Fv()")
del_items(0x8005F680)
SetType(0x8005F680, "int GetOtPos__7CBlocksi_addr_8005F680(struct CBlocks *this, int LogicalY)")
del_items(0x8005F6BC)
SetType(0x8005F6BC, "int GetNumOfFrames__7TextDatii_addr_8005F6BC(struct TextDat *this, int Creature, int Action)")
del_items(0x8005F6F4)
SetType(0x8005F6F4, "struct CCreatureHdr *GetCreature__7TextDati_addr_8005F6F4(struct TextDat *this, int Creature)")
del_items(0x8005F710)
SetType(0x8005F710, "unsigned char game_2_ui_class__FPC12PlayerStruct(struct PlayerStruct *p)")
del_items(0x8005F73C)
SetType(0x8005F73C, "void game_2_ui_player__FPC12PlayerStructP11_uiheroinfoUc(struct PlayerStruct *p, struct _uiheroinfo *heroinfo, unsigned char bHasSaveFile)")
del_items(0x8005F7F0)
SetType(0x8005F7F0, "void SetupLocalPlayer__Fv()")
del_items(0x8005F800)
SetType(0x8005F800, "unsigned char IsDplayer__Fii(int x, int y)")
del_items(0x8005F88C)
SetType(0x8005F88C, "bool ismyplr__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8005F8D0)
SetType(0x8005F8D0, "int plrind__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8005F8E4)
SetType(0x8005F8E4, "void InitPlayerGFX__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8005F904)
SetType(0x8005F904, "void FreePlayerGFX__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8005F90C)
SetType(0x8005F90C, "void NewPlrAnim__FP12PlayerStructiii(struct PlayerStruct *ptrplr, int Peq, int numFrames, int Delay)")
del_items(0x8005F928)
SetType(0x8005F928, "void ClearPlrPVars__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8005F944)
SetType(0x8005F944, "void SetPlrAnims__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8005FB80)
SetType(0x8005FB80, "void CreatePlayer__FP12PlayerStructc(struct PlayerStruct *ptrplr, char c)")
del_items(0x8005FF88)
SetType(0x8005FF88, "int CalcStatDiff__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8005FFF0)
SetType(0x8005FFF0, "void NextPlrLevel__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8006016C)
SetType(0x8006016C, "void AddPlrExperience__FP12PlayerStructil(struct PlayerStruct *ptrplr, int lvl, long exp)")
del_items(0x80060390)
SetType(0x80060390, "void AddPlrMonstExper__Filc(int lvl, long exp, char pmask)")
del_items(0x80060414)
SetType(0x80060414, "void InitPlayer__FP12PlayerStructUc(struct PlayerStruct *ptrplr, unsigned char FirstTime)")
del_items(0x8006073C)
SetType(0x8006073C, "void InitMultiView__Fv()")
del_items(0x80060744)
SetType(0x80060744, "unsigned char SolidLoc__Fii(int x, int y)")
del_items(0x80060764)
SetType(0x80060764, "void PlrClrTrans__Fii(int x, int y)")
del_items(0x800607DC)
SetType(0x800607DC, "void PlrDoTrans__Fii(int x, int y)")
del_items(0x800608F4)
SetType(0x800608F4, "void SetPlayerOld__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80060908)
SetType(0x80060908, "void StartStand__FP12PlayerStructi(struct PlayerStruct *ptrplr, int dir)")
del_items(0x80060994)
SetType(0x80060994, "void StartWalkStand__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x800609F8)
SetType(0x800609F8, "void PM_ChangeLightOff__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80060A30)
SetType(0x80060A30, "void PM_ChangeOffset__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80060A5C)
SetType(0x80060A5C, "void StartAttack__FP12PlayerStructi(struct PlayerStruct *ptrplr, int d)")
del_items(0x80060BA0)
SetType(0x80060BA0, "void StartPlrBlock__FP12PlayerStructi(struct PlayerStruct *ptrplr, int dir)")
del_items(0x80060C38)
SetType(0x80060C38, "void StartSpell__FP12PlayerStructiii(struct PlayerStruct *ptrplr, int d, int cx, int cy)")
del_items(0x80060DEC)
SetType(0x80060DEC, "void RemovePlrFromMap__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80060DF4)
SetType(0x80060DF4, "void StartPlrHit__FP12PlayerStructiUc(struct PlayerStruct *ptrplr, int dam, unsigned char forcehit)")
del_items(0x80060F28)
SetType(0x80060F28, "void RespawnDeadItem__FP10ItemStructii(struct ItemStruct *itm, int x, int y)")
del_items(0x800610BC)
SetType(0x800610BC, "void PlrDeadItem__FP12PlayerStructP10ItemStructii(struct PlayerStruct *ptrplr, struct ItemStruct *itm, int xx, int yy)")
del_items(0x8006128C)
SetType(0x8006128C, "void StartPlayerDropItems__FP12PlayerStructi(struct PlayerStruct *ptrplr, int EarFlag)")
del_items(0x800612EC)
SetType(0x800612EC, "void TryDropPlayerItems__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80061428)
SetType(0x80061428, "void StartPlayerKill__FP12PlayerStructi(struct PlayerStruct *ptrplr, int earflag)")
del_items(0x80061624)
SetType(0x80061624, "void DropHalfPlayersGold__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80061734)
SetType(0x80061734, "void StartPlrKill__FP12PlayerStructi(struct PlayerStruct *ptrplr, int earflag)")
del_items(0x80061880)
SetType(0x80061880, "void SyncPlrKill__FP12PlayerStructi(struct PlayerStruct *ptrplr, int earflag)")
del_items(0x800618A0)
SetType(0x800618A0, "void RemovePlrMissiles__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80061B9C)
SetType(0x80061B9C, "void InitLevelChange__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80061C4C)
SetType(0x80061C4C, "void CheckPlrDead__Fi(int pnum)")
del_items(0x80061CA0)
SetType(0x80061CA0, "void StartNewLvl__FP12PlayerStructii(struct PlayerStruct *ptrplr, int fom, int lvl)")
del_items(0x80061E54)
SetType(0x80061E54, "void RestartTownLvl__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80061EFC)
SetType(0x80061EFC, "void StartWarpLvl__FP12PlayerStructi(struct PlayerStruct *ptrplr, int pidx)")
del_items(0x80062014)
SetType(0x80062014, "int PM_DoStand__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8006201C)
SetType(0x8006201C, "unsigned char ChkPlrOffsets__Fiiii(int wx1, int wy1, int wx2, int wy2)")
del_items(0x800620CC)
SetType(0x800620CC, "int PM_DoWalk__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x800622DC)
SetType(0x800622DC, "unsigned char WeaponDur__FP12PlayerStructi(struct PlayerStruct *ptrplr, int durrnd)")
del_items(0x800624A0)
SetType(0x800624A0, "unsigned char PlrHitMonst__FP12PlayerStructi(struct PlayerStruct *ptrplr, int m)")
del_items(0x80062B04)
SetType(0x80062B04, "unsigned char PlrHitPlr__FP12PlayerStructc(struct PlayerStruct *ptrplr, char p)")
del_items(0x80062EBC)
SetType(0x80062EBC, "unsigned char PlrHitObj__FP12PlayerStructii(struct PlayerStruct *ptrplr, int mx, int my)")
del_items(0x80062F3C)
SetType(0x80062F3C, "int PM_DoAttack__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x800632D0)
SetType(0x800632D0, "int PM_DoRangeAttack__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x800633D0)
SetType(0x800633D0, "void ShieldDur__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x800634A4)
SetType(0x800634A4, "int PM_DoBlock__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80063544)
SetType(0x80063544, "void do_spell_anim__FiiiP12PlayerStruct(int aframe, int spell, int clss, struct PlayerStruct *ptrplr)")
del_items(0x80063A24)
SetType(0x80063A24, "int PM_DoSpell__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80063DF0)
SetType(0x80063DF0, "void ArmorDur__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80063EFC)
SetType(0x80063EFC, "int PM_DoGotHit__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80063F90)
SetType(0x80063F90, "int PM_DoDeath__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80064178)
SetType(0x80064178, "int PM_DoNewLvl__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80064180)
SetType(0x80064180, "void CheckNewPath__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80064640)
SetType(0x80064640, "unsigned char PlrDeathModeOK__Fi(int p)")
del_items(0x800646A8)
SetType(0x800646A8, "void ValidatePlayer__Fv()")
del_items(0x80064BA4)
SetType(0x80064BA4, "void CheckCheatStats__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80064C40)
SetType(0x80064C40, "void ProcessPlayers__Fv()")
del_items(0x80064F24)
SetType(0x80064F24, "void ClrPlrPath__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80064F4C)
SetType(0x80064F4C, "unsigned char PosOkPlayer__FP12PlayerStructii(struct PlayerStruct *ptrplr, int px, int py)")
del_items(0x80065124)
SetType(0x80065124, "void MakePlrPath__FP12PlayerStructiiUc(struct PlayerStruct *ptrplr, int xx, int yy, unsigned char endspace)")
del_items(0x8006512C)
SetType(0x8006512C, "void CheckPlrSpell__Fv()")
del_items(0x8006558C)
SetType(0x8006558C, "void SyncInitPlrPos__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80065674)
SetType(0x80065674, "void SyncInitPlr__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x800656A4)
SetType(0x800656A4, "void CheckStats__Fi(int p)")
del_items(0x80065878)
SetType(0x80065878, "void ModifyPlrStr__Fii(int p, int l)")
del_items(0x80065994)
SetType(0x80065994, "void ModifyPlrMag__Fii(int p, int l)")
del_items(0x80065A80)
SetType(0x80065A80, "void ModifyPlrDex__Fii(int p, int l)")
del_items(0x80065B64)
SetType(0x80065B64, "void ModifyPlrVit__Fii(int p, int l)")
del_items(0x80065C40)
SetType(0x80065C40, "void SetPlayerHitPoints__FP12PlayerStructi(struct PlayerStruct *ptrplr, int newhp)")
del_items(0x80065C84)
SetType(0x80065C84, "void SetPlrStr__Fii(int p, int v)")
del_items(0x80065D60)
SetType(0x80065D60, "void SetPlrMag__Fii(int p, int v)")
del_items(0x80065DD0)
SetType(0x80065DD0, "void SetPlrDex__Fii(int p, int v)")
del_items(0x80065EAC)
SetType(0x80065EAC, "void SetPlrVit__Fii(int p, int v)")
del_items(0x80065F18)
SetType(0x80065F18, "void InitDungMsgs__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80065F20)
SetType(0x80065F20, "void PlayDungMsgs__Fv()")
del_items(0x80066250)
SetType(0x80066250, "void CreatePlrItems__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80066278)
SetType(0x80066278, "void WorldToOffset__FP12PlayerStructii(struct PlayerStruct *ptrplr, int x, int y)")
del_items(0x800662BC)
SetType(0x800662BC, "void SetSpdbarGoldCurs__FP12PlayerStructi(struct PlayerStruct *ptrplr, int i)")
del_items(0x800662F0)
SetType(0x800662F0, "int GetSpellLevel__FP12PlayerStructi(struct PlayerStruct *ptrplr, int val)")
del_items(0x80066324)
SetType(0x80066324, "void BreakObject__FP12PlayerStructi(struct PlayerStruct *ptrplr, int val)")
del_items(0x80066358)
SetType(0x80066358, "void CalcPlrInv__FP12PlayerStructUc(struct PlayerStruct *ptrplr, unsigned char bl)")
del_items(0x8006638C)
SetType(0x8006638C, "void RemoveSpdBarItem__FP12PlayerStructi(struct PlayerStruct *ptrplr, int val)")
del_items(0x800663C0)
SetType(0x800663C0, "void M_StartKill__FiP12PlayerStruct(int m, struct PlayerStruct *ptrplr)")
del_items(0x800663F8)
SetType(0x800663F8, "void SetGoldCurs__FP12PlayerStructi(struct PlayerStruct *ptrplr, int i)")
del_items(0x8006642C)
SetType(0x8006642C, "void HealStart__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80066454)
SetType(0x80066454, "void HealotherStart__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8006647C)
SetType(0x8006647C, "int CalculateGold__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x800664A4)
SetType(0x800664A4, "void M_StartHit__FiP12PlayerStructi(int m, struct PlayerStruct *ptrplr, int dam)")
del_items(0x800664EC)
SetType(0x800664EC, "void TeleStart__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80066514)
SetType(0x80066514, "void PhaseStart__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x8006653C)
SetType(0x8006653C, "void RemoveInvItem__FP12PlayerStructi(struct PlayerStruct *ptrplr, int i)")
del_items(0x80066570)
SetType(0x80066570, "void PhaseEnd__FP12PlayerStruct(struct PlayerStruct *ptrplr)")
del_items(0x80066598)
SetType(0x80066598, "void OperateObject__FP12PlayerStructiUc(struct PlayerStruct *ptrplr, int oi, unsigned char bl)")
del_items(0x800665DC)
SetType(0x800665DC, "void TryDisarm__FP12PlayerStructi(struct PlayerStruct *ptrplr, int oi)")
del_items(0x80066610)
SetType(0x80066610, "void TalkToTowner__FP12PlayerStructi(struct PlayerStruct *ptrplr, int val)")
del_items(0x80066644)
SetType(0x80066644, "unsigned char PosOkPlayer__Fiii(int pnum, int x, int y)")
del_items(0x80066690)
SetType(0x80066690, "int CalcStatDiff__Fi(int pnum)")
del_items(0x800666DC)
SetType(0x800666DC, "void StartNewLvl__Fiii(int pnum, int fom, int lvl)")
del_items(0x80066728)
SetType(0x80066728, "void CreatePlayer__Fic(int pnum, char c)")
del_items(0x8006677C)
SetType(0x8006677C, "void StartStand__Fii(int pnum, int dir)")
del_items(0x800667C8)
SetType(0x800667C8, "void SetPlayerHitPoints__Fii(int pnum, int val)")
del_items(0x80066814)
SetType(0x80066814, "void MakePlrPath__FiiiUc(int pnum, int xx, int yy, unsigned char endspace)")
del_items(0x80066864)
SetType(0x80066864, "void StartWarpLvl__Fii(int pnum, int pidx)")
del_items(0x800668B0)
SetType(0x800668B0, "void SyncPlrKill__Fii(int pnum, int earflag)")
del_items(0x800668FC)
SetType(0x800668FC, "void StartPlrKill__Fii(int pnum, int val)")
del_items(0x80066948)
SetType(0x80066948, "void NewPlrAnim__Fiiii(int pnum, int Peq, int numFrames, int Delay)")
del_items(0x80066994)
SetType(0x80066994, "void AddPlrExperience__Fiil(int pnum, int lvl, long exp)")
del_items(0x800669E0)
SetType(0x800669E0, "void StartPlrBlock__Fii(int pnum, int dir)")
del_items(0x80066A2C)
SetType(0x80066A2C, "void StartPlrHit__FiiUc(int pnum, int dam, unsigned char forcehit)")
del_items(0x80066A7C)
SetType(0x80066A7C, "void StartSpell__Fiiii(int pnum, int d, int cx, int cy)")
del_items(0x80066AC8)
SetType(0x80066AC8, "void InitPlayer__FiUc(int pnum, unsigned char FirstTime)")
del_items(0x80066B18)
SetType(0x80066B18, "void PM_ChangeLightOff__Fi(int pnum)")
del_items(0x80066B64)
SetType(0x80066B64, "void CheckNewPath__Fi(int pnum)")
del_items(0x80066BB0)
SetType(0x80066BB0, "void FreePlayerGFX__Fi(int pnum)")
del_items(0x80066BFC)
SetType(0x80066BFC, "void InitDungMsgs__Fi(int pnum)")
del_items(0x80066C48)
SetType(0x80066C48, "void InitPlayerGFX__Fi(int pnum)")
del_items(0x80066C94)
SetType(0x80066C94, "void SyncInitPlrPos__Fi(int pnum)")
del_items(0x80066CE0)
SetType(0x80066CE0, "void SetPlrAnims__Fi(int pnum)")
del_items(0x80066D2C)
SetType(0x80066D2C, "void ClrPlrPath__Fi(int pnum)")
del_items(0x80066D78)
SetType(0x80066D78, "void SyncInitPlr__Fi(int pnum)")
del_items(0x80066DC4)
SetType(0x80066DC4, "void RestartTownLvl__Fi(int pnum)")
del_items(0x80066E10)
SetType(0x80066E10, "void SetPlayerOld__Fi(int pnum)")
del_items(0x80066E5C)
SetType(0x80066E5C, "void GetGoldSeed__FP12PlayerStructP10ItemStruct(struct PlayerStruct *ptrplr, struct ItemStruct *h)")
del_items(0x80066E90)
SetType(0x80066E90, "void PRIM_GetPrim__FPP8POLY_FT4_addr_80066E90(struct POLY_FT4 **Prim)")
del_items(0x80066F0C)
SetType(0x80066F0C, "bool Active__11SpellTarget_addr_80066F0C(struct SpellTarget *this)")
del_items(0x80066F18)
SetType(0x80066F18, "struct CPlayer *GetPlayer__7CPlayeri_addr_80066F18(int PNum)")
del_items(0x80066F68)
SetType(0x80066F68, "int GetLastOtPos__C7CPlayer_addr_80066F68(struct CPlayer *this)")
del_items(0x80066F74)
SetType(0x80066F74, "int GetLastScrY__C7CPlayer(struct CPlayer *this)")
del_items(0x80066F80)
SetType(0x80066F80, "int GetLastScrX__C7CPlayer(struct CPlayer *this)")
del_items(0x80066F8C)
SetType(0x80066F8C, "void CheckRPortalOK__FPiT0(int *rx, int *ry)")
del_items(0x80066FCC)
SetType(0x80066FCC, "void CheckQuests__Fv()")
del_items(0x800674A4)
SetType(0x800674A4, "unsigned char ForceQuests__Fv()")
del_items(0x80067648)
SetType(0x80067648, "unsigned char QuestStatus__Fi(int i)")
del_items(0x800676DC)
SetType(0x800676DC, "void CheckQuestKill__FiUc(int m, unsigned char sendmsg)")
del_items(0x80067CA4)
SetType(0x80067CA4, "void SetReturnLvlPos__Fv()")
del_items(0x80067DB4)
SetType(0x80067DB4, "void GetReturnLvlPos__Fv()")
del_items(0x80067E08)
SetType(0x80067E08, "void ResyncQuests__Fv()")
del_items(0x800682F4)
SetType(0x800682F4, "void PrintQLString__FiiUcPcc(int x, int y, unsigned char cjustflag, char *str, int col)")
del_items(0x80068548)
SetType(0x80068548, "void DrawQuestLog__Fv()")
del_items(0x80068740)
SetType(0x80068740, "void DrawQuestLogTSK__FP4TASK(struct TASK *T)")
del_items(0x80068818)
SetType(0x80068818, "void StartQuestlog__Fv()")
del_items(0x8006894C)
SetType(0x8006894C, "void QuestlogUp__Fv()")
del_items(0x800689E4)
SetType(0x800689E4, "void QuestlogDown__Fv()")
del_items(0x80068A98)
SetType(0x80068A98, "void RemoveQLog__Fv()")
del_items(0x80068B50)
SetType(0x80068B50, "void QuestlogEnter__Fv()")
del_items(0x80068C1C)
SetType(0x80068C1C, "void QuestlogESC__Fv()")
del_items(0x80068C44)
SetType(0x80068C44, "void SetMultiQuest__FiiUci(int q, int s, unsigned char l, int v1)")
del_items(0x80068CC4)
SetType(0x80068CC4, "void _GLOBAL__D_questlog()")
del_items(0x80068CEC)
SetType(0x80068CEC, "void _GLOBAL__I_questlog()")
del_items(0x80068D14)
SetType(0x80068D14, "void SetRGB__6DialogUcUcUc_addr_80068D14(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x80068D34)
SetType(0x80068D34, "void SetBack__6Dialogi_addr_80068D34(struct Dialog *this, int Type)")
del_items(0x80068D3C)
SetType(0x80068D3C, "void SetBorder__6Dialogi_addr_80068D3C(struct Dialog *this, int Type)")
del_items(0x80068D44)
SetType(0x80068D44, "void ___6Dialog_addr_80068D44(struct Dialog *this, int __in_chrg)")
del_items(0x80068D6C)
SetType(0x80068D6C, "struct Dialog *__6Dialog_addr_80068D6C(struct Dialog *this)")
del_items(0x80068DEC)
SetType(0x80068DEC, "int GetOverlayOtBase__7CBlocks_addr_80068DEC()")
del_items(0x80068DF4)
SetType(0x80068DF4, "void DrawView__Fii(int StartX, int StartY)")
del_items(0x80068FA8)
SetType(0x80068FA8, "void DrawAndBlit__Fv()")
del_items(0x8006907C)
SetType(0x8006907C, "void FreeStoreMem__Fv()")
del_items(0x80069084)
SetType(0x80069084, "void DrawSTextBack__Fv()")
del_items(0x800690F4)
SetType(0x800690F4, "void DrawStoreArrows__Fv()")
del_items(0x80069274)
SetType(0x80069274, "void PrintSString__FiiUcPcci(int x, int y, unsigned char cjustflag, char *str, int col, int val)")
del_items(0x8006971C)
SetType(0x8006971C, "void DrawSLine__Fi(int y)")
del_items(0x800697B0)
SetType(0x800697B0, "void ClearSText__Fii(int s, int e)")
del_items(0x80069848)
SetType(0x80069848, "void AddSLine__Fi(int y)")
del_items(0x80069898)
SetType(0x80069898, "void AddSTextVal__Fii(int y, int val)")
del_items(0x800698C0)
SetType(0x800698C0, "void OffsetSTextY__Fii(int y, int yo)")
del_items(0x800698E8)
SetType(0x800698E8, "void AddSText__FiiUcPccUc(int x, int y, unsigned char j, char *str, int clr, int sel)")
del_items(0x800699A4)
SetType(0x800699A4, "void PrintStoreItem__FPC10ItemStructic(struct ItemStruct *x, int l, char iclr)")
del_items(0x80069EA8)
SetType(0x80069EA8, "void StoreAutoPlace__Fv()")
del_items(0x8006A4E8)
SetType(0x8006A4E8, "void S_StartSmith__Fv()")
del_items(0x8006A670)
SetType(0x8006A670, "void S_ScrollSBuy__Fi(int idx)")
del_items(0x8006A878)
SetType(0x8006A878, "void S_StartSBuy__Fv()")
del_items(0x8006AA48)
SetType(0x8006AA48, "void S_ScrollSPBuy__Fi(int idx)")
del_items(0x8006ACA8)
SetType(0x8006ACA8, "unsigned char S_StartSPBuy__Fv()")
del_items(0x8006AE68)
SetType(0x8006AE68, "unsigned char SmithSellOk__Fi(int i)")
del_items(0x8006AF50)
SetType(0x8006AF50, "void S_ScrollSSell__Fi(int idx)")
del_items(0x8006B1A4)
SetType(0x8006B1A4, "void S_StartSSell__Fv()")
del_items(0x8006B5DC)
SetType(0x8006B5DC, "unsigned char SmithRepairOk__Fi(int i)")
del_items(0x8006B684)
SetType(0x8006B684, "void AddStoreHoldRepair__FP10ItemStructi(struct ItemStruct *itm, int i)")
del_items(0x8006B86C)
SetType(0x8006B86C, "void S_StartSRepair__Fv()")
del_items(0x8006BD3C)
SetType(0x8006BD3C, "void S_StartWitch__Fv()")
del_items(0x8006BEC4)
SetType(0x8006BEC4, "int CheckWitchItem__Fi(int idx)")
del_items(0x8006BF68)
SetType(0x8006BF68, "void S_ScrollWBuy__Fi(int idx)")
del_items(0x8006C1AC)
SetType(0x8006C1AC, "void S_StartWBuy__Fv()")
del_items(0x8006C500)
SetType(0x8006C500, "unsigned char WitchSellOk__Fi(int i)")
del_items(0x8006C64C)
SetType(0x8006C64C, "void S_StartWSell__Fv()")
del_items(0x8006CCC4)
SetType(0x8006CCC4, "unsigned char WitchRechargeOk__Fi(int i)")
del_items(0x8006CD50)
SetType(0x8006CD50, "void AddStoreHoldRecharge__FG10ItemStructi(struct ItemStruct itm, int i)")
del_items(0x8006CED8)
SetType(0x8006CED8, "void S_StartWRecharge__Fv()")
del_items(0x8006D308)
SetType(0x8006D308, "void S_StartNoMoney__Fv()")
del_items(0x8006D370)
SetType(0x8006D370, "void S_StartNoRoom__Fv()")
del_items(0x8006D3D0)
SetType(0x8006D3D0, "void S_StartNoItems__Fv()")
del_items(0x8006D484)
SetType(0x8006D484, "void S_StartConfirm__Fv()")
del_items(0x8006D7EC)
SetType(0x8006D7EC, "void S_StartBoy__Fv()")
del_items(0x8006D994)
SetType(0x8006D994, "void S_StartBBoy__Fv()")
del_items(0x8006DBC8)
SetType(0x8006DBC8, "void S_StartHealer__Fv()")
del_items(0x8006DD9C)
SetType(0x8006DD9C, "void S_ScrollHBuy__Fi(int idx)")
del_items(0x8006DF84)
SetType(0x8006DF84, "void S_StartHBuy__Fv()")
del_items(0x8006E0BC)
SetType(0x8006E0BC, "void S_StartStory__Fv()")
del_items(0x8006E1AC)
SetType(0x8006E1AC, "unsigned char IdItemOk__FP10ItemStruct(struct ItemStruct *i)")
del_items(0x8006E1E0)
SetType(0x8006E1E0, "void AddStoreHoldId__FG10ItemStructi(struct ItemStruct itm, int i)")
del_items(0x8006E2BC)
SetType(0x8006E2BC, "void S_StartSIdentify__Fv()")
del_items(0x8006ED5C)
SetType(0x8006ED5C, "void S_StartIdShow__Fv()")
del_items(0x8006EF34)
SetType(0x8006EF34, "void S_StartTalk__Fv()")
del_items(0x8006F164)
SetType(0x8006F164, "void S_StartTavern__Fv()")
del_items(0x8006F25C)
SetType(0x8006F25C, "void S_StartBarMaid__Fv()")
del_items(0x8006F330)
SetType(0x8006F330, "void S_StartDrunk__Fv()")
del_items(0x8006F404)
SetType(0x8006F404, "void StartStore__Fc(char s)")
del_items(0x8006F760)
SetType(0x8006F760, "void DrawStoreHelpText__Fv()")
del_items(0x8006F7FC)
SetType(0x8006F7FC, "void DrawSText__Fv()")
del_items(0x8006F83C)
SetType(0x8006F83C, "void DrawSTextTSK__FP4TASK(struct TASK *T)")
del_items(0x8006F944)
SetType(0x8006F944, "void DoThatDrawSText__Fv()")
del_items(0x8006FB4C)
SetType(0x8006FB4C, "void STextESC__Fv()")
del_items(0x8006FCF0)
SetType(0x8006FCF0, "void STextUp__Fv()")
del_items(0x8006FE74)
SetType(0x8006FE74, "void STextDown__Fv()")
del_items(0x80070008)
SetType(0x80070008, "void S_SmithEnter__Fv()")
del_items(0x800700E0)
SetType(0x800700E0, "void SetGoldCurs__Fii(int pnum, int i)")
del_items(0x80070160)
SetType(0x80070160, "void SetSpdbarGoldCurs__Fii(int pnum, int i)")
del_items(0x800701E0)
SetType(0x800701E0, "void TakePlrsMoney__Fl(long cost)")
del_items(0x8007062C)
SetType(0x8007062C, "void SmithBuyItem__Fv()")
del_items(0x800708AC)
SetType(0x800708AC, "void S_SBuyEnter__Fv()")
del_items(0x80070B10)
SetType(0x80070B10, "void SmithBuyPItem__Fv()")
del_items(0x80070CD4)
SetType(0x80070CD4, "void S_SPBuyEnter__Fv()")
del_items(0x80070F40)
SetType(0x80070F40, "unsigned char StoreGoldFit__Fi(int idx)")
del_items(0x800711F8)
SetType(0x800711F8, "void PlaceStoreGold__Fl(long v)")
del_items(0x80071498)
SetType(0x80071498, "void StoreSellItem__Fv()")
del_items(0x800717DC)
SetType(0x800717DC, "void S_SSellEnter__Fv()")
del_items(0x800718EC)
SetType(0x800718EC, "void SmithRepairItem__Fv()")
del_items(0x80071B60)
SetType(0x80071B60, "void S_SRepairEnter__Fv()")
del_items(0x80071CC4)
SetType(0x80071CC4, "void S_WitchEnter__Fv()")
del_items(0x80071DA4)
SetType(0x80071DA4, "void WitchBuyItem__Fv()")
del_items(0x80072028)
SetType(0x80072028, "void S_WBuyEnter__Fv()")
del_items(0x800722B0)
SetType(0x800722B0, "void S_WSellEnter__Fv()")
del_items(0x800723F0)
SetType(0x800723F0, "void WitchRechargeItem__Fv()")
del_items(0x8007256C)
SetType(0x8007256C, "void S_WRechargeEnter__Fv()")
del_items(0x800726D0)
SetType(0x800726D0, "void S_BoyEnter__Fv()")
del_items(0x80072868)
SetType(0x80072868, "void BoyBuyItem__Fv()")
del_items(0x80072908)
SetType(0x80072908, "void HealerBuyItem__Fv()")
del_items(0x80072C34)
SetType(0x80072C34, "void S_BBuyEnter__Fv()")
del_items(0x80072E48)
SetType(0x80072E48, "void StoryIdItem__Fv()")
del_items(0x80073198)
SetType(0x80073198, "void S_ConfirmEnter__Fv()")
del_items(0x800732B4)
SetType(0x800732B4, "void S_HealerEnter__Fv()")
del_items(0x8007334C)
SetType(0x8007334C, "void S_HBuyEnter__Fv()")
del_items(0x80073580)
SetType(0x80073580, "void S_StoryEnter__Fv()")
del_items(0x8007361C)
SetType(0x8007361C, "void S_SIDEnter__Fv()")
del_items(0x800737A0)
SetType(0x800737A0, "void S_TalkEnter__Fv()")
del_items(0x800739A0)
SetType(0x800739A0, "void S_TavernEnter__Fv()")
del_items(0x80073A14)
SetType(0x80073A14, "void S_BarmaidEnter__Fv()")
del_items(0x80073A88)
SetType(0x80073A88, "void S_DrunkEnter__Fv()")
del_items(0x80073AFC)
SetType(0x80073AFC, "void STextEnter__Fv()")
del_items(0x80073CC0)
SetType(0x80073CC0, "void CheckStoreBtn__Fv()")
del_items(0x80073DAC)
SetType(0x80073DAC, "void ReleaseStoreBtn__Fv()")
del_items(0x80073DC0)
SetType(0x80073DC0, "void _GLOBAL__D_pSTextBoxCels()")
del_items(0x80073DE8)
SetType(0x80073DE8, "void _GLOBAL__I_pSTextBoxCels()")
del_items(0x80073E10)
SetType(0x80073E10, "unsigned short GetDown__C4CPad_addr_80073E10(struct CPad *this)")
del_items(0x80073E38)
SetType(0x80073E38, "void SetRGB__6DialogUcUcUc_addr_80073E38(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x80073E58)
SetType(0x80073E58, "void SetBorder__6Dialogi_addr_80073E58(struct Dialog *this, int Type)")
del_items(0x80073E60)
SetType(0x80073E60, "void ___6Dialog_addr_80073E60(struct Dialog *this, int __in_chrg)")
del_items(0x80073E88)
SetType(0x80073E88, "struct Dialog *__6Dialog_addr_80073E88(struct Dialog *this)")
del_items(0x80073F08)
SetType(0x80073F08, "int GetOverlayOtBase__7CBlocks_addr_80073F08()")
del_items(0x80073F10)
SetType(0x80073F10, "void T_DrawView__Fii(int StartX, int StartY)")
del_items(0x800740C0)
SetType(0x800740C0, "void T_FillSector__FPUcT0iiiib(unsigned char *P3Tiles, unsigned char *pSector, int xi, int yi, int w, int h, bool AddSec)")
del_items(0x80074304)
SetType(0x80074304, "void T_FillTile__FPUciii(unsigned char *P3Tiles, int xx, int yy, int t)")
del_items(0x80074414)
SetType(0x80074414, "void TownFixupBodges__Fv()")
del_items(0x80074454)
SetType(0x80074454, "void T_Pass3__Fv()")
del_items(0x800747E0)
SetType(0x800747E0, "void CreateTown__Fi(int entry)")
del_items(0x80074934)
SetType(0x80074934, "unsigned char *GRL_LoadFileInMemSig__FPCcPUl(char *Name, unsigned long *Len)")
del_items(0x80074A18)
SetType(0x80074A18, "void GRL_StripDir__FPcPCc(char *Dest, char *Src)")
del_items(0x80074AB0)
SetType(0x80074AB0, "void InitVPTriggers__Fv()")
del_items(0x80074AF8)
SetType(0x80074AF8, "bool FindLevTrig__Fiii(int x, int y, int l)")
del_items(0x80074B90)
SetType(0x80074B90, "void ScanMap__FPsi(short *list, int l)")
del_items(0x80074C98)
SetType(0x80074C98, "int FindBlock__Fii(int x, int y)")
del_items(0x80074D34)
SetType(0x80074D34, "void ChangeBlock__Fiii(int x, int y, int bl)")
del_items(0x80074E78)
SetType(0x80074E78, "void ScanBlocks__FPs(short *list)")
del_items(0x80074F80)
SetType(0x80074F80, "void BuildLevTrigs__Fv()")
del_items(0x80075114)
SetType(0x80075114, "void DrawFRIG__Fv()")
del_items(0x80075134)
SetType(0x80075134, "unsigned char ForceTownTrig__Fv()")
del_items(0x80075410)
SetType(0x80075410, "unsigned char ForceL1Trig__Fv()")
del_items(0x800755D0)
SetType(0x800755D0, "unsigned char ForceL2Trig__Fv()")
del_items(0x800758D0)
SetType(0x800758D0, "unsigned char ForceL3Trig__Fv()")
del_items(0x80075BDC)
SetType(0x80075BDC, "unsigned char ForceL4Trig__Fv()")
del_items(0x80075F18)
SetType(0x80075F18, "void Freeupstairs__Fv()")
del_items(0x80075FC8)
SetType(0x80075FC8, "unsigned char ForceSKingTrig__Fv()")
del_items(0x80076054)
SetType(0x80076054, "unsigned char ForceSChambTrig__Fv()")
del_items(0x800760E0)
SetType(0x800760E0, "unsigned char ForcePWaterTrig__Fv()")
del_items(0x8007616C)
SetType(0x8007616C, "void CheckTrigForce__Fv()")
del_items(0x80076478)
SetType(0x80076478, "void FadeGameOut__Fv()")
del_items(0x8007651C)
SetType(0x8007651C, "bool IsTrigger__Fii(int x, int y)")
del_items(0x80076614)
SetType(0x80076614, "bool CheckTrigLevel__Fi(int level)")
del_items(0x80076650)
SetType(0x80076650, "void CheckTriggers__Fi(int pnum)")
del_items(0x80076BDC)
SetType(0x80076BDC, "int GetManaAmount__Fii(int id, int sn)")
del_items(0x80076E90)
SetType(0x80076E90, "void UseMana__Fii(int id, int sn)")
del_items(0x80077020)
SetType(0x80077020, "unsigned char CheckSpell__FiicUc(int id, int sn, char st, unsigned char manaonly)")
del_items(0x800770C0)
SetType(0x800770C0, "void CastSpell__Fiiiiiiii(int id, int spl, int sx, int sy, int dx, int dy, int caster, int spllvl)")
del_items(0x800773D8)
SetType(0x800773D8, "void DoResurrect__Fii(int pnum, int rid)")
del_items(0x80077640)
SetType(0x80077640, "void DoHealOther__Fii(int pnum, int rid)")
del_items(0x800778A4)
SetType(0x800778A4, "void snd_update__FUc(unsigned char bStopAll)")
del_items(0x800778AC)
SetType(0x800778AC, "void snd_stop_snd__FP4TSnd(struct TSnd *pSnd)")
del_items(0x800778E8)
SetType(0x800778E8, "void snd_play_snd__FP4TSFXll(struct TSFX *pSnd, long lVolume, long lPan)")
del_items(0x80077930)
SetType(0x80077930, "void snd_play_msnd__FUsll(unsigned short pszName, long lVolume, long lPan)")
del_items(0x800779D0)
SetType(0x800779D0, "void snd_init__FUl(unsigned long hWnd)")
del_items(0x800779E0)
SetType(0x800779E0, "void music_stop__Fv()")
del_items(0x80077A20)
SetType(0x80077A20, "void music_fade__Fv()")
del_items(0x80077A60)
SetType(0x80077A60, "void music_start__Fi(int nTrack)")
del_items(0x80077B00)
SetType(0x80077B00, "unsigned char snd_playing__Fi(int SFXNo)")
del_items(0x80077B20)
SetType(0x80077B20, "void ClrCursor__Fi(int num)")
del_items(0x80077B7C)
SetType(0x80077B7C, "void HappyMan__Fi(int n)")
del_items(0x80077B8C)
SetType(0x80077B8C, "void flyabout__7GamePad(struct GamePad *this)")
del_items(0x80077F88)
SetType(0x80077F88, "void CloseInvChr__Fv()")
del_items(0x80077FD0)
SetType(0x80077FD0, "void WorldToOffset__Fiii(int pnum, int WorldX, int WorldY)")
del_items(0x80078050)
SetType(0x80078050, "char pad_UpIsUpRight__Fic(int pval, char other)")
del_items(0x8007810C)
SetType(0x8007810C, "struct GamePad *__7GamePadi(struct GamePad *this, int player_num)")
del_items(0x800781C0)
SetType(0x800781C0, "void SetMoveStyle__7GamePadc(struct GamePad *this, char style_num)")
del_items(0x800781C8)
SetType(0x800781C8, "void SetDownButton__7GamePadiPFi_v(struct GamePad *this, int pad_val, void (*func)())")
del_items(0x8007820C)
SetType(0x8007820C, "void SetComboDownButton__7GamePadiPFi_v(struct GamePad *this, int pad_val, void (*func)())")
del_items(0x80078250)
SetType(0x80078250, "void SetAllButtons__7GamePadP11KEY_ASSIGNS(struct GamePad *this, struct KEY_ASSIGNS *actions)")
del_items(0x800784B8)
SetType(0x800784B8, "void GetAllButtons__7GamePadP11KEY_ASSIGNS(struct GamePad *this, struct KEY_ASSIGNS *actions)")
del_items(0x80078670)
SetType(0x80078670, "int GetActionButton__7GamePadPFi_v(struct GamePad *this, void (*func)())")
del_items(0x800786CC)
SetType(0x800786CC, "void SetUpAction__7GamePadPFi_vT1(struct GamePad *this, void (*func)(), void (*upfunc)())")
del_items(0x80078708)
SetType(0x80078708, "void RunFunc__7GamePadi(struct GamePad *this, int pad)")
del_items(0x800787F4)
SetType(0x800787F4, "void ButtonDown__7GamePadi(struct GamePad *this, int button)")
del_items(0x80078C0C)
SetType(0x80078C0C, "void TestButtons__7GamePad(struct GamePad *this)")
del_items(0x80078D18)
SetType(0x80078D18, "bool CheckCentre__7GamePadi(struct GamePad *this, int dir)")
del_items(0x80078E10)
SetType(0x80078E10, "int CheckDirs__7GamePadi(struct GamePad *this, int dir)")
del_items(0x80078E40)
SetType(0x80078E40, "int CheckDirs__7GamePadiii(struct GamePad *this, int dir, int wx, int wy)")
del_items(0x80078F48)
SetType(0x80078F48, "int CheckSide__7GamePadi(struct GamePad *this, int dir)")
del_items(0x80078F88)
SetType(0x80078F88, "bool newDirOk__7GamePadi(struct GamePad *this, int dir)")
del_items(0x80079038)
SetType(0x80079038, "int CheckDiagBodge__7GamePadi(struct GamePad *this, int dir)")
del_items(0x8007932C)
SetType(0x8007932C, "int CheckIsoBodge__7GamePadi(struct GamePad *this, int dir)")
del_items(0x80079698)
SetType(0x80079698, "int CheckBodge__7GamePadi(struct GamePad *this, int dir)")
del_items(0x800797F8)
SetType(0x800797F8, "void walk__7GamePadi(struct GamePad *this, int cmd)")
del_items(0x80079B40)
SetType(0x80079B40, "void check_around_player__7GamePad(struct GamePad *this)")
del_items(0x80079E7C)
SetType(0x80079E7C, "void show_combos__7GamePad(struct GamePad *this)")
del_items(0x8007A108)
SetType(0x8007A108, "void Handle__7GamePad(struct GamePad *this)")
del_items(0x8007A804)
SetType(0x8007A804, "void GamePadTask__FP4TASK(struct TASK *T)")
del_items(0x8007A8FC)
SetType(0x8007A8FC, "struct GamePad *GetGamePad__Fi(int pnum)")
del_items(0x8007A91C)
SetType(0x8007A91C, "void PostGamePad__Fiiii(int val, int var1, int var2, int var3)")
del_items(0x8007AA20)
SetType(0x8007AA20, "void Init_GamePad__Fv()")
del_items(0x8007AA50)
SetType(0x8007AA50, "void InitGamePadVars__Fv()")
del_items(0x8007ABDC)
SetType(0x8007ABDC, "int SetWalkStyle__Fii(int pnum, int style)")
del_items(0x8007AC4C)
SetType(0x8007AC4C, "char GetPadStyle__Fi(int pnum)")
del_items(0x8007AC70)
SetType(0x8007AC70, "void _GLOBAL__I_flyflag()")
del_items(0x8007ACA8)
SetType(0x8007ACA8, "bool Active__11SpellTarget_addr_8007ACA8(struct SpellTarget *this)")
del_items(0x8007ACB4)
SetType(0x8007ACB4, "void MoveToScrollTarget__7CBlocks_addr_8007ACB4(struct CBlocks *this)")
del_items(0x8007ACC8)
SetType(0x8007ACC8, "unsigned short GetDown__C4CPad_addr_8007ACC8(struct CPad *this)")
del_items(0x8007ACF0)
SetType(0x8007ACF0, "unsigned short GetUp__C4CPad_addr_8007ACF0(struct CPad *this)")
del_items(0x8007AD18)
SetType(0x8007AD18, "unsigned short GetCur__C4CPad_addr_8007AD18(struct CPad *this)")
del_items(0x8007AD40)
SetType(0x8007AD40, "void DoGameTestStuff__Fv()")
del_items(0x8007AD6C)
SetType(0x8007AD6C, "void DoInitGameStuff__Fv()")
del_items(0x8007ADA0)
SetType(0x8007ADA0, "void *SMemAlloc(unsigned long bytes, char *filename, int linenumber, unsigned long flags)")
del_items(0x8007ADC0)
SetType(0x8007ADC0, "unsigned char SMemFree(void *ptr, char *filename, int linenumber, unsigned long flags)")
del_items(0x8007ADE0)
SetType(0x8007ADE0, "void GRL_InitGwin__Fv()")
del_items(0x8007ADEC)
SetType(0x8007ADEC, "unsigned long (*GRL_SetWindowProc__FPFUlUilUl_Ul(unsigned long (*NewProc)()))()")
del_items(0x8007ADFC)
SetType(0x8007ADFC, "void GRL_CallWindowProc__FUlUilUl(unsigned long hw, unsigned int msg, long wp, unsigned long lp)")
del_items(0x8007AE24)
SetType(0x8007AE24, "unsigned char GRL_PostMessage__FUlUilUl(unsigned long hWnd, unsigned int Msg, long wParam, unsigned long lParam)")
del_items(0x8007AED0)
SetType(0x8007AED0, "char *Msg2Txt__Fi(int Msg)")
del_items(0x8007AF18)
SetType(0x8007AF18, "enum LANG_TYPE LANG_GetLang__Fv()")
del_items(0x8007AF24)
SetType(0x8007AF24, "void LANG_SetDb__F10LANG_DB_NO(enum LANG_DB_NO NewLangDbNo)")
del_items(0x8007B0F8)
SetType(0x8007B0F8, "char *GetStr__Fi(int StrId)")
del_items(0x8007B174)
SetType(0x8007B174, "void LANG_ReloadMainTXT__Fv()")
del_items(0x8007B1B8)
SetType(0x8007B1B8, "void LANG_SetLang__F9LANG_TYPE(enum LANG_TYPE NewLanguageType)")
del_items(0x8007B2D0)
SetType(0x8007B2D0, "void DumpCurrentText__Fv()")
del_items(0x8007B328)
SetType(0x8007B328, "int CalcNumOfStrings__FPPc(char **TPtr)")
del_items(0x8007B334)
SetType(0x8007B334, "void GetLangFileName__F9LANG_TYPEPc(enum LANG_TYPE NewLanguageType, char *Dest)")
del_items(0x8007B414)
SetType(0x8007B414, "char *GetLangFileNameExt__F9LANG_TYPE(enum LANG_TYPE NewLanguageType)")
del_items(0x8007B494)
SetType(0x8007B494, "void DoPortalFX__FP8POLY_FT4iiii(struct POLY_FT4 *Ft4, int R, int G, int B, int OtPos)")
del_items(0x8007B804)
SetType(0x8007B804, "struct POLY_FT4 *TempPrintMissile__FiiiiiiiiccUcUcUcc(int ScrX, int ScrY, int OtPos, int spell, int aframe, int direction, int anim, int sfx, int xflip, int yflip, int red, int grn, int blu, int semi)")
del_items(0x8007BBEC)
SetType(0x8007BBEC, "void FuncTOWN__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007BD8C)
SetType(0x8007BD8C, "void FuncRPORTAL__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007BEA8)
SetType(0x8007BEA8, "void FuncFIREBOLT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007BF50)
SetType(0x8007BF50, "void FuncHBOLT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C008)
SetType(0x8007C008, "void FuncLIGHTNING__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C070)
SetType(0x8007C070, "void FuncGUARDIAN__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C194)
SetType(0x8007C194, "void FuncFIREWALL__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C22C)
SetType(0x8007C22C, "void FuncFIREMOVE__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C2C4)
SetType(0x8007C2C4, "void FuncFLAME__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C330)
SetType(0x8007C330, "void FuncARROW__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C3E0)
SetType(0x8007C3E0, "void FuncFARROW__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C4D8)
SetType(0x8007C4D8, "void FuncLARROW__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C5C8)
SetType(0x8007C5C8, "void FuncMAGMABALL__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C664)
SetType(0x8007C664, "void FuncBONESPIRIT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C788)
SetType(0x8007C788, "void FuncACID__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C830)
SetType(0x8007C830, "void FuncACIDSPLAT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C898)
SetType(0x8007C898, "void FuncACIDPUD__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007C900)
SetType(0x8007C900, "void FuncFLARE__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CA8C)
SetType(0x8007CA8C, "void FuncFLAREXP__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CC08)
SetType(0x8007CC08, "void FuncCBOLT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CC74)
SetType(0x8007CC74, "void FuncBOOM__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CCD4)
SetType(0x8007CCD4, "void FuncELEMENT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CDA8)
SetType(0x8007CDA8, "void FuncMISEXP__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CE14)
SetType(0x8007CE14, "void FuncRHINO__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CE1C)
SetType(0x8007CE1C, "void FuncFLASH__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CF7C)
SetType(0x8007CF7C, "void FuncMANASHIELD__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CFDC)
SetType(0x8007CFDC, "void FuncFLASH2__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007CFE4)
SetType(0x8007CFE4, "void FuncRESURRECTBEAM__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007D018)
SetType(0x8007D018, "void FuncWEAPEXP__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007D0B4)
SetType(0x8007D0B4, "void PRIM_GetPrim__FPP8POLY_FT4_addr_8007D0B4(struct POLY_FT4 **Prim)")
del_items(0x8007D130)
SetType(0x8007D130, "struct CPlayer *GetPlayer__7CPlayeri_addr_8007D130(int PNum)")
del_items(0x8007D180)
SetType(0x8007D180, "int GetLastScrY__C7CPlayer_addr_8007D180(struct CPlayer *this)")
del_items(0x8007D18C)
SetType(0x8007D18C, "int GetLastScrX__C7CPlayer_addr_8007D18C(struct CPlayer *this)")
del_items(0x8007D198)
SetType(0x8007D198, "int GetNumOfFrames__7TextDat_addr_8007D198(struct TextDat *this)")
del_items(0x8007D1AC)
SetType(0x8007D1AC, "struct FRAME_HDR *GetFr__7TextDati_addr_8007D1AC(struct TextDat *this, int FrNum)")
del_items(0x8007D1C8)
SetType(0x8007D1C8, "void ML_Init__Fv()")
del_items(0x8007D200)
SetType(0x8007D200, "int ML_GetList__Fi(int Level)")
del_items(0x8007D280)
SetType(0x8007D280, "int ML_SetRandomList__Fi(int Level)")
del_items(0x8007D318)
SetType(0x8007D318, "int ML_SetList__Fii(int Level, int List)")
del_items(0x8007D3C8)
SetType(0x8007D3C8, "int ML_GetPresetMonsters__FiPiUl(int currlevel, int *typelist, unsigned long QuestsNeededMask)")
del_items(0x8007D5B8)
SetType(0x8007D5B8, "struct POLY_FT4 *DefaultObjPrint__FP12ObjectStructiiP7TextDatiii(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos, int XOffSet, int YOffSet)")
del_items(0x8007D74C)
SetType(0x8007D74C, "struct POLY_FT4 *LightObjPrint__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007D810)
SetType(0x8007D810, "struct POLY_FT4 *PrintOBJ_SARC__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007D8D8)
SetType(0x8007D8D8, "void ResetFlames__Fv()")
del_items(0x8007D9A0)
SetType(0x8007D9A0, "void PrintOBJ_FIRE__Fiii(int ScrX, int ScrY, int OtPos)")
del_items(0x8007DB58)
SetType(0x8007DB58, "struct POLY_FT4 *DoorObjPrint__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007DD94)
SetType(0x8007DD94, "void DrawLightSpark__Fiii(int xo, int yo, int ot)")
del_items(0x8007DE74)
SetType(0x8007DE74, "struct POLY_FT4 *PrintOBJ_L1LIGHT__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007DED4)
SetType(0x8007DED4, "void PrintTorchStick__Fiiii(int x, int y, int f, int OtPos)")
del_items(0x8007DF68)
SetType(0x8007DF68, "struct POLY_FT4 *PrintOBJ_TORCHL__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007DFEC)
SetType(0x8007DFEC, "struct POLY_FT4 *PrintOBJ_TORCHR__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E070)
SetType(0x8007E070, "struct POLY_FT4 *PrintOBJ_TORCHL2__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E0FC)
SetType(0x8007E0FC, "struct POLY_FT4 *PrintOBJ_TORCHR2__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E188)
SetType(0x8007E188, "struct POLY_FT4 *PrintOBJ_BARRELEX__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E2E0)
SetType(0x8007E2E0, "struct POLY_FT4 *PrintOBJ_SHRINEL__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E3B8)
SetType(0x8007E3B8, "struct POLY_FT4 *PrintOBJ_SHRINER__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E490)
SetType(0x8007E490, "struct POLY_FT4 *PrintOBJ_BOOKCANDLE__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E4B4)
SetType(0x8007E4B4, "struct POLY_FT4 *PrintOBJ_MCIRCLE1__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E650)
SetType(0x8007E650, "struct POLY_FT4 *PrintOBJ_STORYBOOK__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E7D8)
SetType(0x8007E7D8, "struct POLY_FT4 *PrintOBJ_STORYCANDLE__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E7FC)
SetType(0x8007E7FC, "struct POLY_FT4 *PrintOBJ_CANDLE1__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E820)
SetType(0x8007E820, "struct POLY_FT4 *PrintOBJ_CANDLE2__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E844)
SetType(0x8007E844, "struct POLY_FT4 *PrintOBJ_STAND__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E880)
SetType(0x8007E880, "struct POLY_FT4 *PrintOBJ_SKFIRE__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007E8E4)
SetType(0x8007E8E4, "struct POLY_FT4 *PRIM_GetCopy__FP8POLY_FT4_addr_8007E8E4(struct POLY_FT4 *Prim)")
del_items(0x8007E920)
SetType(0x8007E920, "void PRIM_CopyPrim__FP8POLY_FT4T0_addr_8007E920(struct POLY_FT4 *Dest, struct POLY_FT4 *Source)")
del_items(0x8007E948)
SetType(0x8007E948, "void PRIM_GetPrim__FPP8POLY_FT4_addr_8007E948(struct POLY_FT4 **Prim)")
del_items(0x8007E9C4)
SetType(0x8007E9C4, "int GetNumOfFrames__7TextDatii_addr_8007E9C4(struct TextDat *this, int Creature, int Action)")
del_items(0x8007E9FC)
SetType(0x8007E9FC, "struct CCreatureHdr *GetCreature__7TextDati_addr_8007E9FC(struct TextDat *this, int Creature)")
del_items(0x8007EA18)
SetType(0x8007EA18, "struct FRAME_HDR *GetFr__7TextDati_addr_8007EA18(struct TextDat *this, int FrNum)")
del_items(0x8007EA34)
SetType(0x8007EA34, "void LoadPalette__FPCc(char *pszFileName)")
del_items(0x8007EA3C)
SetType(0x8007EA3C, "void LoadRndLvlPal__Fi(int l)")
del_items(0x8007EA44)
SetType(0x8007EA44, "void ResetPal__Fv()")
del_items(0x8007EA4C)
SetType(0x8007EA4C, "void SetFadeLevel__Fi(int fadeval)")
del_items(0x8007EA7C)
SetType(0x8007EA7C, "bool GetFadeState__Fv()")
del_items(0x8007EA88)
SetType(0x8007EA88, "void SetPolyXY__FP8POLY_GT4PUc(struct POLY_GT4 *gt4, unsigned char *coords)")
del_items(0x8007EBA4)
SetType(0x8007EBA4, "void SmearScreen__Fv()")
del_items(0x8007EBAC)
SetType(0x8007EBAC, "void DrawFadedScreen__Fv()")
del_items(0x8007EC34)
SetType(0x8007EC34, "void BlackPalette__Fv()")
del_items(0x8007ED30)
SetType(0x8007ED30, "void PaletteFadeInTask__FP4TASK(struct TASK *T)")
del_items(0x8007EDC0)
SetType(0x8007EDC0, "bool PaletteFadeIn__Fi(int fr)")
del_items(0x8007EE18)
SetType(0x8007EE18, "void PaletteFadeOutTask__FP4TASK(struct TASK *T)")
del_items(0x8007EEC8)
SetType(0x8007EEC8, "bool PaletteFadeOut__Fi(int fr)")
del_items(0x8007EF1C)
SetType(0x8007EF1C, "int GetMaxOtPos__7CBlocks_addr_8007EF1C()")
del_items(0x8007EF24)
SetType(0x8007EF24, "void M_CheckEFlag__Fi(int i)")
del_items(0x8007EF4C)
SetType(0x8007EF4C, "void M_ClearSquares__Fi(int i)")
del_items(0x8007F08C)
SetType(0x8007F08C, "unsigned char IsSkel__Fi(int mt)")
del_items(0x8007F0EC)
SetType(0x8007F0EC, "void NewMonsterAnim__FiR10AnimStructii(int i, struct AnimStruct *anim, int md, int AnimType)")
del_items(0x8007F140)
SetType(0x8007F140, "unsigned char M_Talker__Fi(int i)")
del_items(0x8007F1A8)
SetType(0x8007F1A8, "void M_Enemy__Fi(int i)")
del_items(0x8007F3C0)
SetType(0x8007F3C0, "void ClearMVars__Fi(int i)")
del_items(0x8007F43C)
SetType(0x8007F43C, "void InitMonster__Fiiiii(int i, int rd, int mtype, int x, int y)")
del_items(0x8007F9D4)
SetType(0x8007F9D4, "int AddMonster__FiiiiUc(int x, int y, int dir, int mtype, int InMap)")
del_items(0x8007FA74)
SetType(0x8007FA74, "void M_StartStand__Fii(int i, int md)")
del_items(0x8007FBE4)
SetType(0x8007FBE4, "void M_UpdateLeader__Fi(int i)")
del_items(0x8007FCF4)
SetType(0x8007FCF4, "void ActivateSpawn__Fiiii(int i, int x, int y, int dir)")
del_items(0x8007FD94)
SetType(0x8007FD94, "unsigned char SpawnSkeleton__Fiii(int ii, int x, int y)")
del_items(0x8007FF84)
SetType(0x8007FF84, "void M_StartSpStand__Fii(int i, int md)")
del_items(0x8008006C)
SetType(0x8008006C, "unsigned char PosOkMonst__Fiii(int i, int x, int y)")
del_items(0x800802C0)
SetType(0x800802C0, "unsigned char CanPut__Fii(int i, int j)")
del_items(0x80080574)
SetType(0x80080574, "int encode_enemy__Fi(int m)")
del_items(0x800805D4)
SetType(0x800805D4, "unsigned short GetAutomapType__FiiUc(int x, int y, unsigned char view)")
del_items(0x800806A8)
SetType(0x800806A8, "void SetAutomapView__Fii(int x, int y)")
del_items(0x80080AF8)
SetType(0x80080AF8, "void AddWarpMissile__Fiii(int i, int x, int y)")
del_items(0x80080BE0)
SetType(0x80080BE0, "void SyncPortals__Fv()")
del_items(0x80080D34)
SetType(0x80080D34, "void ActivatePortal__FiiiiiUc(int i, int x, int y, int lvl, int lvltype, int sp)")
del_items(0x80080DC0)
SetType(0x80080DC0, "void DeactivatePortal__Fi(int i)")
del_items(0x80080DE0)
SetType(0x80080DE0, "unsigned char PortalOnLevel__Fi(int i)")
del_items(0x80080E18)
SetType(0x80080E18, "void DelMis__Fii(int mi, int i)")
del_items(0x80080E78)
SetType(0x80080E78, "void RemovePortalMissile__Fi(int id)")
del_items(0x80080FD4)
SetType(0x80080FD4, "void SetCurrentPortal__Fi(int p)")
del_items(0x80080FE0)
SetType(0x80080FE0, "void GetPortalLevel__Fv()")
del_items(0x80081144)
SetType(0x80081144, "void GetPortalLvlPos__Fv()")
del_items(0x800811F8)
SetType(0x800811F8, "struct CompLevelMaps *__13CompLevelMapsRC9CompClass(struct CompLevelMaps *this, struct CompClass *NewCompObj)")
del_items(0x80081264)
SetType(0x80081264, "void ___13CompLevelMaps(struct CompLevelMaps *this, int __in_chrg)")
del_items(0x800812F4)
SetType(0x800812F4, "void Init__13CompLevelMaps(struct CompLevelMaps *this)")
del_items(0x80081324)
SetType(0x80081324, "void InitAllMaps__13CompLevelMaps(struct CompLevelMaps *this)")
del_items(0x80081378)
SetType(0x80081378, "struct DLevel *GetMap__13CompLevelMapsi(struct CompLevelMaps *this, int MapNum)")
del_items(0x800813F4)
SetType(0x800813F4, "void ReleaseMap__13CompLevelMapsP6DLevel(struct CompLevelMaps *this, struct DLevel *Dl)")
del_items(0x80081494)
SetType(0x80081494, "void ImportData__13CompLevelMapsP14CompressedLevs(struct CompLevelMaps *this, struct CompressedLevs *Levs)")
del_items(0x80081540)
SetType(0x80081540, "int ExportData__13CompLevelMapsPUc(struct CompLevelMaps *this, unsigned char *U8Dest)")
del_items(0x800815EC)
SetType(0x800815EC, "void MakeSureMapXDecomped__13CompLevelMapsi(struct CompLevelMaps *this, int MapNum)")
del_items(0x80081698)
SetType(0x80081698, "void Init__4AMap(struct AMap *this)")
del_items(0x80081704)
SetType(0x80081704, "int WriteCompressed__4AMapPUcRC9CompClass(struct AMap *this, unsigned char *Dest, struct CompClass *CompObj)")
del_items(0x80081778)
SetType(0x80081778, "void SetCompData__4AMapPCUci(struct AMap *this, unsigned char *Data, int NewSize)")
del_items(0x80081868)
SetType(0x80081868, "struct DLevel *GetMap__4AMap(struct AMap *this)")
del_items(0x80081988)
SetType(0x80081988, "void ReleaseMap__4AMapP6DLevel(struct AMap *this, struct DLevel *Dl)")
del_items(0x80081A18)
SetType(0x80081A18, "void CompressMap__4AMapRC9CompClass(struct AMap *this, struct CompClass *CompObj)")
del_items(0x80081BDC)
SetType(0x80081BDC, "void DecompressMap__4AMapRC9CompClass(struct AMap *this, struct CompClass *CompObj)")
del_items(0x80081D10)
SetType(0x80081D10, "void CheckMapNum__13CompLevelMapsi(struct CompLevelMaps *this, int MapNum)")
del_items(0x80081D44)
SetType(0x80081D44, "bool IsCompressed__4AMap(struct AMap *this)")
del_items(0x80081D50)
SetType(0x80081D50, "void ___4AMap(struct AMap *this, int __in_chrg)")
del_items(0x80081D98)
SetType(0x80081D98, "struct AMap *__4AMap(struct AMap *this)")
del_items(0x80081DCC)
SetType(0x80081DCC, "bool IS_GameOver__Fv()")
del_items(0x80081DF4)
SetType(0x80081DF4, "void GO_DoGameOver__Fv()")
del_items(0x80081E3C)
SetType(0x80081E3C, "void GameOverTask__FP4TASK(struct TASK *T)")
del_items(0x80082040)
SetType(0x80082040, "void PrintGameOver__Fv()")
del_items(0x80082180)
SetType(0x80082180, "unsigned short GetDown__C4CPad_addr_80082180(struct CPad *this)")
del_items(0x800821A8)
SetType(0x800821A8, "void SetRGB__6DialogUcUcUc_addr_800821A8(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x800821C8)
SetType(0x800821C8, "void SetBack__6Dialogi_addr_800821C8(struct Dialog *this, int Type)")
del_items(0x800821D0)
SetType(0x800821D0, "void SetBorder__6Dialogi_addr_800821D0(struct Dialog *this, int Type)")
del_items(0x800821D8)
SetType(0x800821D8, "void ___6Dialog_addr_800821D8(struct Dialog *this, int __in_chrg)")
del_items(0x80082200)
SetType(0x80082200, "struct Dialog *__6Dialog_addr_80082200(struct Dialog *this)")
del_items(0x80082280)
SetType(0x80082280, "int GetOverlayOtBase__7CBlocks_addr_80082280()")
del_items(0x80082288)
SetType(0x80082288, "int GetMaxOtPos__7CBlocks_addr_80082288()")
del_items(0x80082290)
SetType(0x80082290, "void VER_InitVersion__Fv()")
del_items(0x800822D4)
SetType(0x800822D4, "char *VER_GetVerString__Fv()")
del_items(0x800822E4)
SetType(0x800822E4, "int CharPair2Num__FPc(char *Str)")
del_items(0x8008230C)
SetType(0x8008230C, "int FindGetItem__FiUsi(int idx, unsigned short ci, int iseed)")
del_items(0x800823C0)
SetType(0x800823C0, "void gamemenu_off__Fv()")
del_items(0x800823C8)
SetType(0x800823C8, "void DPIECE_ERROR__Fv()")
del_items(0x800823D0)
SetType(0x800823D0, "void AllocdPiece__Fv()")
del_items(0x80082428)
SetType(0x80082428, "void FreedPiece__Fv()")
del_items(0x8008246C)
SetType(0x8008246C, "void ConvertdPiece__Fv()")
del_items(0x80082634)
SetType(0x80082634, "short GetDPiece__Fii(int x, int y)")
del_items(0x800826BC)
SetType(0x800826BC, "void SetDPiece__Fiis(int x, int y, short v)")
del_items(0x80082750)
SetType(0x80082750, "void SetdDead__FiiUc(int x, int y, unsigned char v)")
del_items(0x80082790)
SetType(0x80082790, "unsigned char GetdDead__Fii(int x, int y)")
del_items(0x800827B8)
SetType(0x800827B8, "void SetSOLID__Fii(int x, int y)")
del_items(0x80082844)
SetType(0x80082844, "void ClearSOLID__Fii(int x, int y)")
del_items(0x800828D0)
SetType(0x800828D0, "bool GetSOLID__Fii(int x, int y)")
del_items(0x80082918)
SetType(0x80082918, "void SetMISSILE__Fii(int x, int y)")
del_items(0x800829A4)
SetType(0x800829A4, "void ClearMISSILE__Fii(int x, int y)")
del_items(0x80082A30)
SetType(0x80082A30, "bool GetMISSILE__Fii(int x, int y)")
del_items(0x80082A60)
SetType(0x80082A60, "void SetBLOCK__Fii(int x, int y)")
del_items(0x80082AEC)
SetType(0x80082AEC, "void ClearBLOCK__Fii(int x, int y)")
del_items(0x80082B78)
SetType(0x80082B78, "bool GetBLOCK__Fii(int x, int y)")
del_items(0x80082BA8)
SetType(0x80082BA8, "void SetTRAP__Fii(int x, int y)")
del_items(0x80082C34)
SetType(0x80082C34, "void ClearTRAP__Fii(int x, int y)")
del_items(0x80082CC0)
SetType(0x80082CC0, "bool GetTRAP__Fii(int x, int y)")
del_items(0x8001FEFC)
SetType(0x8001FEFC, "void DoEpi(struct TASK *T)")
del_items(0x8001FF4C)
SetType(0x8001FF4C, "void DoPro(struct TASK *T)")
del_items(0x8001FF9C)
SetType(0x8001FF9C, "unsigned char TSK_OpenModule(unsigned long MemType)")
del_items(0x80020010)
SetType(0x80020010, "struct TASK *TSK_AddTask(unsigned long Id, void (*Main)(), int StackSize, int DataSize)")
del_items(0x800201F8)
SetType(0x800201F8, "void TSK_DoTasks()")
del_items(0x800203B8)
SetType(0x800203B8, "void TSK_Sleep(int Frames)")
del_items(0x80020494)
SetType(0x80020494, "void ReturnToSchedulerIfCurrentTask(struct TASK *T)")
del_items(0x8002051C)
SetType(0x8002051C, "void TSK_Die()")
del_items(0x80020548)
SetType(0x80020548, "void TSK_Kill(struct TASK *T)")
del_items(0x80020598)
SetType(0x80020598, "struct TASK *TSK_GetFirstActive()")
del_items(0x800205A8)
SetType(0x800205A8, "unsigned char TSK_IsStackCorrupted(struct TASK *T)")
del_items(0x80020624)
SetType(0x80020624, "void TSK_JumpAndResetStack(void (*RunFunc)())")
del_items(0x8002066C)
SetType(0x8002066C, "void TSK_RepointProc(struct TASK *T, void (*Func)())")
del_items(0x800206B0)
SetType(0x800206B0, "struct TASK *TSK_GetCurrentTask()")
del_items(0x800206C0)
SetType(0x800206C0, "unsigned char TSK_IsCurrentTask(struct TASK *T)")
del_items(0x800206D8)
SetType(0x800206D8, "struct TASK *TSK_Exist(struct TASK *T, unsigned long Id, unsigned long Mask)")
del_items(0x80020730)
SetType(0x80020730, "void TSK_SetExecFilter(unsigned long Id, unsigned long Mask)")
del_items(0x80020748)
SetType(0x80020748, "void TSK_ClearExecFilter()")
del_items(0x8002076C)
SetType(0x8002076C, "int TSK_KillTasks(struct TASK *CallingT, unsigned long Id, unsigned long Mask)")
del_items(0x8002086C)
SetType(0x8002086C, "void TSK_IterateTasks(unsigned long Id, unsigned long Mask, void (*CallBack)())")
del_items(0x800208E4)
SetType(0x800208E4, "void TSK_MakeTaskInactive(struct TASK *T)")
del_items(0x800208F8)
SetType(0x800208F8, "void TSK_MakeTaskActive(struct TASK *T)")
del_items(0x8002090C)
SetType(0x8002090C, "void TSK_MakeTaskImmortal(struct TASK *T)")
del_items(0x80020920)
SetType(0x80020920, "void TSK_MakeTaskMortal(struct TASK *T)")
del_items(0x80020934)
SetType(0x80020934, "unsigned char TSK_IsTaskActive(struct TASK *T)")
del_items(0x80020948)
SetType(0x80020948, "unsigned char TSK_IsTaskMortal(struct TASK *T)")
del_items(0x8002095C)
SetType(0x8002095C, "void DetachFromList(struct TASK **Head, struct TASK *ThisObj)")
del_items(0x800209A8)
SetType(0x800209A8, "void AddToList(struct TASK **Head, struct TASK *ThisObj)")
del_items(0x800209C8)
SetType(0x800209C8, "void LoTskKill(struct TASK *T)")
del_items(0x80020A38)
SetType(0x80020A38, "void ExecuteTask(struct TASK *T)")
del_items(0x80020A88)
SetType(0x80020A88, "void (*TSK_SetDoTasksPrologue(void (*Func)()))()")
del_items(0x80020AA0)
SetType(0x80020AA0, "void (*TSK_SetDoTasksEpilogue(void (*Func)()))()")
del_items(0x80020AB8)
SetType(0x80020AB8, "void (*TSK_SetTaskPrologue(void (*Pro)()))()")
del_items(0x80020AD0)
SetType(0x80020AD0, "void (*TSK_SetTaskEpilogue(void (*Epi)()))()")
del_items(0x80020AE8)
SetType(0x80020AE8, "void TSK_SetEpiProFilter(unsigned long Id, unsigned long Mask)")
del_items(0x80020B00)
SetType(0x80020B00, "void TSK_ClearEpiProFilter()")
del_items(0x80020B34)
SetType(0x80020B34, "void TSK_SetExtraStackProtection(unsigned char OnOff)")
del_items(0x80020B44)
SetType(0x80020B44, "void (*TSK_SetStackFloodCallback(void (*Func)()))()")
del_items(0x80020B5C)
SetType(0x80020B5C, "int TSK_SetExtraStackSize(int Size)")
del_items(0x80020B84)
SetType(0x80020B84, "void ExtraMarkStack(unsigned long *Stack, int SizeLongs)")
del_items(0x80020BB0)
SetType(0x80020BB0, "int CheckExtraStack(unsigned long *Stack, int LongsToCheck)")
del_items(0x80020BEC)
SetType(0x80020BEC, "void TICK_InitModule()")
del_items(0x80020C0C)
SetType(0x80020C0C, "void TICK_Set(unsigned long Val)")
del_items(0x80020C1C)
SetType(0x80020C1C, "unsigned long TICK_Get()")
del_items(0x80020C2C)
SetType(0x80020C2C, "void TICK_Update()")
del_items(0x80020C4C)
SetType(0x80020C4C, "unsigned long TICK_GetAge(unsigned long OldTick)")
del_items(0x80020C78)
SetType(0x80020C78, "char *TICK_GetDateString()")
del_items(0x80020C88)
SetType(0x80020C88, "char *TICK_GetTimeString()")
del_items(0x80020C98)
SetType(0x80020C98, "unsigned char GU_InitModule()")
del_items(0x80020CC4)
SetType(0x80020CC4, "void GU_SetRndSeed(unsigned long *Tab)")
del_items(0x80020CF4)
SetType(0x80020CF4, "unsigned long GU_GetRnd()")
del_items(0x80020D84)
SetType(0x80020D84, "long GU_GetSRnd()")
del_items(0x80020DA4)
SetType(0x80020DA4, "unsigned long GU_GetRndRange(unsigned int Range)")
del_items(0x80020DE0)
SetType(0x80020DE0, "unsigned int GU_AlignVal(unsigned int w, unsigned int round)")
del_items(0x80020E04)
SetType(0x80020E04, "void main()")
del_items(0x80020E54)
SetType(0x80020E54, "unsigned char DBG_OpenModule()")
del_items(0x80020E5C)
SetType(0x80020E5C, "void DBG_PollHost()")
del_items(0x80020E64)
SetType(0x80020E64, "void DBG_Halt()")
del_items(0x80020E6C)
SetType(0x80020E6C, "void DBG_SendMessage(char *e)")
del_items(0x80020E84)
SetType(0x80020E84, "void DBG_SetMessageHandler(void (*Func)())")
del_items(0x80020E94)
SetType(0x80020E94, "void DBG_Error(char *Text, char *File, int Line)")
del_items(0x80020EC8)
SetType(0x80020EC8, "void DBG_SetErrorFunc(void (*EFunc)())")
del_items(0x80020ED8)
SetType(0x80020ED8, "void SendPsyqString(char *e)")
del_items(0x80020EE0)
SetType(0x80020EE0, "void DBG_SetPollRoutine(void (*Func)())")
del_items(0x80020EF0)
SetType(0x80020EF0, "unsigned long GTIMSYS_GetTimer()")
del_items(0x80020F14)
SetType(0x80020F14, "void GTIMSYS_ResetTimer()")
del_items(0x80020F38)
SetType(0x80020F38, "unsigned long GTIMSYS_InitTimer()")
del_items(0x8002116C)
SetType(0x8002116C, "struct MEM_INFO *GSYS_GetWorkMemInfo()")
del_items(0x8002117C)
SetType(0x8002117C, "void GSYS_SetStackAndJump(void *Stack, void (*Func)(), void *Param)")
del_items(0x800211B8)
SetType(0x800211B8, "void GSYS_MarkStack(void *Stack, unsigned long StackSize)")
del_items(0x800211C8)
SetType(0x800211C8, "unsigned char GSYS_IsStackCorrupted(void *Stack, unsigned long StackSize)")
del_items(0x800211E0)
SetType(0x800211E0, "unsigned char GSYS_InitMachine()")
del_items(0x80021234)
SetType(0x80021234, "unsigned char GSYS_CheckPtr(void *Ptr)")
del_items(0x80021268)
SetType(0x80021268, "unsigned char GSYS_IsStackOutOfBounds(void *Stack, unsigned long StackSize)")
del_items(0x800212D4)
SetType(0x800212D4, "void GAL_SetErrorChecking(unsigned char OnOff)")
del_items(0x800212E4)
SetType(0x800212E4, "long GAL_SplitBlock(long CurBlock, unsigned long Size)")
del_items(0x80021404)
SetType(0x80021404, "void GAL_InitModule()")
del_items(0x800214BC)
SetType(0x800214BC, "unsigned char GAL_AddMemType(struct MEM_INIT_INFO *M)")
del_items(0x800215DC)
SetType(0x800215DC, "long GAL_Alloc(unsigned long Size, unsigned long Type, char *Name)")
del_items(0x80021774)
SetType(0x80021774, "void *GAL_Lock(long Handle)")
del_items(0x800217DC)
SetType(0x800217DC, "unsigned char GAL_Unlock(long Handle)")
del_items(0x80021860)
SetType(0x80021860, "unsigned char GAL_Free(long Handle)")
del_items(0x80021908)
SetType(0x80021908, "unsigned long GAL_GetFreeMem(unsigned long Type)")
del_items(0x8002197C)
SetType(0x8002197C, "unsigned long GAL_GetUsedMem(unsigned long Type)")
del_items(0x800219F0)
SetType(0x800219F0, "unsigned long GAL_LargestFreeBlock(unsigned long Type)")
del_items(0x80021A6C)
SetType(0x80021A6C, "void AttachHdrToList(struct MEM_HDR **Head, struct MEM_HDR *Block)")
del_items(0x80021A8C)
SetType(0x80021A8C, "void DetachHdrFromList(struct MEM_HDR **Head, struct MEM_HDR *Block)")
del_items(0x80021AD8)
SetType(0x80021AD8, "unsigned char IsActiveValidHandle(long Handle)")
del_items(0x80021B10)
SetType(0x80021B10, "void *AlignPtr(void *P, unsigned long Align)")
del_items(0x80021B40)
SetType(0x80021B40, "unsigned long AlignSize(unsigned long Size, unsigned long Align)")
del_items(0x80021B70)
SetType(0x80021B70, "struct MEM_HDR *FindClosestSizedBlock(struct MEM_HDR *Head, unsigned long Size)")
del_items(0x80021BC8)
SetType(0x80021BC8, "struct MEM_HDR *FindHighestMemBlock(struct MEM_HDR *Head, unsigned long Size)")
del_items(0x80021C30)
SetType(0x80021C30, "struct MEM_HDR *FindLowestMemBlock(struct MEM_HDR *Head, unsigned long Size)")
del_items(0x80021C98)
SetType(0x80021C98, "struct MEM_INIT_INFO *GetMemInitInfoBlockFromType(unsigned long Type)")
del_items(0x80021CD4)
SetType(0x80021CD4, "void MergeToEmptyList(struct MEM_INIT_INFO *MI, struct MEM_HDR *M)")
del_items(0x80021DA8)
SetType(0x80021DA8, "long GAL_AllocAt(unsigned long Size, void *Addr, unsigned long Type, char *Name)")
del_items(0x80021E84)
SetType(0x80021E84, "long LoAlloc(struct MEM_INIT_INFO *M, struct MEM_HDR *Block, void *Addr, unsigned long Size, char *Name)")
del_items(0x8002201C)
SetType(0x8002201C, "struct MEM_HDR *FindBlockInTheseBounds(struct MEM_HDR *Head, void *Addr, unsigned long Size)")
del_items(0x80022088)
SetType(0x80022088, "struct MEM_HDR *GetFreeMemHdrBlock()")
del_items(0x80022110)
SetType(0x80022110, "void ReleaseMemHdrBlock(struct MEM_HDR *Index)")
del_items(0x80022150)
SetType(0x80022150, "void GAL_IterateEmptyMem(unsigned long MemType, void (*Func)())")
del_items(0x800221D4)
SetType(0x800221D4, "void GAL_IterateUsedMem(unsigned long MemType, void (*Func)())")
del_items(0x80022270)
SetType(0x80022270, "unsigned char GAL_SetMemName(long Hnd, char *Text)")
del_items(0x800222E0)
SetType(0x800222E0, "unsigned long GAL_TotalMem(unsigned long Type)")
del_items(0x80022334)
SetType(0x80022334, "void *GAL_MemBase(unsigned long Type)")
del_items(0x80022388)
SetType(0x80022388, "unsigned char GAL_DefragMem(unsigned long type)")
del_items(0x8002240C)
SetType(0x8002240C, "unsigned char GSetError(enum GAL_ERROR_CODE Err)")
del_items(0x80022468)
SetType(0x80022468, "unsigned char GAL_CheckMem(unsigned long Type)")
del_items(0x80022564)
SetType(0x80022564, "unsigned char CheckCollisions(struct MEM_INIT_INFO *M, struct MEM_HDR *MemHdr)")
del_items(0x80022610)
SetType(0x80022610, "unsigned char AreBlocksColliding(struct MEM_HDR *Hdr1, struct MEM_HDR *Hdr2)")
del_items(0x80022668)
SetType(0x80022668, "char *GAL_GetErrorText(enum GAL_ERROR_CODE Err)")
del_items(0x80022698)
SetType(0x80022698, "enum GAL_ERROR_CODE GAL_GetLastErrorCode()")
del_items(0x800226A8)
SetType(0x800226A8, "char *GAL_GetLastErrorText()")
del_items(0x800226D0)
SetType(0x800226D0, "int GAL_HowManyEmptyRegions(unsigned long Type)")
del_items(0x80022738)
SetType(0x80022738, "int GAL_HowManyUsedRegions(unsigned long Type)")
del_items(0x800227A0)
SetType(0x800227A0, "void GAL_SetTimeStamp(int Time)")
del_items(0x800227B0)
SetType(0x800227B0, "void GAL_IncTimeStamp()")
del_items(0x800227D0)
SetType(0x800227D0, "int GAL_GetTimeStamp()")
del_items(0x800227E0)
SetType(0x800227E0, "long GAL_AlignSizeToType(unsigned long Size, unsigned long MemType)")
del_items(0x80022830)
SetType(0x80022830, "long GAL_AllocMultiStruct(struct GAL_STRUCT *G, unsigned long Type, char *Name)")
del_items(0x80022880)
SetType(0x80022880, "unsigned int GAL_ProcessMultiStruct(struct GAL_STRUCT *G, unsigned long Type)")
del_items(0x8002292C)
SetType(0x8002292C, "long GAL_GetSize(long hnd)")
del_items(0x80022988)
SetType(0x80022988, "unsigned char GazDefragMem(unsigned long MemType)")
del_items(0x80022AF0)
SetType(0x80022AF0, "void PutBlocksInRegionIntoList(struct MEM_REG *Reg, struct MEM_HDR **ToList, struct MEM_HDR **FromList)")
del_items(0x80022B94)
SetType(0x80022B94, "unsigned char CollideRegions(struct MEM_REG *Reg1, struct MEM_REG *Reg2)")
del_items(0x80022BC8)
SetType(0x80022BC8, "void DeleteEmptyBlocks(struct MEM_INIT_INFO *M)")
del_items(0x80022C34)
SetType(0x80022C34, "unsigned char GetRegion(struct MEM_REG *Reg, struct MEM_HDR *LockedBlocks, struct MEM_INIT_INFO *M)")
del_items(0x80022D2C)
SetType(0x80022D2C, "struct MEM_HDR *FindNextBlock(void *Addr, struct MEM_HDR *Blocks)")
del_items(0x80022D68)
SetType(0x80022D68, "unsigned long ShuffleBlocks(struct MEM_HDR *Blocks, struct MEM_REG *Reg, struct MEM_INIT_INFO *M)")
del_items(0x80022DF8)
SetType(0x80022DF8, "void PutAllLockedBlocksOntoList(struct MEM_HDR **ToHead, struct MEM_HDR **FromHead)")
del_items(0x80022E74)
SetType(0x80022E74, "void SortMemHdrListByAddr(struct MEM_HDR **Head)")
del_items(0x80022F28)
SetType(0x80022F28, "void GraftMemHdrList(struct MEM_HDR **ToList, struct MEM_HDR **FromList)")
del_items(0x80022F84)
SetType(0x80022F84, "void GAL_MemDump(unsigned long Type)")
del_items(0x80022FF8)
SetType(0x80022FF8, "void GAL_SetVerbosity(enum GAL_VERB_LEV G)")
del_items(0x80023008)
SetType(0x80023008, "int CountFreeBlocks()")
del_items(0x80023034)
SetType(0x80023034, "void SetBlockName(struct MEM_HDR *MemHdr, char *NewName)")
del_items(0x8002307C)
SetType(0x8002307C, "int GAL_GetNumFreeHeaders()")
del_items(0x8002308C)
SetType(0x8002308C, "unsigned long GAL_GetLastTypeAlloced()")
del_items(0x8002309C)
SetType(0x8002309C, "void (*GAL_SetAllocFilter(void (*NewFilter)()))()")
del_items(0x800230B4)
SetType(0x800230B4, "unsigned char GAL_SortUsedRegionsBySize(unsigned long MemType)")
del_items(0x80023108)
SetType(0x80023108, "unsigned char SortSize(struct MEM_HDR *B1, struct MEM_HDR *B2)")
del_items(0x80023118)
SetType(0x80023118, "unsigned char GAL_SortUsedRegionsByAddress(unsigned long MemType)")
del_items(0x8002316C)
SetType(0x8002316C, "unsigned char SortAddr(struct MEM_HDR *B1, struct MEM_HDR *B2)")
del_items(0x8002317C)
SetType(0x8002317C, "void SortMemHdrList(struct MEM_HDR **Head, unsigned char (*CompFunc)())")
del_items(0x80025538)
SetType(0x80025538, "int vsprintf(char *str, char *fmt, char *ap)")
del_items(0x80025584)
SetType(0x80025584, "int _doprnt(char *fmt0, char *argp, struct FILE *fp)")
|
11502745
|
class Solution:
    def magicalString(self, n: int) -> int:
        """Count the 1s among the first n digits of the magical string.

        The magical string starts "122" and generates itself: reading it
        digit by digit gives the run lengths of its own alternating runs
        of 1s and 2s.
        """
        seq = [1, 2, 2]
        gen = 2  # index of the digit currently dictating the next run length
        while len(seq) < n:
            next_digit = 3 - seq[-1]  # runs alternate between 1 and 2
            seq.extend([next_digit] * seq[gen])
            gen += 1
        return seq[:n].count(1)
|
11502810
|
from mnist import MNIST
import numpy as np
from bayes_nn import conf
class Dataloader:
    """Thin wrapper around the MNIST data set with random-batch sampling.

    Relies on the python-mnist library:
    https://pypi.python.org/pypi/python-mnist/0.3
    """

    def __init__(self, loc='data/raw'):
        """
        Load the train and test splits from *loc* and normalize the images.

        :param loc: directory holding the raw MNIST files
        """
        mndata = MNIST(loc)
        self.data = {}
        # train data
        images, labels = mndata.load_training()
        images = np.array(images)
        labels = np.array(labels).astype(np.int64)
        self.data['X_train'] = self.normalize(images)
        self.data['y_train'] = labels
        # test data
        images, labels = mndata.load_testing()
        images = np.array(images)
        labels = np.array(labels).astype(np.int64)
        self.data['X_test'] = self.normalize(images)
        self.data['y_test'] = labels

    @staticmethod
    def normalize(images, reverse=False):
        """
        Normalize the images with fixed mean/std values.

        NOTE: also records the reachable value range in ``conf.range`` as a
        side effect on every call.

        :param images: raw pixel values in [0, 255]
        :param reverse: if True, map normalized values back to pixel space
        :return: the (de)normalized images
        """
        mean = 33
        std = 78
        # Derive the range from the same constants instead of repeating the
        # magic numbers (was duplicated as literal 33/78, a drift hazard).
        conf.range = ((0 - mean) / std, (255 - mean) / std)
        if reverse:
            return images * std + mean
        else:
            return (images - mean) / std

    def sample(self, dataset='train', batch_size=None):
        """
        Draw a random batch (sampled with replacement) from one split.

        :param dataset: 'train' or 'test'
        :param batch_size: number of samples; defaults to the per-split
            value configured in ``conf``
        :return: (images, labels) tuple
        """
        assert dataset in ['train', 'test']
        if batch_size is None:
            if dataset == 'train':
                batch_size = conf.batch_size
            else:
                batch_size = conf.batch_size_test
        num_samples = self.data['X_' + dataset].shape[0]
        permutation = np.random.choice(num_samples, size=(batch_size,))
        im = self.data['X_' + dataset][permutation]
        lbl = self.data['y_' + dataset][permutation]
        return im, lbl

    def sample_NCHW(self, *args, **kwargs):
        """
        Sample images in the NCHW format:
        Num_samples x CHANNELS x HEIGHT x WIDTH

        :param args: forwarded to :meth:`sample`
        :param kwargs: forwarded to :meth:`sample`
        :return: (images reshaped to (N, 1, 28, 28), labels)
        """
        im, lbl = self.sample(*args, **kwargs)
        im = np.reshape(im, (-1, 1, 28, 28))
        return im, lbl

    def bootstrap_yourself(self):
        """
        Apply a bootstrap to the training data, in place.

        A bootstrap is simply sampling with replacement on your own data set.
        """
        num_samples = self.data['X_train'].shape[0]
        ind = np.random.choice(num_samples, size=(num_samples,), replace=True)
        self.data['X_train'] = self.data['X_train'][ind]
        self.data['y_train'] = self.data['y_train'][ind]
|
11502815
|
import sys
import math
from tests.utils.runtest import makesuite, run
from tests.utils.testcase import TestCase
from System import UInt32
class Number(object):
    # Test fixture: an object that is *convertible* to a number without being
    # one.  Python 2 code — note the long literal ``0L``; math.log/log10 are
    # expected to fall back to these coercion methods.
    def __long__(self):
        return 0L
    def __float__(self):
        return 0.0001
class BugTest(TestCase):
    """Regression tests for previously-fixed IronPython numeric bugs."""

    def testLogWorksNow(self):
        # math.log/log10 used to choke on objects that define only
        # __long__/__float__; both calls must simply not raise.
        math.log(Number())
        math.log10(Number())

    def testUIntLen(self):
        class WithUIntLen(object):
            # __len__ returns a System.UInt32 instead of a plain int.
            def __len__(self):
                return UInt32(123)
        self.assertEquals(
            len(WithUIntLen()), 123,
            "uint len bug is back (are you using ipy 2.0 instead of 2.0.1?)")
# Build the suite at import time so external runners can pick it up directly.
suite = makesuite(BugTest)
if __name__ == '__main__':
    run(suite)
|
11502817
|
import cv2
import numpy as np
from vision.modules import ModuleBase
import gui_options
import shm
# Camera feed consumed by this module.
capture_source = 'forward'

# GUI-tunable thresholds.  Naming scheme: leading f = frame, p = pike;
# middle h/s/v = HSV channel; trailing l/u = lower/upper bound.
# Sfs/Sps are morphology kernel sizes — presumably in pixels; verify in use.
# fll - frame L* lower, flu - frame L* upper
# pbl - pike L* lower
vision_options = [gui_options.IntOption("fhl", 57, 0, 255), gui_options.IntOption("fhu", 100, 0, 255),
                  gui_options.IntOption("fsl", 0, 0, 255), gui_options.IntOption("fsu", 255, 0, 255),
                  gui_options.IntOption("fvl", 118, 0, 255), gui_options.IntOption("fvu", 225, 0, 255),
                  gui_options.IntOption("phl", 0, 0, 255), gui_options.IntOption("phu", 255, 0, 255),
                  gui_options.IntOption("psl", 20, 0, 255), gui_options.IntOption("psu", 80, 0, 255),
                  gui_options.IntOption("pvl", 0, 0, 255), gui_options.IntOption("pvu", 75, 0, 255),
                  gui_options.IntOption("Sfs", 16), gui_options.IntOption("Sps", 4), gui_options.IntOption("yl", 200),
                  gui_options.IntOption("al", 3000), gui_options.IntOption("pm", 3), gui_options.IntOption("pl", 10),
                  gui_options.FloatOption("plf", 0.35), gui_options.IntOption("pmdx", 10),
                  gui_options.FloatOption("pmdf", 0.25), gui_options.IntOption("bxl", 10),
                  gui_options.IntOption("bxu", 1010), gui_options.IntOption("pal", 1000)]

# Frame width in pixels; bxl/bxu border thresholds are relative to this.
CAMERA_WIDTH = 1020
class Portal(ModuleBase.ModuleBase):
    """Vision module that locates the portal/wire obstacle.

    Thresholds each frame in HSV for the frame ("f*" options) and the pike
    ("p*" options), picks the largest qualifying contour, splits its outline
    into vertical posts, and publishes positions, breach flags and an angle
    estimate to ``shm.wire_results``.
    """

    def __init__(self):
        # The True flag is forwarded to ModuleBase — presumably enables the
        # default capture behavior; confirm against ModuleBase.
        super(Portal, self).__init__(True)

    def process(self, Mp):
        """Process one camera frame ``Mp`` (BGR image) and update shm."""
        self.post("orig", Mp)
        Mp2 = np.copy(Mp)
        # Apparently, some of these functions modify their arguments without documenting it well...
        # Morphology kernels: kf for the frame mask, kp for the pike mask.
        self.kf = cv2.getStructuringElement(cv2.MORPH_RECT, (self.options["Sfs"], self.options["Sfs"]))
        self.kp = cv2.getStructuringElement(cv2.MORPH_RECT, (self.options["Sps"], self.options["Sps"]))
        M = cv2.cvtColor(Mp, cv2.COLOR_BGR2HSV)
        Ms = cv2.split(M)
        # Per-channel threshold masks: frame (f*) and pike (p*) on H/S/V.
        Mfh = cv2.inRange(Ms[0], self.options["fhl"], self.options["fhu"])
        Mfs = cv2.inRange(Ms[1], self.options["fsl"], self.options["fsu"])
        Mfv = cv2.inRange(Ms[2], self.options["fvl"], self.options["fvu"])
        Mph = cv2.inRange(Ms[0], self.options["phl"], self.options["phu"])
        Mps = cv2.inRange(Ms[1], self.options["psl"], self.options["psu"])
        Mpv = cv2.inRange(Ms[2], self.options["pvl"], self.options["pvu"])
        # Mfhh = cv2.equalizeHist(Mfh)
        # Combined masks: frame uses H & V; pike currently uses S only.
        # NOTE: Mp is rebound here — it no longer refers to the input frame.
        Mf = Mfh & Mfv
        Mp = Mps
        #Mfh = cv2.equalizeHist(Mf)
        #Mph = cv2.equalizeHist(Mp)
        # Dilate the frame mask; open (erode then dilate) the pike mask.
        Mff = cv2.dilate(Mf, self.kf)
        Mpf = cv2.dilate(cv2.erode(Mp, self.kp), self.kp)
        self.post("Mff", Mff)
        self.post("Mpf", Mpf)
        # findContours modifies
        Mff2 = np.copy(Mff)
        _, cs, _ = cv2.findContours(Mff2, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
        # largest contour with y greater than yl
        cm = None
        cma = -1
        for c in cs:
            ca = cv2.contourArea(c)
            if ca > cma:
                # Reject any contour with a point above the yl cutoff row.
                ylok = True
                for p in c:
                    if p[0][1] < self.options["yl"]:
                        ylok = False
                        break
                if not ylok:
                    continue
                cm = c
                cma = ca
        if cma < self.options["al"] or cm is None:
            # Nothing big enough found: signal failure via area = -1.
            print("Portal: failed to identify any contours.")
            shm.wire_results.area.set(-1)
            return
        Mc = np.copy(Mp2)
        cv2.drawContours(Mc, cs, -1, (255, 255, 0), thickness=2)
        self.post("all contours", Mc)
        Mc2 = np.copy(Mp2)
        cv2.drawContours(Mc2, [cm], -1, (255, 255, 0), thickness=2)
        self.post("max contour", Mc2)
        # get the longest side of the rectangle (should be the bottom, except for extreme angles!)
        #osx = rm[0][0]
        #osy = rm[0][1]
        #for i in range(1, )
        # identify left and right posts
        shm.wire_results.area.set(cma)
        # Subsample the winning contour: keep every pm-th point.
        cmr = []
        M = self.options["pm"]
        i = 0
        for p in cm:
            if i % M == 0:
                cmr.append(p)
            i += 1
        MDX = self.options["pmdx"]
        MDF = self.options["pmdf"]
        # Split the subsampled outline into "posts": whenever a point's x
        # jumps more than MDX from the running (low-pass filtered) x ``ox``,
        # the current post is finished and a new one starts.
        posts = []
        postxs = []
        postys = []
        post = [cmr[0]]
        ox = cmr[0][0]
        for p in cmr:
            dx = abs(p[0] - ox)
            if dx[0] > MDX:
                posts.append(post)
                postxs.append(ox[0])
                # Record the topmost point (smallest y) of the finished post.
                my = 10000
                for q in post:
                    if q[0][1] < my:
                        my = q[0][1]
                postys.append(my)
                post = [p]
                ox = p[0]
            else:
                post.append(p)
                ox += (p[0] - ox) * MDF
        # Longest posts first.
        posts = sorted(posts, key=lambda x: -len(x))
        pa = None
        pb = None
        Mc3 = np.copy(Mp2)
        # Publish post A if it has at least pl points; post B additionally
        # needs at least plf of post A's length.
        if len(posts) > 0 and len(posts[0]) >= self.options["pl"]:
            pa = posts[0]
            cv2.drawContours(Mc3, [np.array(pa)], -1, (255, 0, 0), thickness=8)
            shm.wire_results.ap.set(1)
            shm.wire_results.ay.set(postys[0])
            shm.wire_results.ax.set(postxs[0])
            if len(posts) > 1 and len(posts[1]) >= self.options["pl"] and len(posts[1]) >= self.options["plf"] * len(
                    posts[0]):
                pb = posts[1]
                cv2.drawContours(Mc3, [np.array(pb)], -1, (0, 255, 0), thickness=8)
                shm.wire_results.bp.set(1)
                shm.wire_results.by.set(postys[1])
                shm.wire_results.bx.set(postxs[1])
            else:
                shm.wire_results.bp.set(0)
        else:
            shm.wire_results.ap.set(0)
        self.post("post", Mc3)
        # check if bounding box runs into border of screen
        # if it runs into the left side, we're probably on the RHS, if it runs into the right, probably on the LHS
        # thanks to <NAME>. for the idea!
        bl = False
        br = False
        for p in cm:
            if p[0][0] <= self.options["bxl"]:
                bl = True
            elif p[0][0] >= self.options["bxu"]:
                br = True
        #print(sorted([p[0][0] for p in cm])[0])
        print(Mpf.shape)
        # Restrict the pike mask to the convex hull of the frame contour.
        Mpcb = np.zeros([Mpf.shape[0], Mpf.shape[1]], dtype=np.uint8)
        cv2.drawContours(Mpcb, [cv2.convexHull(cm)], -1, 255, thickness=cv2.FILLED)
        self.post("pikeb", Mpcb)
        Mpc = Mpcb & Mpf
        # Centroid of the hull-restricted pike mask, sampled on a 5px grid.
        # NOTE(review): x iterates rows and y iterates columns here, so xs is
        # a row (vertical) sum and ys a column (horizontal) sum — the swapped
        # set() calls below account for that; confirm before "fixing".
        xs = 0
        ys = 0
        n = 0
        for x in range(0, Mpcb.shape[0], 5):
            for y in range(0, Mpcb.shape[1], 5):
                if Mpc[x][y] == 255:
                    xs += x
                    ys += y
                    n += 1
        if n > 0:
            shm.wire_results.y.set(xs / n)
            shm.wire_results.x.set(ys / n)
        else:
            shm.wire_results.y.set(-1)
            shm.wire_results.x.set(-1)
        self.post("pike", Mpc)
        # Second centroid pass over the same mask (same row/column caveat),
        # published to the p* pike fields with a presence flag.
        xs = 0
        ys = 0
        n = 0
        for x in range(0, Mpc.shape[0], 5):
            for y in range(0, Mpc.shape[1], 5):
                if Mpc[x][y] == 255:
                    xs += x
                    ys += y
                    n += 1
        if n > 0:
            shm.wire_results.pp.set(1)
            shm.wire_results.px.set(ys / n)
            shm.wire_results.py.set(xs / n)
        else:
            shm.wire_results.pp.set(0)
        breach = False
        if bl:
            breach = True
            shm.wire_results.breach_left.set(True)
        else:
            shm.wire_results.breach_left.set(False)
        if br:
            breach = True
            shm.wire_results.breach_right.set(True)
        else:
            shm.wire_results.breach_right.set(False)
        shm.wire_results.breach.set(breach)
        angle = None
        # Hard-coded angle guesses when exactly one screen edge is breached.
        if bl and not br:
            shm.wire_results.angle.set(-50)
        if br and not bl:
            shm.wire_results.angle.set(50)
        if not breach:
            # Fully visible: derive the angle from the min-area rectangle.
            rm = cv2.minAreaRect(cm)
            #print("ANGLE (- is CCW)")
            #print(rm[2])
            if rm[1][1] > rm[1][0]:
                #print("ANGLE (adjusted)")
                # if L > W, subtract from -90
                #print(-90 - rm[2])
                angle = 90 + rm[2]
            else:
                angle = rm[2]
            print((rm[0:2], angle))
            shm.wire_results.angle.set(angle)
|
11502831
|
import tensorflow as tf
from sandbox.rocky.tf.policies.gaussian_mlp_policy import GaussianMLPPolicy
from sandbox.rocky.tf.envs.base import TfEnv
from rllab.baselines.linear_feature_baseline import LinearFeatureBaseline
from rllab.envs.gym_env import GymEnv
from inverse_rl.envs.env_utils import CustomGymEnv
from inverse_rl.algos.irl_trpo import IRLTRPO
from sandbox.rocky.tf.policies.gaussian_mlp_inverse_policy import GaussianMLPInversePolicy
from inverse_rl.models.eairl import *
from inverse_rl.models.qvar import *
from inverse_rl.models.empowerment import *
from inverse_rl.models.architectures import relu_net
from inverse_rl.utils.log_utils import rllab_logdir, load_latest_experts, load_latest_experts_multiple_runs
from inverse_rl.utils.hyper_sweep import run_sweep_parallel, run_sweep_serial
def main(exp_name=None, fusion=True):
    """Train an EAIRL agent on the CustomAnt-v0 environment.

    Builds the inverse model q(a|s,s'), the EAIRL discriminator, the
    empowerment networks and a Gaussian MLP policy, then runs IRL-TRPO
    with the environment reward zeroed out (learning purely from the
    learned IRL reward).

    Args:
        exp_name: optional experiment name; only used by the commented-out
            per-run log-directory line below.
        fusion: whether the EAIRL discriminator keeps a replay buffer of
            old samples in addition to the latest on-policy batch.
    """
    env = TfEnv(CustomGymEnv('CustomAnt-v0', record_video=False, record_log=False))
    # load ~2 iterations worth of data from each forward RL experiment as demos
    experts = load_latest_experts_multiple_runs('data/ant_data_collect', n=2)
    #experts = load_latest_experts('data/ant_data_collect', n=5)
    #qvar: inverse model q(a|s,s')
    qvar= GaussianMLPInversePolicy(name='qvar_model', env_spec=env.spec, hidden_sizes=(32, 32))
    qvar_model = Qvar(env=env,qvar=qvar, expert_trajs=experts, fusion=True, max_itrs=10)
    #Empowerment-based Adversarial Inverse Reinforcement Learning, set score_discrim=True
    irl_model = EAIRL(env=env, expert_trajs=experts, state_only=False, fusion=fusion, max_itrs=10, score_discrim=True)
    #Empowerment-based potential functions gamma* Phi(s')-Phi(s)
    empw_model = Empowerment(env=env,fusion=True, max_itrs=4)
    # target empowerment network, refreshed every `target_empw_update` iterations
    t_empw_model = Empowerment(env=env,scope='t_efn',fusion=True, max_itrs=2, name='empowerment2')
    policy = GaussianMLPPolicy(name='policy', env_spec=env.spec, hidden_sizes=(32, 32))
    algo = IRLTRPO(
        env=env,
        policy=policy,
        empw=empw_model,
        tempw=t_empw_model,
        qvar_model=qvar_model,
        irl_model=irl_model,
        n_itr=130,
        batch_size=20000,
        max_path_length=500,
        discount=0.99,
        store_paths=True,
        target_empw_update=5,
        irl_model_wt=1.0,
        entropy_weight=0.1,
        lambda_i=1.0,
        zero_environment_reward=True,  # rely exclusively on the IRL reward
        baseline=LinearFeatureBaseline(env_spec=env.spec),
    )
    with rllab_logdir(algo=algo, dirname='data/ant_state_irl'):
        #with rllab_logdir(algo=algo, dirname='data/ant_state_irl/%s' % exp_name): # if you use multiple runs, use this line instead of above
        with tf.Session():
            algo.train()
if __name__ == "__main__":
    # Hyperparameter sweep configuration (consumed only by run_sweep_parallel).
    params_dict = {
        'fusion': [True]
    }
    # NOTE(review): params_dict is not passed to main() here, so main() runs
    # with its defaults; uncomment run_sweep_parallel below to actually sweep.
    main()
    #run_sweep_parallel(main, params_dict, repeat=3)
|
11502874
|
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.contrib.staticfiles.views import serve
from django.views.decorators.cache import never_cache
# Populate the admin registry from all installed apps.
# (The original file called admin.autodiscover() twice; once is enough.)
admin.autodiscover()

from parks.views import HomePageView, BackboneHomePageView, HackathonHomePageView

urlpatterns = patterns('',
    # Home
    # url(r'^$', HomePageView.as_view(), name='home'),

    # Backbone App
    url(r'^$', BackboneHomePageView.as_view(), name='backbone_home'),

    # Hackathon App
    url(r'^hackathon', HackathonHomePageView.as_view(), name='hackathon_home'),

    # Parks
    url(r'^parks/', include('parks.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    # grappelli
    url(r'^grappelli/', include('grappelli.urls')),
)

# Serve user-uploaded media files (development convenience).
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

if settings.DEBUG:
    # Never cache static files in development!
    static_view = never_cache(serve)
    urlpatterns += patterns('',
        url(r'^static/(?P<path>.*)$', static_view, {
            'document_root': settings.STATIC_ROOT,
        })
    )
|
11502891
|
import tensorflow as tf
class DQNetwork18_eval:
    """Evaluation-time copy of the recurrent DQN.

    Rebuilds the conv/ResNet/LSTM forward graph under the same variable
    scope as the training network with reuse=True, so it shares weights.
    Batch-norm layers are built with training=False and there is no loss
    or optimizer; explicit LSTM state placeholders allow stepping the
    network one chunk at a time.
    """
    # NOTE(review): state_size=[5,5,4] is a mutable default argument; harmless
    # here because it is never mutated, but worth confirming.
    def __init__(self, batch_size, state_size=[5,5,4], action_space=5, num_objects=5, learning_rate=0.0002, seq_len = 50, name='DQNetwork'):
        """Build the forward graph.

        Args:
            batch_size: batch size used to size the LSTM zero state.
            state_size: per-step observation shape [H, W, C].
            action_space: number of actions per object.
            num_objects: number of controllable objects.
            learning_rate: kept for signature parity with the training
                network; unused here (no optimizer is built).
            seq_len: unrolled sequence length.
            name: variable scope shared with the training network.
        """
        self.state_size = state_size
        # one Q-value head per (object, action) pair
        self.action_size = action_space*num_objects
        self.learning_rate = learning_rate
        self.seq_len = seq_len
        # reuse=True: share all variables with the training network of this scope
        with tf.variable_scope(name, reuse = True):
            # We create the placeholders
            # *state_size means that we take each elements of state_size in tuple hence is like if we wrote
            # [None, 84, 84, 4]
            self.inputs_ = tf.placeholder(tf.float32, [None, self.seq_len, *state_size], name="inputs")
            # self.action_chain = tf.placeholder(tf.float32, [None, self.action_size * (frame_num-1)], name="action_chain")
            # Remember that target_Q is the R(s,a) + ymax Qhat(s', a')
            # self.conflict_matrix = tf.placeholder(tf.float32, [None, num_objects, num_objects, 2], name="conflict_matrix")
            self.finish_tag = tf.placeholder(tf.float32,[None, self.seq_len, num_objects], name="finish_tag")
            # conflict_matrix_and = tf.logical_and(tf.cast(self.conflict_matrix[...,0],tf.bool),tf.cast(self.conflict_matrix[...,1],tf.bool))
            # self.conflict_matrix = tf.cast(self.conflict_matrix,tf.float32)
            # conflict_matrix_and = tf.cast(conflict_matrix_and,tf.float32)
            #self.state_in = ((tf.placeholder(tf.float32, [None, 256], name = "state_in_c1"), tf.placeholder(tf.float32, [None, 256], name = "state_in_h1")),
            #                 (tf.placeholder(tf.float32, [None, 256], name = "state_in_c2"), tf.placeholder(tf.float32, [None, 256], name = "state_in_h2")))
            # external LSTM state fed in by the caller when stepping
            self.state_in = tf.nn.rnn_cell.LSTMStateTuple(tf.placeholder(tf.float32, [None, 256], name = "lstm_c1"),
                                                          tf.placeholder(tf.float32, [None, 256], name = "lstm_h1"))
            """
            First convnet:
            CNN
            BatchNormalization
            ELU
            """
            # Input is 15*15*55
            self.inputs = tf.reshape(self.inputs_, [-1, *self.state_size])# combine the first two dims
            self.conv1 = tf.layers.conv2d(inputs = self.inputs,
                                          filters = 64,
                                          kernel_size = [5,5],
                                          strides = [2,2],
                                          padding = "SAME",
                                          kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                          name = "conv1")
            # training=False: use the stored moving statistics at eval time
            self.conv1_batchnorm = tf.layers.batch_normalization(self.conv1,
                                                                 training = False,
                                                                 epsilon = 1e-5,
                                                                 name = 'batch_norm1')
            self.conv1_out = tf.nn.elu(self.conv1_batchnorm, name="conv1_out")
            ## --> [8, 8, 64]
            print('conv1_out',self.conv1_out)
            """
            Second convnet:
            ResNet block
            BatchNormalization
            ELU
            """
            self.conv2_1 = tf.layers.conv2d(inputs = self.conv1_out,
                                            filters = 64,
                                            kernel_size = [3,3],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv2_1")
            self.conv2_batchnorm_1 = tf.layers.batch_normalization(self.conv2_1,
                                                                   training = False,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm2_1')
            self.conv2_out_1 = tf.nn.elu(self.conv2_batchnorm_1, name="conv2_out_1")
            self.conv2_2 = tf.layers.conv2d(inputs = self.conv2_out_1,
                                            filters = 64,
                                            kernel_size = [1,1],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv2_2")
            self.conv2_batchnorm_2 = tf.layers.batch_normalization(self.conv2_2,
                                                                   training = False,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm2_2')
            # residual connection back to conv1_out
            self.conv2_out_2 = tf.nn.elu(self.conv2_batchnorm_2+self.conv1_out, name="conv2_out_2")
            ## --> [4, 4, 128]
            print('conv2_out',self.conv2_out_2)
            """
            Third convnet:
            CNN
            BatchNormalization
            ELU
            """
            # Input is 15*15*55
            self.conv3 = tf.layers.conv2d(inputs = self.conv2_out_2,
                                          filters = 128,
                                          kernel_size = [3,3],
                                          strides = [2,2],
                                          padding = "SAME",
                                          kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                          name = "conv3")
            self.conv3_batchnorm = tf.layers.batch_normalization(self.conv3,
                                                                 training = False,
                                                                 epsilon = 1e-5,
                                                                 name = 'batch_norm3')
            self.conv3_out = tf.nn.elu(self.conv3_batchnorm, name="conv3_out")
            print('conv3_out',self.conv3_out)
            """
            Forth convnet:
            ResNet block
            BatchNormalization
            ELU
            """
            self.conv4_1 = tf.layers.conv2d(inputs = self.conv3_out,
                                            filters = 128,
                                            kernel_size = [3,3],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv4_1")
            self.conv4_batchnorm_1 = tf.layers.batch_normalization(self.conv4_1,
                                                                   training = False,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm4_1')
            self.conv4_out_1 = tf.nn.elu(self.conv4_batchnorm_1, name="conv4_out_1")
            self.conv4_2 = tf.layers.conv2d(inputs = self.conv4_out_1,
                                            filters = 128,
                                            kernel_size = [1,1],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv4_2")
            self.conv4_batchnorm_2 = tf.layers.batch_normalization(self.conv4_2,
                                                                   training = False,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm4_2')
            self.conv4_out_2 = tf.nn.elu(self.conv4_batchnorm_2+self.conv3_out, name="conv4_out_2")
            print('conv4_out',self.conv4_out_2)
            ## --> [4, 4, 128]
            """
            Fifth convnet:
            CNN
            BatchNormalization
            ELU
            """
            # Input is 15*15*55
            self.conv5 = tf.layers.conv2d(inputs = self.conv4_out_2,
                                          filters = 256,
                                          kernel_size = [3,3],
                                          strides = [2,2],
                                          padding = "SAME",
                                          kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                          name = "conv5")
            self.conv5_batchnorm = tf.layers.batch_normalization(self.conv5,
                                                                 training = False,
                                                                 epsilon = 1e-5,
                                                                 name = 'batch_norm5')
            self.conv5_out = tf.nn.elu(self.conv5_batchnorm, name="conv5_out")
            print('conv5_out',self.conv5_out)
            """
            Sixth convnet:
            ResNet block
            BatchNormalization
            ELU
            """
            self.conv6_1 = tf.layers.conv2d(inputs = self.conv5_out,
                                            filters = 256,
                                            kernel_size = [3,3],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv6_1")
            self.conv6_batchnorm_1 = tf.layers.batch_normalization(self.conv6_1,
                                                                   training = False,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm6_1')
            self.conv6_out_1 = tf.nn.elu(self.conv6_batchnorm_1, name="conv6_out_1")
            self.conv6_2 = tf.layers.conv2d(inputs = self.conv6_out_1,
                                            filters = 256,
                                            kernel_size = [1,1],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv6_2")
            self.conv6_batchnorm_2 = tf.layers.batch_normalization(self.conv6_2,
                                                                   training = False,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm6_2')
            self.conv6_out_2 = tf.nn.elu(self.conv6_batchnorm_2+self.conv5_out, name="conv6_out_2")
            print('conv6_out',self.conv6_out_2)
            # flatten per-step conv features and append the per-object finish tags
            self.finish_tag_ = tf.reshape(self.finish_tag, [-1, num_objects])
            #print("finish_tag")
            #print(self.finish_tag_.shape)
            self.flatten_ = tf.concat([tf.contrib.layers.flatten(self.conv6_out_2), self.finish_tag_], -1)
            #print("flatten_")
            #print(self.flatten_.shape)
            # restore the (batch, seq_len, features) layout for the RNN
            self.flatten = tf.reshape(self.flatten_, [-1, self.seq_len, int(self.flatten_.shape[-1])])
            #print("flatten")
            #print(self.flatten.shape)
            ## --> [1152]
            def lstm_layer(lstm_size, number_of_layers):
                '''
                This method is used to create LSTM layer/s for PixelRNN
                Input(s): lstm_cell_unitis - used to define the number of units in a LSTM layer
                          number_of_layers - used to define how many of LSTM layers do we want in the network
                          batch_size - in this method this information is used to build starting state for the network
                Output(s): cell - lstm layer
                          init_state - zero vectors used as a starting state for the network
                '''
                def cell_f(size):
                    # NOTE(review): uses the enclosing `lstm_size`, not the `size`
                    # parameter — harmless here because cell_f(lstm_size) is called
                    # with the same value, but confirm before reusing elsewhere.
                    return tf.nn.rnn_cell.LSTMCell(lstm_size, name='basic_lstm_cell')
                # cell = tf.contrib.rnn.MultiRNNCell([cell(lstm_size) for _ in range(number_of_layers)])
                cell = cell_f(lstm_size)
                init_state = cell.zero_state(batch_size, tf.float32)
                return cell, init_state
            cell, self.init_state = lstm_layer(256, 1)
            # dynamic_rnn is driven by the externally supplied state_in, not init_state
            self.rnn, self.state_out = tf.nn.dynamic_rnn(cell, self.flatten, initial_state = self.state_in)
            print(self.rnn)
            # linear Q-value head over the LSTM outputs
            self.output_ = tf.layers.dense(inputs = self.rnn,
                                           kernel_initializer=tf.contrib.layers.xavier_initializer(),
                                           units = self.action_size,
                                           activation=None,
                                           name = "output_internal")
            self.output = tf.reshape(self.output_, [-1, self.seq_len, self.action_size], name = "output_external")
            print(self.output_)
            print(self.output)
class DQNetwork18_2:
    """Training-time recurrent DQN: conv/ResNet feature extractor + LSTM.

    Builds the forward graph (batch-norm with training=True), the masked
    TD loss over unrolled sequences, and an RMSProp optimizer. The eval
    twin (DQNetwork18_eval) reuses these variables via the shared scope.
    """
    # NOTE(review): state_size=[5,5,4] is a mutable default argument; harmless
    # here because it is never mutated, but worth confirming.
    def __init__(self, batch_size, state_size=[5,5,4], action_space=5, num_objects=5, learning_rate=0.0002, seq_len = 50, name='DQNetwork'):
        """Build forward graph, loss and optimizer.

        Args:
            batch_size: batch size used for the LSTM zero state.
            state_size: per-step observation shape [H, W, C].
            action_space: number of actions per object.
            num_objects: number of controllable objects.
            learning_rate: stored but the optimizer reads the `lr`
                placeholder instead, allowing a schedule at feed time.
            seq_len: unrolled sequence length.
            name: variable scope (shared with the eval network).
        """
        self.state_size = state_size
        # one Q-value head per (object, action) pair
        self.action_size = action_space*num_objects
        self.learning_rate = learning_rate
        self.seq_len = seq_len
        with tf.variable_scope(name):
            # We create the placeholders
            # *state_size means that we take each elements of state_size in tuple hence is like if we wrote
            # [None, 84, 84, 4]
            self.inputs_ = tf.placeholder(tf.float32, [None, self.seq_len, *state_size], name="inputs")
            # one-hot actions taken, per step
            self.actions_ = tf.placeholder(tf.float32, [None, self.seq_len, self.action_size], name="actions_")
            # self.action_chain = tf.placeholder(tf.float32, [None, self.action_size * (frame_num-1)], name="action_chain")
            # Remember that target_Q is the R(s,a) + ymax Qhat(s', a')
            self.target_Q_ = tf.placeholder(tf.float32, [None, self.seq_len], name="target")
            # self.conflict_matrix = tf.placeholder(tf.float32, [None, num_objects, num_objects, 2], name="conflict_matrix")
            self.finish_tag = tf.placeholder(tf.float32,[None, self.seq_len, num_objects], name="finish_tag")
            #mask
            self.mask = tf.placeholder(tf.float32, [None, self.seq_len])
            self.lr = tf.placeholder(tf.float32, name="learnig_rate")
            # conflict_matrix_and = tf.logical_and(tf.cast(self.conflict_matrix[...,0],tf.bool),tf.cast(self.conflict_matrix[...,1],tf.bool))
            # self.conflict_matrix = tf.cast(self.conflict_matrix,tf.float32)
            # conflict_matrix_and = tf.cast(conflict_matrix_and,tf.float32)
            """
            First convnet:
            CNN
            BatchNormalization
            ELU
            """
            # Input is 15*15*55
            self.inputs = tf.reshape(self.inputs_, [-1, *self.state_size])# combine the first two dims
            self.conv1 = tf.layers.conv2d(inputs = self.inputs,
                                          filters = 64,
                                          kernel_size = [5,5],
                                          strides = [2,2],
                                          padding = "SAME",
                                          kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                          name = "conv1")
            # training=True: update batch statistics during training
            self.conv1_batchnorm = tf.layers.batch_normalization(self.conv1,
                                                                 training = True,
                                                                 epsilon = 1e-5,
                                                                 name = 'batch_norm1')
            self.conv1_out = tf.nn.elu(self.conv1_batchnorm, name="conv1_out")
            ## --> [8, 8, 64]
            print('conv1_out',self.conv1_out)
            """
            Second convnet:
            ResNet block
            BatchNormalization
            ELU
            """
            self.conv2_1 = tf.layers.conv2d(inputs = self.conv1_out,
                                            filters = 64,
                                            kernel_size = [3,3],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv2_1")
            self.conv2_batchnorm_1 = tf.layers.batch_normalization(self.conv2_1,
                                                                   training = True,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm2_1')
            self.conv2_out_1 = tf.nn.elu(self.conv2_batchnorm_1, name="conv2_out_1")
            self.conv2_2 = tf.layers.conv2d(inputs = self.conv2_out_1,
                                            filters = 64,
                                            kernel_size = [1,1],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv2_2")
            self.conv2_batchnorm_2 = tf.layers.batch_normalization(self.conv2_2,
                                                                   training = True,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm2_2')
            # residual connection back to conv1_out
            self.conv2_out_2 = tf.nn.elu(self.conv2_batchnorm_2+self.conv1_out, name="conv2_out_2")
            ## --> [4, 4, 128]
            print('conv2_out',self.conv2_out_2)
            """
            Third convnet:
            CNN
            BatchNormalization
            ELU
            """
            # Input is 15*15*55
            self.conv3 = tf.layers.conv2d(inputs = self.conv2_out_2,
                                          filters = 128,
                                          kernel_size = [3,3],
                                          strides = [2,2],
                                          padding = "SAME",
                                          kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                          name = "conv3")
            self.conv3_batchnorm = tf.layers.batch_normalization(self.conv3,
                                                                 training = True,
                                                                 epsilon = 1e-5,
                                                                 name = 'batch_norm3')
            self.conv3_out = tf.nn.elu(self.conv3_batchnorm, name="conv3_out")
            print('conv3_out',self.conv3_out)
            """
            Forth convnet:
            ResNet block
            BatchNormalization
            ELU
            """
            self.conv4_1 = tf.layers.conv2d(inputs = self.conv3_out,
                                            filters = 128,
                                            kernel_size = [3,3],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv4_1")
            self.conv4_batchnorm_1 = tf.layers.batch_normalization(self.conv4_1,
                                                                   training = True,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm4_1')
            self.conv4_out_1 = tf.nn.elu(self.conv4_batchnorm_1, name="conv4_out_1")
            self.conv4_2 = tf.layers.conv2d(inputs = self.conv4_out_1,
                                            filters = 128,
                                            kernel_size = [1,1],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv4_2")
            self.conv4_batchnorm_2 = tf.layers.batch_normalization(self.conv4_2,
                                                                   training = True,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm4_2')
            self.conv4_out_2 = tf.nn.elu(self.conv4_batchnorm_2+self.conv3_out, name="conv4_out_2")
            print('conv4_out',self.conv4_out_2)
            ## --> [4, 4, 128]
            """
            Fifth convnet:
            CNN
            BatchNormalization
            ELU
            """
            # Input is 15*15*55
            self.conv5 = tf.layers.conv2d(inputs = self.conv4_out_2,
                                          filters = 256,
                                          kernel_size = [3,3],
                                          strides = [2,2],
                                          padding = "SAME",
                                          kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                          name = "conv5")
            self.conv5_batchnorm = tf.layers.batch_normalization(self.conv5,
                                                                 training = True,
                                                                 epsilon = 1e-5,
                                                                 name = 'batch_norm5')
            self.conv5_out = tf.nn.elu(self.conv5_batchnorm, name="conv5_out")
            print('conv5_out',self.conv5_out)
            """
            Sixth convnet:
            ResNet block
            BatchNormalization
            ELU
            """
            self.conv6_1 = tf.layers.conv2d(inputs = self.conv5_out,
                                            filters = 256,
                                            kernel_size = [3,3],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv6_1")
            self.conv6_batchnorm_1 = tf.layers.batch_normalization(self.conv6_1,
                                                                   training = True,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm6_1')
            self.conv6_out_1 = tf.nn.elu(self.conv6_batchnorm_1, name="conv6_out_1")
            self.conv6_2 = tf.layers.conv2d(inputs = self.conv6_out_1,
                                            filters = 256,
                                            kernel_size = [1,1],
                                            strides = [1,1],
                                            padding = "SAME",
                                            kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
                                            name = "conv6_2")
            self.conv6_batchnorm_2 = tf.layers.batch_normalization(self.conv6_2,
                                                                   training = True,
                                                                   epsilon = 1e-5,
                                                                   name = 'batch_norm6_2')
            self.conv6_out_2 = tf.nn.elu(self.conv6_batchnorm_2+self.conv5_out, name="conv6_out_2")
            print('conv6_out',self.conv6_out_2)
            # """
            # Third convnet:
            # CNN
            # BatchNormalization
            # ELU
            # """
            # self.conv3 = tf.layers.conv2d(inputs = self.conv2_out,
            #                      filters = 128,
            #                      kernel_size = [4,4],
            #                      strides = [2,2],
            #                      padding = "VALID",
            #                     kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
            #                      name = "conv3")
            # self.conv3_batchnorm = tf.layers.batch_normalization(self.conv3,
            #                                            training = True,
            #                                            epsilon = 1e-5,
            #                                                name = 'batch_norm3')
            # self.conv3_out = tf.nn.elu(self.conv3_batchnorm, name="conv3_out")
            # ## --> [3, 3, 128]
            # self.flatten = tf.layers.flatten(self.inputs_)
            # self.flatten = tf.concat([tf.contrib.layers.flatten(self.conv6_out_2),tf.contrib.layers.flatten(self.conflict_matrix), tf.contrib.layers.flatten(conflict_matrix_and), self.finish_tag], -1)
            # flatten per-step conv features, append finish tags, restore (batch, seq, feat)
            self.finish_tag_ = tf.reshape(self.finish_tag, [-1, num_objects])
            self.flatten = tf.concat([tf.contrib.layers.flatten(self.conv6_out_2), self.finish_tag_], -1)
            self.flatten = tf.reshape(self.flatten, [-1, self.seq_len, int(self.flatten.shape[-1])])
            ## --> [1152]
            def lstm_layer(lstm_size, number_of_layers, batch_size):
                '''
                This method is used to create LSTM layer/s for PixelRNN
                Input(s): lstm_cell_unitis - used to define the number of units in a LSTM layer
                          number_of_layers - used to define how many of LSTM layers do we want in the network
                          batch_size - in this method this information is used to build starting state for the network
                Output(s): cell - lstm layer
                          init_state - zero vectors used as a starting state for the network
                '''
                def cell_f(size):
                    return tf.nn.rnn_cell.LSTMCell(size, name='basic_lstm_cell')
                # cell = tf.contrib.rnn.MultiRNNCell([cell(lstm_size) for _ in range(number_of_layers)])
                cell = cell_f(lstm_size)
                init_state = cell.zero_state(batch_size, tf.float32)
                return cell, init_state
            cell, init_state = lstm_layer(256, 1, batch_size)
            outputs, states = tf.nn.dynamic_rnn(cell, self.flatten, initial_state=init_state)
            print(outputs)
            self.rnn = tf.reshape(outputs, [-1, 256])
            # linear Q-value head over the LSTM outputs
            self.output_ = tf.layers.dense(inputs = self.rnn,
                                           kernel_initializer=tf.contrib.layers.xavier_initializer(),
                                           units = self.action_size,
                                           activation=None,
                                           name = "output_internal")
            self.output = tf.reshape(self.output_, [-1, self.seq_len, self.action_size], name = "output_external")
            print(self.output_)
            print(self.output)
            # Q is our predicted Q value.
            self.Q = tf.reduce_sum(tf.multiply(self.output, self.actions_), axis=2) # bs x seq_len
            # The loss is the difference between our predicted Q_values and the Q_target
            # Sum(Qtarget - Q)^2
            self.target_Q = self.target_Q_
            temp = tf.square(self.target_Q - self.Q) # bs x seq_len
            # mask out padded timesteps before averaging
            temp = tf.multiply(temp, self.mask)
            # loss_details = tf.reduce_mean(tf.reshape(temp,[-1, num_objects, action_space]),axis=[0,1], name = "loss_details")
            # print(loss_details)
            # self.loss_details = [loss_details[i] for i in range(action_space)]
            # temp = tf.reshape(tf.reduce_mean(temp, axis = 1), [-1, seq_len])
            # self.loss = tf.reduce_mean(tf.multiply(temp, self.mask))
            self.loss = tf.reduce_mean(temp)
            # learning rate comes from the `lr` placeholder, fed at train time
            self.optimizer = tf.train.RMSPropOptimizer(self.lr).minimize(self.loss)
            # self.optimizer2 = tf.train.RMSPropOptimizer(0.00005).minimize(self.loss)
|
11502940
|
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import Context, loader
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from siptracklib.utils import object_by_attribute
import siptracklib.errors
from siptrackweb.views import helpers
from siptrackweb.forms import *
@helpers.authcheck
def index(request, parent_oid):
    """List the network trees that live under the given parent object."""
    page = helpers.PageManager(request, 'stweb/views/networktrees/index.html')
    tree_root = page.setVar('parent', page.object_store.getOID(parent_oid))
    page.path(tree_root)
    page.section('network')
    trees = tree_root.listChildren(include = ['network tree'])
    page.render_var['network_tree_list'] = list(trees)
    return page.render()
@helpers.authcheck
def add(request, parent_oid):
    """Render the form for creating a new network tree under *parent_oid*."""
    page = helpers.PageManager(request, 'stweb/generic_form.html')
    tree_root = page.setVar('parent', page.object_store.getOID(parent_oid))
    page.path(tree_root)
    page.section('network')
    page.render_var['network_tree_list'] = tree_root.listChildren(include = ['network tree'])
    page.addForm(NetworkTreeAddForm(), '/networktree/add/post/%s/' % (parent_oid))
    return page.render()
@helpers.authcheck
def add_post(request, parent_oid):
    """Create a network tree from the submitted form and show it."""
    page = helpers.PageManager(request, 'stweb/generic_form.html')
    tree_root = page.setVar('parent', page.object_store.getOID(parent_oid))
    page.path(tree_root)
    page.section('network')
    page.render_var['network_tree_list'] = tree_root.listChildren(include = ['network tree'])
    page.addForm(NetworkTreeAddForm(request.POST), '/networktree/add/post/%s/' % (parent_oid))
    if not page.form.is_valid():
        return page.error()
    # create the tree with the selected protocol, then name it
    tree = tree_root.add('network tree', page.form.cleaned_data['protocol'])
    tree.attributes['name'] = page.form.cleaned_data['name']
    return page.redirect('network.display', (tree.oid,))
@helpers.authcheck
def delete(request, oid):
    """Show the confirmation form for removing a network tree."""
    page = helpers.PageManager(request, 'stweb/generic_form.html')
    page.addForm(DeleteForm(), '/networktree/delete/post/%s/' % (oid), message='Removing network tree.')
    page.section('network')
    tree = page.setVar('network_tree', page.object_store.getOID(oid))
    page.path(tree)
    page.render_var['parent'] = tree.parent
    page.render_var['network_tree_list'] = tree.parent.listChildren(include = ['network tree'])
    return page.render()
@helpers.authcheck
def delete_post(request, oid):
    """Delete a network tree after validating the confirmation form.

    Redirects to the parent's tree index on success. Unlike the original,
    an invalid confirmation form now renders an error instead of deleting
    unconditionally — consistent with how add_post validates its form.
    """
    pm = helpers.PageManager(request, 'stweb/generic_form.html')
    pm.addForm(DeleteForm(request.POST), '/networktree/delete/post/%s/' % (oid), message='Removing network tree.')
    pm.section('network')
    if not pm.form.is_valid():
        return pm.error()
    nt = pm.object_store.getOID(oid)
    # remember where to go back to before the tree disappears
    parent_oid = nt.parent.oid
    nt.delete()
    return pm.redirect('network.tree.index', (parent_oid,))
|
11502987
|
import itertools
from collections import OrderedDict
import torch.nn as nn
class EmbeddingMixin:
    """Adds shared embedding-table construction to a feature-based module."""

    def build_embeddings(
            self, default_embedding_size, fixed_embedding_size=False):
        """Create one ``nn.Embedding`` per distinct ``embedding_name``.

        Features whose ``name`` differs from their ``embedding_name`` alias
        the table registered under ``embedding_name``. Each table is also
        registered as a submodule named ``embedding:<embedding_name>``.

        Returns the pair ``(embeddings, embedding_sizes)``, both OrderedDicts
        keyed by embedding name and by aliased feature name.
        """
        tables = OrderedDict()
        sizes = OrderedDict()
        all_features = itertools.chain(
            self.features.category_features,
            self.features.sequence_features)
        for feature in all_features:
            key = feature.embedding_name
            if key not in tables:
                # per-feature size wins unless a fixed size is forced
                if fixed_embedding_size or not feature.embedding_size:
                    dim = default_embedding_size
                else:
                    dim = feature.embedding_size
                table = nn.Embedding(feature.dimension(), dim, padding_idx=0)
                tables[key] = table
                sizes[key] = dim
                self.add_module(f"embedding:{key}", table)
            if feature.name != key:
                tables[feature.name] = tables[key]
                sizes[feature.name] = sizes[key]
                # an aliased feature cannot request a different size
                if feature.embedding_size and (
                        feature.embedding_size != sizes[feature.name]):
                    raise RuntimeWarning(
                        f"embedding_size of {feature.name} should be "
                        f"the same with {feature.embedding_name}")
        return (tables, sizes)
|
11502989
|
import torch
import torch.nn as nn
from torch.autograd import Variable
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import torch.nn.functional as F
import numpy as np
import itertools
def flatten(l):
    """Concatenate a list of iterables into a single flat list."""
    return [item for sub in l for item in sub]
# Demo: embedding + packed-sequence LSTM over variable-length strings.
# Fixed for Python 3: the original used Python 2 `print` statements (a
# SyntaxError under py3) and fed a lazy `map` object to torch.cuda.LongTensor.
seqs = ['ghatmasala','nicela','chutpakodas']

# make <pad> idx 0
vocab = ['<pad>'] + sorted(list(set(flatten(seqs))))
# O(1) token -> index lookup; list.index is O(len(vocab)) per call
tok_to_idx = {tok: idx for idx, tok in enumerate(vocab)}

# make model
embed = nn.Embedding(len(vocab), 10).cuda()
lstm = nn.LSTM(10, 5).cuda()

vectorized_seqs = [[tok_to_idx[tok] for tok in seq] for seq in seqs]

# get the length of each seq in your batch
# (materialize map() — it is lazy in Python 3)
seq_lengths = torch.cuda.LongTensor(list(map(len, vectorized_seqs)))

# dump padding everywhere, and place seqs on the left.
# NOTE: you only need a tensor as big as your longest sequence
seq_tensor = Variable(torch.zeros((len(vectorized_seqs), seq_lengths.max()))).long().cuda()
for idx, (seq, seqlen) in enumerate(zip(vectorized_seqs, seq_lengths)):
    seq_tensor[idx, :seqlen] = torch.LongTensor(seq)

# SORT YOUR TENSORS BY LENGTH!
seq_lengths, perm_idx = seq_lengths.sort(0, descending=True)
seq_tensor = seq_tensor[perm_idx]

# utils.rnn lets you give (B,L,D) tensors where B is the batch size, L is the maxlength, if you use batch_first=True
# Otherwise, give (L,B,D) tensors
seq_tensor = seq_tensor.transpose(0,1) # (B,L,D) -> (L,B,D)

# embed your sequences
seq_tensor = embed(seq_tensor)

# pack them up nicely
packed_input = pack_padded_sequence(seq_tensor, seq_lengths.cpu().numpy())

# throw them through your LSTM (remember to give batch_first=True here if you packed with it)
packed_output, (ht, ct) = lstm(packed_input)

# unpack your output if required
output, _ = pad_packed_sequence(packed_output)
print(output)

# Or if you just want the final hidden state?
print(ht[-1])
|
11502992
|
from cvxpy.atoms.affine.sum import sum
from cvxpy.reductions.dgp2dcp.atom_canonicalizers.sum_canon import sum_canon
def norm1_canon(expr, args):
    """Canonicalize a 1-norm atom for DGP by reducing it to a sum.

    Under DGP the argument entries are positive, so the 1-norm collapses
    to a plain sum over the same axis/keepdims, which is then handled by
    sum_canon.
    """
    assert len(args) == 1
    summed = sum(args[0], expr.axis, expr.keepdims)
    return sum_canon(summed, summed.args)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.