# mosquito-visualizer / server.jl
# (Hugging Face Space upload by cfei1994 — commit b33ca75, "Upload 5 files")
using Oxygen, HTTP, JSON3, CSV, DataFrames, Dates, Random, Statistics, Interpolations
# Parse a timestamp string such as "2023-05-01 12:30:45.1230" into a `DateTime`.
# Callers strip the last three characters of the raw datetime column first
# (see the `[1:end-3]` call sites), leaving four fractional digits for `ssss`.
function parse_date(d::AbstractString)
    fmt = dateformat"yyyy-mm-dd HH:MM:SS.ssss"
    return DateTime(d, fmt)
end
"""
    export_clean_traces(file; min_track_length=1.0, min_start_time=300.0,
                        max_start_time=Inf, wall_buffer_x=0.0,
                        wall_buffer_y=0.0, wall_buffer_z=0.0)

Read a tracking CSV (`file` may be a path or an IO, e.g. an `IOBuffer`),
group rows by the first column, and delegate filtering and resampling to
`save_data`.

Returns `(starting_times, clean_traces)` as produced by `save_data`.
"""
function export_clean_traces(file;
    min_track_length = 1.0, min_start_time = 300.0, max_start_time = Inf,
    wall_buffer_x = 0.0, wall_buffer_y = 0.0, wall_buffer_z = 0.0
)
    # CSV.read with a DataFrame sink already returns a DataFrame; the original
    # `convert(DataFrame, data)` round-trip was redundant and has been removed.
    df = CSV.read(file, DataFrame)
    # Group by column 1 — presumably a per-track object id; confirm schema.
    groups = groupby(df, 1)
    return save_data(groups, file;
        min_track_length = min_track_length, min_start_time = min_start_time,
        max_start_time = max_start_time, wall_buffer_x = wall_buffer_x,
        wall_buffer_y = wall_buffer_y, wall_buffer_z = wall_buffer_z,
    )
end
"""
    save_data(groups, file; min_track_length=1.0, min_start_time=300.0,
              max_start_time=Inf, wall_buffer_x=0.0, wall_buffer_y=0.0,
              wall_buffer_z=0.0)

Filter grouped tracks and resample the survivors onto a fixed 10 ms grid.

Pipeline:
1. Estimate the arena walls from the 1%/99% quantiles of per-track extrema
   (columns 3/4/5 are treated as x/y/z positions — assumed cm; TODO confirm).
2. Keep tracks longer than `min_track_length` seconds whose start falls
   between `min_start_time` and `max_start_time` seconds after the experiment
   begins.
3. Drop tracks that ever come within the wall buffers of the estimated walls.
4. Linearly interpolate each remaining track at dt = 0.01 s, converting
   positions from cm to m.

Returns `(starting_times, clean_traces)` where `starting_times[i]` is track
i's offset (s) from the experiment start and `clean_traces[i]` is an N×5
matrix of (x, y, z, t, observed-flag).

NOTE(review): the `file` argument is unused in the body; it is kept for
interface compatibility with existing callers.
"""
function save_data(groups, file;
    min_track_length = 1.0, min_start_time = 300.0, max_start_time = Inf,
    wall_buffer_x = 0.0, wall_buffer_y = 0.0, wall_buffer_z = 0.0
)
    # --- 1. Robust wall positions from per-track extrema ---
    min_x = Float64[]; max_x = Float64[]
    min_y = Float64[]; max_y = Float64[]
    min_z = Float64[]; max_z = Float64[]
    for g in groups
        push!(min_x, minimum(g[:, 3])); push!(max_x, maximum(g[:, 3]))
        push!(min_y, minimum(g[:, 4])); push!(max_y, maximum(g[:, 4]))
        push!(min_z, minimum(g[:, 5])); push!(max_z, maximum(g[:, 5]))
    end
    # 1%/99% quantiles reject outlier tracks when locating the walls.
    wall_robust = [
        (quantile(min_x, 0.01), quantile(max_x, 0.99)),
        (quantile(min_y, 0.01), quantile(max_y, 0.99)),
        (quantile(min_z, 0.01), quantile(max_z, 0.99)),
    ]
    println("Wall x position: ", wall_robust[1])
    println("Wall y position: ", wall_robust[2])
    println("Wall z position: ", wall_robust[3])
    # --- 2. Filter on duration and start-time window ---
    # The raw datetime strings carry more fractional digits than parse_date's
    # format accepts; `[1:end-3]` trims them to four — TODO confirm source data.
    experiment_start_time = parse_date(groups[1].datetime[1][1:end-3])
    long_traces = []
    for g in groups
        track_end_time = parse_date(g.datetime[end][1:end-3])
        track_start_time = parse_date(g.datetime[1][1:end-3])
        duration = Dates.value(track_end_time - track_start_time) / 1000       # seconds
        offset = Dates.value(track_start_time - experiment_start_time) / 1000  # seconds
        if duration > min_track_length && min_start_time < offset < max_start_time
            push!(long_traces, g)
        end
    end
    # --- 3. Filter on distance from the walls ---
    final_traces = []
    times = []                    # per-track sample times relative to track start (s)
    starting_times = Float64[]    # per-track offsets from experiment start (s)
    for g in long_traces
        inside =
            all(g[:, 3] .> wall_robust[1][1] + wall_buffer_x) &&
            all(g[:, 3] .< wall_robust[1][2] - wall_buffer_x) &&
            all(g[:, 4] .> wall_robust[2][1] + wall_buffer_y) &&
            all(g[:, 4] .< wall_robust[2][2] - wall_buffer_y) &&
            all(g[:, 5] .> wall_robust[3][1] + wall_buffer_z) &&
            all(g[:, 5] .< wall_robust[3][2] - wall_buffer_z)
        if inside
            push!(final_traces, g)
            # Hoist the track-start parse out of the inner loop (it is invariant).
            t0 = parse_date(g.datetime[1][1:end-3])
            tmp = Float64[]
            for i in 1:length(g.datetime)
                push!(tmp, Dates.value(parse_date(g.datetime[i][1:end-3]) - t0) / 1000)
            end
            push!(starting_times, Dates.value(t0 - experiment_start_time) / 1000)
            push!(times, tmp)
        end
    end
    # --- 4. Resample each track at dt = 0.01 s ---
    clean_traces = Vector{Matrix{Float64}}()
    skipped = Int[]
    for g in 1:length(final_traces)
        if sum(diff(times[g]) .<= 0.0) == 0   # timestamps strictly increasing
            ts = collect(0.0:0.01:times[g][end])  # fixed dt of 0.01 seconds
            interp_linear_xs = linear_interpolation(times[g], final_traces[g][:, 3])
            interp_linear_ys = linear_interpolation(times[g], final_traces[g][:, 4])
            interp_linear_zs = linear_interpolation(times[g], final_traces[g][:, 5])
            temp = zeros(length(ts), 5)
            temp[:, 1] = interp_linear_xs.(ts) / 100  # convert cm to m
            temp[:, 2] = interp_linear_ys.(ts) / 100
            temp[:, 3] = interp_linear_zs.(ts) / 100
            temp[:, 4] = ts
            # Column 5 flags resampled points that coincide (to 10 ms) with an
            # original observation.
            for j in 1:length(ts)
                temp[j, 5] = round(ts[j], digits=2) in times[g] ? 1.0 : 0.0
            end
            push!(clean_traces, temp)
        else
            println("Skipping track ", g)
            push!(skipped, g)
        end
    end
    # BUG FIX: the original called deleteat!(starting_times, g) inside the loop,
    # which shifted every later index — when more than one track was skipped,
    # the wrong start times were removed and starting_times fell out of
    # alignment with clean_traces. Deleting all skipped indices in one call
    # keeps the pairing correct.
    deleteat!(starting_times, skipped)
    return starting_times, clean_traces
end
# Split a collection of pairs into two parallel vectors: all first elements
# and all second elements (the inverse of `zip` for paired data).
function unzip(collection)
    firsts = map(p -> p[1], collection)
    seconds = map(p -> p[2], collection)
    return firsts, seconds
end
# Run the full cleaning pipeline on `filepath` (path or IO) and package the
# result for the front-end. Trajectories are shuffled so the viewer draws
# them in a random order.
function process_big_csv(filepath)
    start_times, traces = export_clean_traces(filepath;
        min_track_length = 1.0, min_start_time = 0.0, max_start_time = Inf,
    )
    # Shuffle start times and traces together so each trace keeps its own
    # start time.
    paired = collect(zip(start_times, traces))
    shuffle!(paired)
    start_times, traces = unzip(paired)
    # NOTE(review): the trace matrix columns are (x, y, z), yet "y" is taken
    # from column 3 and "z" from column 2 — presumably an axis swap for the
    # viewer's y-up convention; confirm against the front-end code.
    trajectories = map(1:length(traces)) do i
        Dict(
            "start_time" => start_times[i],
            "x" => Float32.(traces[i][:, 1]),
            "y" => Float32.(traces[i][:, 3]),
            "z" => Float32.(traces[i][:, 2]),
        )
    end
    return Dict("trajectories" => trajectories)
end
# GET / — serve the front-end page. `file` here is Oxygen's static-file
# response helper, reading index.html from the working directory.
@get "/" function()
return file("index.html")
end
# POST /process-path — the JSON body carries a `path` field pointing at a CSV
# already on the server's filesystem; the file is processed at full precision
# and the resulting payload is returned (Oxygen serializes it to JSON).
@post "/process-path" function(req)
    body = JSON3.read(String(req.body))
    csv_path = body.path
    return process_big_csv(csv_path)
end
# POST /process-file — the request body is the raw bytes of the uploaded CSV;
# it is processed entirely in memory. Any failure is reported to the client
# as a 500 with a JSON error message.
@post "/process-file" function(req)
try
# req.body contains the raw bytes of the uploaded 600MB file
# We wrap it in an IOBuffer so CSV.read can process it in RAM
data = process_big_csv(IOBuffer(req.body))
# Return results (Oxygen handles JSON conversion)
return data
catch e
# NOTE(review): string(e) may expose internal details (paths, stack info)
# to the client — confirm this is acceptable for this deployment.
return HTTP.Response(500, JSON3.write(Dict("error" => string(e))))
end
end
# Serve everything in the current directory as static assets (index.html, JS, CSS).
staticfiles(".")
# Start the HTTP server on all interfaces; port 7860 is presumably chosen to
# match the Hugging Face Spaces default — confirm against the deployment config.
serve(host="0.0.0.0", port=7860)