repo_name | path | text
|---|---|---|
malloc3/YG_Harmonization
|
libraries/BiotekPlateReaderCalibration.rb
|
needs 'Standard Libs/MatrixTools'
needs 'YG_Harmonization/PlateReaderMethods'
# This module is used for doing the extraction and calculations required to
# successfully calibrate the Biotek plate reader
#
module BiotekPlateReaderCalibration
include PlateReaderMethods
include MatrixTools
require 'csv'
require 'open-uri'
CAL_TEMPLATE_FILENAME = "calibration_template_v1"
CAL_MEASUREMENTS = ['cal_od', 'cal_gfp']
# Takes in a csv upload file and extracts the information on it
# into a data matrix object, which is returned.
# Specifically tuned to the output file of the Biotek plate reader.
#
# @param upload [Upload] the object which can be resolved to calibration csv
# @return [WellMatrix] a WellMatrix holding the measurement for each well
def extract_measurement_matrix_from_csv(upload)
url = upload.url
table = []
CSV.new(open(url)).each { |line| table.push(line) }
dm = WellMatrix.create_empty(96, 'NA') if table.size > 25
dm = WellMatrix.create_empty(24, 'NA') if table.size <= 25
table.each_with_index do |row, idx|
next if idx.zero?
well_coord = row[2]
next if well_coord.nil?
next if row[3].nil? # to_f would silently coerce nil to 0.0
measurement = row[3].to_f
dm.set(well_coord, measurement)
end
dm
end
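# Usage sketch (hypothetical upload; assumes WellMatrix exposes a `get`
# that mirrors the `set` used above):
#   dm = extract_measurement_matrix_from_csv(upload)
#   dm.get('A1') # => 0.093, or 'NA' if that well was never set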
# Returns the average OD measurement for different dilutions and well volumes.
# The plotted result of this method can be fit to a curve
# to be used for calibrating the plate reader. This is very specific to
# Eriberto's calibration of the Biotek plate reader.
#
# @param upload [Upload] the object which can be resolved to calibration csv
# @return [Hash] a hash containing averaged measurements for every concentration and volume tested
def get_calibration_data_hash(upload)
method = upload.name
dm = extract_measurement_matrix_from_csv(upload)
result = {}
data_by_conc = Hash.new { |h, key| h[key] = [0, 0] }
if method.include? 'gfp'
show {note "#{method}"}
starting_concentration = 50.0 # uM
# first 4 rows are serial dilutions
for i in 0...4
12.times do |j|
# each column is a 2x dilution of the previous, starting at 50uM
this_conc = starting_concentration / (2**j)
data = data_by_conc[this_conc]
data[0] += dm[i, j].to_f
data[1] += 1
data_by_conc[this_conc] = data
end
end
# add serial dilution averages to result hash
data_by_conc.each_key do |k|
data = data_by_conc[k]
result[k] = data[0] / data[1]
end
return result
elsif method.include? 'od'
# rows at indices 4 and 5 are LUDOX dilutions and pure solution respectively
for i in 4...6
for j in 0...4
data_by_conc["100_#{i}"][0] += dm[i, j].to_f
data_by_conc["100_#{i}"][1] += 1
end
for j in 4...8
data_by_conc["200_#{i}"][0] += dm[i, j].to_f
data_by_conc["200_#{i}"][1] += 1
end
for j in 8...12
data_by_conc["300_#{i}"][0] += dm[i, j].to_f
data_by_conc["300_#{i}"][1] += 1
end
end
# add blanked LUDOX averages to result hash
for i in 1..3
lud_avg = data_by_conc["#{i}00_4"][0] / data_by_conc["#{i}00_4"][1]
sol_avg = data_by_conc["#{i}00_5"][0] / data_by_conc["#{i}00_5"][1]
result["lud#{i}00"] = (lud_avg - sol_avg).round(5) # Returns blanked averages
end
end
result
end
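# Shape of the returned hash (values illustrative, not real data):
#   gfp upload: { 50.0 => 1.92, 25.0 => 0.96, ..., 0.0244140625 => 0.001 }
#   od upload:  { "lud100" => 0.04431, "lud200" => 0.08512, "lud300" => 0.12977 }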
#-------------Plate ReaderCalibration------------------------------------------------------------#
# Finds the first flat-bottom plate holding fluorescein that is still usable
# (less than a month old); returns [create_a_new_cal_plt, calibration_plate].
def check_cal_plate_date()
create_a_new_cal_plt = false
calibration_plate = nil # if the plate is less than a month old use the cal plate
# Look through flat bottom plates and see which one has fluorescein inside
flour_samp = Sample.find_by_name("Fluorescein Sodium Salt")
test_plts = find(:item, {object_type: { name: "96 Well Flat Bottom (black)" }}).select {|i| i.location != 'deleted'}
# Check to see if there is a calibration plate that is less than a month old
test_plts.each do |item_id|
mat = Collection.find(item_id).matrix.flatten.uniq
if mat.include? flour_samp.id
date_created = Item.find(item_id).get('date_created')
present = todays_date()
# Dates are MMDDYYYY strings; split each into [month, day, year] integers
plus_month = [date_created[0..1], date_created[2..3], date_created[4..7]].map {|i| i.to_i}
plus_month[0] = plus_month[0] + 1 # one month after creation (no Dec -> Jan rollover handling)
date_created = [date_created[0..1], date_created[2..3], date_created[4..7]].map {|i| i.to_i}
today = [present[0..1], present[2..3], present[4..7]].map {|i| i.to_i}
# log_info 'CALIBRATION PLATE AGE','date_created', date_created, 'plus_month', plus_month, 'todays_date', today
if today[0] == plus_month[0] # Checking month: the plate turns a month old this month
if today[1] >= date_created[1] # Checking day: past the creation day, so expired
create_a_new_cal_plt = true
Item.find(item_id).mark_as_deleted
else
calibration_plate = item_id
end
else
calibration_plate = item_id
end
end
end
return create_a_new_cal_plt, calibration_plate
end
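# Usage sketch (hypothetical; `fresh_coll` is whatever empty 96-well
# collection the calling protocol provisions):
#   create_new, plate = check_cal_plate_date()
#   cal_plate = create_new ? create_cal_plate(fresh_coll) : Collection.find(plate)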
# Creates a calibration plate for the plate reader with a Fluorescein dye and an optical density reagent
#
# @param cal_coll [Collection] container of plate reader cal solutions
# @return [Collection] the filled calibration plate
def create_cal_plate(cal_coll)
flour_samp = Sample.find_by_name("Fluorescein Sodium Salt" )
ludox_samp = Sample.find_by_name("LUDOX Stock")
# Items and materials required for calibration plate
flour_item = find(:item, { sample: { name: flour_samp.name }, object_type: { name: "1mM Fluorescein Stock" } } ).first
ludox_item = find(:item, { sample: { name: ludox_samp.name }, object_type: { name: "1X LUDOX Aliquot" } } ).first
cal_items = [flour_item, ludox_item]
take cal_items, interactive: true
h2o_type = "Nuclease-free water" # Change in Production Aq to Mol grade H2O
h2o_samp = Sample.find_by_name(h2o_type)
cal_plt_mats = {'1X PBS'=>'Bench', 'Mol. Grade H2O'=>'Media Bay', '96 Well Flat Bottom (black)'=>'Bench'}
show do
title "Creating a New Calibration Plate"
separator
note "<b>Gather the following:</b>"
cal_plt_mats.each {|mat, loc| check "#{mat} at #{loc}"}
end
show do
title "Creating a New Calibration Plate"
separator
note "Vortex 1mM Fluorescein Stock and make sure there are no precipitates."
check "In a fresh 1.5mL Eppendorf tube, dilute 50µl of 1mM Fluorescein Stock into 950µl of 1X PBS - Final Concentration [50µM]"
note "Make sure to vortex."
end
dims = cal_coll.dimensions
rows = dims[0]
cols = dims[1]
new_coll_mat = Array.new(rows) { Array.new(cols) { -1 } }
rows.times do |r|
cols.times do |c|
if r < 4
new_coll_mat[r][c] = flour_samp.id
elsif r == 4
new_coll_mat[r][c] = ludox_samp.id
elsif r == 5
new_coll_mat[r][c] = h2o_samp.id
end
end
end
cal_plate = cal_coll
cal_plate.matrix = new_coll_mat
cal_plate.save
# selects wells that hold the fluorescein sample id, then keeps the ones outside the first column; the selection is an array of [r, c] pairs
pbs_wells = cal_plate.select {|well| well == flour_samp.id}.select {|r, c| c != 0}
# direct tech to fill new calibration plate
show do
title "Creating a New Calibration Plate"
separator
note "You will need <b>#{(pbs_wells.length * 0.1) + 0.1}mL</b> of 1X PBS for the next step."
note "Follow the table below to dispense 1X PBS in the appropriate wells:"
table highlight_rc(cal_plate, pbs_wells) {|r,c| "100µl"}
end
flour_serial_image = "Actions/Yeast_Gates/plateReaderImages/flour_serial_dilution.png"
show do
title "Serial Dilution of Flourescein"
separator
note "From the 50µM Fluorescein solution, dispense <b>200µl</b> in wells <b>A1, B1, C1, D1</b>"
note "Following the image below, transfer <b>100µl</b> of 50µM Fluorescein solution in Column 1 to Column 2"
note "Resuspend by pipetting up and down 3X"
note "Repeat until column 11 and discard the remaining <b>100µl</b>."
image flour_serial_image
end
# selects wells of a collection that have the ludox sample id, collects them as an array of [r, c]
ludox_wells = cal_plate.select {|well| well == ludox_samp.id}
show do
title "Creating a New Calibration Plate"
separator
note "Follow the table below to dispense #{ludox_samp.name} into the appropriate wells."
table highlight_rc(cal_plate, ludox_wells) {|r,c| ludox_vol(r, c)}
end
# selects wells of a collection that have the MG H2O sample id, collects them as an array of [r, c]
h2o_wells = cal_plate.select {|well| well == h2o_samp.id}
show do
title "Creating a New Calibration Plate"
separator
note "Follow the table below to dispense #{h2o_type} into the appropriate wells."
table highlight_rc(cal_plate, h2o_wells) {|r,c| ludox_vol(r, c)}
end
# Associate today's date with the item
Item.find(cal_plate.id).associate('date_created', todays_date)
release cal_items, interactive: true
return cal_plate
end
def ludox_vol(row, col)
if col < 4
"100µl"
elsif col.between?(4, 7)
"200µl"
else # columns 8..11
"300µl"
end
end
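# Volume steps up by column band, independent of row:
#   ludox_vol(4, 0) # => "100µl";  ludox_vol(4, 9) # => "300µl"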
# This function directs tech to measure calibration plate on plate reader and export data; it also associates data from plate reader
#
# @params cal_plates [Array] an array of item objects
#
def measure_cal_plate(cal_plate)
# measure on plate reader
set_up_plate_reader(cal_plate, CAL_TEMPLATE_FILENAME)
# Export a file for each measurement
CAL_MEASUREMENTS.each do |method|
timepoint = nil # Is nil since it is not being measured along with this experiment
filename = export_data(cal_plate, timepoint, method=method)
# Show block upload button and retrieval of file uploaded
up_show, up_sym = upload_show(filename)
if (up_show[up_sym].nil?)
show {warning "No upload found for calibration measurement. Try again!!!"}
up_show, up_sym = upload_show(filename)
else
upload = find_upload_from_show(up_show, up_sym)
key = "#{todays_date}_#{method}"
# Associates upload to calibration plate and plan
associate_to_plans(key, upload)
associate_to_item(cal_plate, key, upload)
# Associates data hash of measurements to item/collection - extract info from plate reader upload and associate with item
associate_PlateReader_Data(upload, cal_plate, method, timepoint)
end
end
cal_plate.location = '4°C Fridge'
# cal_plate.mark_as_deleted
end
#-------------------------------------------------PlateReaderControl-----------------------------------------------------#
end #module
|
dgf1979/epicodus-tic_tac_toe
|
lib/board.rb
|
require('pry')
class Board
attr_reader(:spaces)
define_method(:initialize) do
@spaces = []
x = 0
y = 0
3.times() do
3.times() do
@spaces.push(Space.new({:x => x, :y => y}))
x += 1
end
x = 0
y += 1
end
end
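# Spaces are stored row-major, so coordinate (x, y) lives at index x + y*3
# (e.g. find(1, 2) returns the space at index 7).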
define_method(:find) do |x, y|
return @spaces[x + y*3]
end
define_method(:find_mark) do |x, y|
if(@spaces[x + y*3].marked_by == nil)
return ""
else
return @spaces[x + y*3].marked_by.mark
end
end
define_method(:check_winner) do
#check for horizontal victory
xy = 0
3.times() do
if self.find_mark(0, xy) != "" && self.find_mark(0, xy) == self.find_mark(1, xy) && self.find_mark(1, xy) == self.find_mark(2, xy)
return self.find_mark(0, xy)
end
#check for vertical victory
if self.find_mark(xy, 0) != "" && self.find_mark(xy, 0) == self.find_mark(xy, 1) && self.find_mark(xy, 1) == self.find_mark(xy, 2)
return self.find_mark(xy, 0)
end
xy += 1
end
#check for diagonal victories
if self.find_mark(1,1) != ""
if self.find_mark(0, 0) == self.find_mark(1, 1) && self.find_mark(1, 1) == self.find_mark(2, 2)
return self.find_mark(1, 1)
elsif self.find_mark(2, 0) == self.find_mark(1, 1) && self.find_mark(1, 1) == self.find_mark(0, 2)
return self.find_mark(1, 1)
end
end
return ""
end
end
|
dgf1979/epicodus-tic_tac_toe
|
lib/game.rb
|
require('pry')
class Game
@@game = nil
attr_reader(:player1, :player2, :board, :player1_turn, :message)
define_method(:initialize) do
@player1 = Player.new({:mark => "X"})
@player2 = Player.new({:mark => "O"})
@board = Board.new()
@message = "Welcome to Tic Tac Toe!"
@player1_turn = true
end
define_method(:save) do
@@game = self
end
define_singleton_method(:load) do
@@game
end
define_method(:ai_turn) do
insertIndex = -1
compareX = ["", "X", "X"]
compareO = ["", "O", "O"]
possibles = []
#horizontal
possibles.push([0, 1, 2])
possibles.push([3, 4, 5])
possibles.push([6, 7, 8])
#vertical
possibles.push([0, 3, 6])
possibles.push([1, 4, 7])
possibles.push([2, 5, 8])
#diagonal
possibles.push([0, 4, 8])
possibles.push([2, 4, 6])
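# Each line above holds flat board indices (index = x + y*3); the
# `index % 3` / `(index / 3).floor` math below recovers (x, y) coordinates.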
# rule 1 - if you can complete 3 in a row, do it
possibles.each() do |possible|
line = []
possible.each() do |board_index|
player = @board.spaces[board_index].marked_by
if player == nil
line.push("")
else
line.push(player.mark)
end
end
if(line.sort() == compareO)
x = possible[line.index("")]%3
y = (possible[line.index("")]/3).floor()
return take_turn(x, y)
end
end
# rule 2 - if the opponent has 2 in a row, block it
possibles.each() do |possible|
line = []
possible.each() do |board_index|
player = @board.spaces[board_index].marked_by
if player == nil
line.push("")
else
line.push(player.mark)
end
end
if(line.sort() == compareX)
x = possible[line.index("")]%3
y = (possible[line.index("")]/3).floor()
return take_turn(x, y)
end
end
# rule 3 - otherwise take the first open space
@board.spaces.each_index() do |index|
if(@board.spaces[index].marked_by == nil)
x = index%3
y = (index/3).floor()
return take_turn(x, y)
end
end
end
define_method(:take_turn) do |x, y|
if @board.find(x, y).marked_by == nil && @board.check_winner == ""
if(@player1_turn)
@message = "O's Turn"
@board.find(x, y).mark_by(@player1)
else
@message = "X's Turn"
@board.find(x, y).mark_by(@player2)
end
@player1_turn = !@player1_turn
if @board.check_winner != ""
@message = "The winner is: #{@board.check_winner()}!"
end
end
end
end
|
dgf1979/epicodus-tic_tac_toe
|
spec/tic_tac_toe_integration_spec.rb
|
require('capybara/rspec')
require('./app')
Capybara.app = Sinatra::Application
set(:show_exceptions, false)
describe("Sinatra test", {:type => :feature}) do
it('checks a test page to verify basic Sinatra functionality') do
visit('/test')
expect(page).to have_content('Sinatra OK')
end
end
describe("tic tac toe game", {:type => :feature}) do
it('message text on new game') do
visit('/')
expect(page).to have_content("Welcome")
end
it('can restart with a fresh game', { :type => :feature}) do
visit('/')
click_button("new_game")
expect(page).to have_content("Welcome")
click_link('00')
expect(page).to have_content("O's Turn")
end
it('plays a turn', { :type => :feature}) do
visit('/')
click_link('00')
expect(page).to have_content("O's Turn")
end
it('plays a whole game', { :type => :feature}) do
visit('/')
click_link('00')
click_link('10')
click_link('01')
click_link('20')
click_link('02')
expect(page).to have_content("The winner is: X!")
end
end
|
dgf1979/epicodus-tic_tac_toe
|
spec/space_spec.rb
|
require('rspec')
require('space')
describe(Space) do
describe("#x") do
it("returns the space's x coordinate") do
test_space = Space.new({ :x => 1, :y => 2 })
expect(test_space.x()).to(eq(1))
end
end
describe("#y") do
it("returns the space's y coordinate") do
test_space = Space.new({ :x => 1, :y => 2 })
expect(test_space.y()).to(eq(2))
end
end
describe("#mark_by") do
it("lets a player mark the space") do
test_player = Player.new({ :mark => "X" })
test_space = Space.new({ :x => 1, :y => 2 })
test_space.mark_by(test_player)
expect(test_space.marked_by()).to(eq(test_player))
end
end
end
|
dgf1979/epicodus-tic_tac_toe
|
app.rb
|
require('sinatra')
require('sinatra/reloader')
also_reload('lib/**/*.rb')
require('./lib/game')
require('./lib/board')
require('./lib/space')
require('./lib/player')
require('pry')
get('/test') do
erb(:test)
end
get('/') do
if Game.load() == nil #start a new game if one is not in progress
this_game = Game.new()
this_game.save()
else #load game if one is in progress
this_game = Game.load()
end
@board = this_game.board
@message = this_game.message
erb(:index)
end
get('/move/:x/:y') do
x = params.fetch('x').to_i()
y = params.fetch('y').to_i()
this_game = Game.load()
this_game.take_turn(x, y)
@board = this_game.board
@message = this_game.message
this_game.save()
erb(:index)
end
post("/new_game") do
this_game = Game.new()
this_game.save()
@board = this_game.board
@message = this_game.message
erb(:index)
end
post("/ai_turn") do
this_game = Game.load()
this_game.ai_turn()
this_game.save()
@board = this_game.board
@message = this_game.message
erb(:index)
end
|
dgf1979/epicodus-tic_tac_toe
|
spec/player_spec.rb
|
require('rspec')
require('player')
describe(Player) do
describe("#mark") do
it("returns the player's mark") do
test_player = Player.new({ :mark => "X" })
expect(test_player.mark()).to(eq("X"))
end
end
end
|
dgf1979/epicodus-tic_tac_toe
|
spec/game_spec.rb
|
require('rspec')
require('game')
describe(Game) do
describe("#initialize") do
it('sets up a new game state') do
new_game = Game.new()
expect(new_game.player1.mark).to(eq("X"))
expect(new_game.player2.mark).to(eq("O"))
expect(new_game.board.class).to(eq(Board))
end
end
describe("#take_turn") do
it('takes a turn') do
new_game = Game.new()
new_game.take_turn(0, 2)
new_game.take_turn(1, 0)
new_game.take_turn(1, 1)
new_game.take_turn(2, 2)
new_game.take_turn(2, 0)
expect(new_game.board.check_winner()).to(eq("X"))
end
end
describe('#message') do
it('returns a message about the game state') do
new_game = Game.new()
expect(new_game.message()).to(eq("Welcome to Tic Tac Toe!"))
new_game.take_turn(0, 2)
expect(new_game.message()).to(eq("O's Turn"))
new_game.take_turn(1, 0)
expect(new_game.message()).to(eq("X's Turn"))
new_game.take_turn(1, 1)
new_game.take_turn(2, 2)
new_game.take_turn(2, 0)
expect(new_game.message()).to(eq("The winner is: X!"))
end
end
describe('#ai_turn') do
it('makes a winning move given the opportunity (X wins)') do
new_game = Game.new()
new_game.take_turn(1, 1)
new_game.take_turn(1, 0)
new_game.take_turn(0, 1)
new_game.take_turn(0, 0)
new_game.take_turn(1, 2)
new_game.ai_turn()
expect(new_game.message()).to(eq("The winner is: O!"))
end
it('blocks a potential win by the other player') do
new_game = Game.new()
new_game.take_turn(1, 1)
new_game.take_turn(2, 0)
new_game.take_turn(2, 1)
new_game.ai_turn()
expect(new_game.board.find_mark(0, 1)).to(eq("O"))
end
end
end
|
dgf1979/epicodus-tic_tac_toe
|
lib/space.rb
|
class Space
attr_reader(:x, :y, :marked_by)
define_method(:initialize) do |attributes|
@x = attributes.fetch(:x)
@y = attributes.fetch(:y)
@marked_by = nil
end
define_method(:mark_by) do |player|
@marked_by = player
end
end
|
dgf1979/epicodus-tic_tac_toe
|
lib/player.rb
|
class Player
attr_reader(:mark)
define_method(:initialize) do |attributes|
@mark = attributes.fetch(:mark)
end
end
|
dgf1979/epicodus-tic_tac_toe
|
spec/board_spec.rb
|
require("rspec")
require('board')
describe(Board) do
describe('#initialize') do
it("creates 9 spaces when it is initialized") do
board = Board.new()
expect(board.spaces.length()).to(eq(9))
end
end
describe('#find') do
it('returns the space at the given coordinates') do
board = Board.new()
expect(board.find(1, 2).x).to(eq(1))
expect(board.find(1, 2).y).to(eq(2))
end
end
describe('#check_winner') do
it('returns winning mark for horizontal victory') do
board = Board.new()
player1 = Player.new({:mark => "X"})
board.find(0, 2).mark_by(player1)
board.find(1, 2).mark_by(player1)
board.find(2, 2).mark_by(player1)
expect(board.check_winner()).to(eq("X"))
end
it('returns winning mark for vertical victory') do
board = Board.new()
player1 = Player.new({:mark => "O"})
board.find(0, 0).mark_by(player1)
board.find(0, 1).mark_by(player1)
board.find(0, 2).mark_by(player1)
expect(board.check_winner()).to(eq("O"))
end
it('returns winning mark for diagonal victory') do
board = Board.new()
player1 = Player.new({:mark => "O"})
board.find(0, 0).mark_by(player1)
board.find(1, 1).mark_by(player1)
board.find(2, 2).mark_by(player1)
expect(board.check_winner()).to(eq("O"))
end
it('returns empty string for NO victory') do
board = Board.new()
player1 = Player.new({:mark => "O"})
player2 = Player.new({:mark => "X"})
board.find(0, 0).mark_by(player1)
board.find(1, 1).mark_by(player2)
board.find(2, 2).mark_by(player1)
expect(board.check_winner()).to(eq(""))
end
end
end
|
csfrancis/lmdb_store
|
lib/lmdb_store.rb
|
# frozen_string_literal: true
require 'lmdb'
require 'active_support/cache'
class LmdbStore < ActiveSupport::Cache::Store
def initialize(path, options = nil)
options ||= {}
super(options)
@path = path
@max_size = options[:size] || 32.megabytes
@name = options[:name] || 'cache'
@env = LMDB.new(path, mapsize: @max_size, nosync: true, writemap: true)
@db = @env.database(@name, create: true)
end
private
def read_entry(key, **options)
if data = @db.get(key)
Marshal.load(data)
else
nil
end
end
def write_entry(key, entry, **options)
@db.put(key, Marshal.dump(entry).to_s)
end
def delete_entry(key, **options)
@db.delete(key)
true
rescue LMDB::Error
false
end
def clear
@db.clear
end
end
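# Usage sketch (a minimal example; assumes the directory exists and the
# `lmdb` gem plus ActiveSupport are loaded):
#   cache = LmdbStore.new('tmp/cache', size: 64.megabytes, name: 'app_cache')
#   cache.write('greeting', 'hello')
#   cache.read('greeting')   # => "hello"
#   cache.delete('greeting') # => true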
|
csfrancis/lmdb_store
|
lmdb_store.gemspec
|
Gem::Specification.new do |s|
s.name = 'lmdb_store'
s.version = '0.0.1'
s.summary = 'LMDB implementation of ActiveSupport::Store'
s.description = <<-DOC
This gem provides an ActiveSupport::Store implementation that is backed by an LMDB database.
DOC
s.homepage = 'https://github.com/csfrancis/lmdb_store'
s.authors = '<NAME>'
s.email = '<EMAIL>'
s.license = 'MIT'
s.files = `git ls-files`.split("\n")
s.add_runtime_dependency 'activesupport', '>= 5'
s.add_runtime_dependency 'lmdb', '>= 0.5'
s.add_development_dependency 'rake', '>= 12.3.3'
s.add_development_dependency 'minitest', '~> 5.0'
s.add_development_dependency 'pry', '~> 0.12.2'
end
|
csfrancis/lmdb_store
|
lmdb-profile.rb
|
#!/usr/bin/env ruby
# frozen_string_literal:true
require 'lmdb'
require 'pry'
# Customize these things
DURATION = 5
NUM_WORKERS = 16
UPDATE_SLEEP = 0.001
class Worker
class Result
attr_accessor :count, :duration
attr_reader :worker_num
def initialize(worker_num)
@count = 0
@worker_num = worker_num
end
def increment_count
@count = @count + 1
end
def to_s
"worker: #{worker_num}\tduration: #{format("%.4f", duration)}\tread_count: #{count}"
end
end
@worker_count = 0
class << self
attr_accessor :worker_count
end
def initialize(db_name, duration)
@db_name = db_name
@duration = duration
@rd, @wr = IO.pipe
@worker_num = self.class.worker_count
self.class.worker_count += 1
end
def start
fork do
@rd.close
env = LMDB.new(@db_name)
@db = env.database
@wr.write Marshal.dump(work)
@wr.close
end
@wr.close
end
def result
result = Marshal.load(@rd.read)
@rd.close
result
end
private
def work
result = Result.new(@worker_num)
start = Time.now.to_f
while Time.now.to_f < (start + @duration) do
@db["working_bin"]
result.increment_count
end
result.duration = Time.now.to_f - start
result
end
end
env = LMDB.new('db')
m = env.database
workers = []
NUM_WORKERS.times { workers << Worker.new('db', DURATION) }
workers.each(&:start)
count = 1
start = Time.now.to_f
stop = start + DURATION
while Time.now.to_f < stop do
m["working_bin"] = count.to_s
count += 1
sleep UPDATE_SLEEP
end
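# Report each worker's stats; Worker#result (and Result#to_s) are defined
# above but never called, so this is presumably the intended final step.
workers.each { |w| puts w.result }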
|
csfrancis/lmdb_store
|
test/lmdb_store_test.rb
|
# frozen_string_literal: true
require 'minitest/autorun'
require 'lmdb_store'
require 'fileutils'
class TestLmdbStore < Minitest::Test
def setup
FileUtils.rm_rf(db_path)
FileUtils.mkdir_p(db_path)
end
def test_create
LmdbStore.new(db_path)
end
def test_write
c = LmdbStore.new(db_path)
c.write('foo', 'bar')
end
def test_read
c = LmdbStore.new(db_path)
c.write('foo', 'bar')
assert_equal 'bar', c.read('foo')
end
def test_delete
c = LmdbStore.new(db_path)
c.write('foo', 'bar')
assert c.delete('foo')
end
def test_delete_non_existent
c = LmdbStore.new(db_path)
refute c.delete('foo')
end
def test_write_max_space
c = LmdbStore.new(db_path, size: 64.kilobytes)
assert_raises(LMDB::Error::MAP_FULL) do
10000.times do |i|
c.write("foo#{i}", i)
end
end
end
private
def db_path
@db_path ||= File.join(File.dirname(__FILE__), '..', 'tmp', 'test')
end
end
|
cleversoap/homebrew-cask
|
Casks/second-life-viewer.rb
|
cask :v1 => 'second-life-viewer' do
version '3.6.13.284995'
sha256 '8aa1bc39077452c3006390d4a888ca4113c087e8cdc78f5008dc85091015627d'
url "http://download.cloud.secondlife.com/Viewer_3/Second_Life_#{version.gsub('.','_')}_i386.dmg"
name 'Second Life Viewer'
homepage 'http://secondlife.com/'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'Second Life Viewer.app'
end
|
cleversoap/homebrew-cask
|
Casks/seil.rb
|
cask :v1 => 'seil' do
version '11.0.0'
sha256 '4b2a5afe8c45a46af7b8a5ef291615627d795c90ba1614b5532eafa479e8f30b'
url "https://pqrs.org/macosx/keyremap4macbook/files/Seil-#{version}.dmg"
name 'Seil'
homepage 'https://pqrs.org/macosx/keyremap4macbook/seil.html.en'
license :public_domain
pkg 'Seil.pkg'
uninstall :quit => 'org.pqrs.Seil',
:kext => 'org.pqrs.driver.Seil',
:pkgutil => 'org.pqrs.driver.Seil'
zap :delete => [
'~/Library/Caches/org.pqrs.PCKeyboardHack',
'~/Library/Caches/org.pqrs.Seil',
'~/Library/Preferences/org.pqrs.PCKeyboardHack.plist',
'~/Library/Preferences/org.pqrs.Seil.plist',
]
end
|
cleversoap/homebrew-cask
|
Casks/electron.rb
|
cask :v1 => 'electron' do
version '0.25.1'
sha256 '87c97fbe768c61773226e51a12e0567020e9c5a9807cc75f3ab87d38543c9d03'
url "https://github.com/atom/electron/releases/download/v#{version}/electron-v#{version}-darwin-x64.zip"
appcast 'https://github.com/atom/electron/releases.atom'
name 'Electron'
homepage 'http://electron.atom.io/'
license :mit
stage_only true
end
|
cleversoap/homebrew-cask
|
Casks/qtox.rb
|
cask :v1 => 'qtox' do
version :latest
sha256 :no_check
# libtoxcore.so is the official download host per the vendor homepage
url 'https://jenkins.libtoxcore.so/job/qTox%20OS%20X/lastSuccessfulBuild/artifact/qtox.dmg'
name 'qTox'
name 'Tox'
homepage 'https://tox.im/'
license :gpl
app 'qTox.app'
end
|
cleversoap/homebrew-cask
|
Casks/bartender.rb
|
cask :v1 => 'bartender' do
version '1.2.38'
sha256 'c4e1a59bf21d9f2d8ab714e0b4ff8504724954c1db06f23d96de3bf0a2084d1b'
url "http://macbartender.com/updates/#{version.gsub('.', '-')}/Bartender.zip",
:referer => 'http://www.macbartender.com'
name 'Bartender'
appcast 'http://www.macbartender.com/updates/Appcast.xml',
:sha256 => 'cb01076d5f91bf81f8b3074c5bc205f74b55e81ee880b756a2898057a2ffc953'
homepage 'http://www.macbartender.com/'
license :commercial
app 'Bartender.app'
postflight do
suppress_move_to_applications
end
zap :delete => '~/Library/Preferences/com.surteesstudios.Bartender.plist'
end
|
arslankhalid0067/bourbon-xquic
|
lib/bourbon/version.rb
|
module Bourbon
VERSION = "7.0.0"
end
|
arslankhalid0067/bourbon-xquic
|
spec/bourbon/utilities/font_source_declaration_spec.rb
|
require "spec_helper"
describe "font-source-declaration" do
before(:all) do
ParserSupport.parse_file("utilities/font-source-declaration")
end
context "called with pipeline" do
it "returns pipeline path" do
rule = 'src: font-url("b.woff2") format("woff2"), ' +
'font-url("b.woff") format("woff")'
expect(".has-pipeline").to have_rule(rule)
end
end
context "called with no pipeline" do
it "does not return pipeline path" do
rule = 'src: url("b.woff2") format("woff2"), ' +
'url("b.woff") format("woff")'
expect(".no-pipeline").to have_rule(rule)
end
end
end
|
arslankhalid0067/bourbon-xquic
|
spec/bourbon/library/font_face_spec_1.rb
|
require "spec_helper"
describe "font-face" do
before(:all) do
ParserSupport.parse_file("library/font-face-1")
end
context "called with defaults" do
it "outputs defaults" do
ruleset = 'font-family: "source-sans-pro"; ' +
'src: url("/fonts/source-sans-pro/source-sans-pro-regular.woff2") format("woff2"), url("/fonts/source-sans-pro/source-sans-pro-regular.woff") format("woff");'
expect("@font-face").to have_ruleset(ruleset)
end
end
end
|
arslankhalid0067/bourbon-xquic
|
spec/support/sass_support.rb
|
module SassSupport
def generate_css
FileUtils.mkdir("tmp")
`sass -I . spec/fixtures:tmp --update --precision=5 --sourcemap=none`
end
def clean_up
FileUtils.rm_rf("tmp")
end
end
|
arslankhalid0067/bourbon-xquic
|
spec/bourbon/library/font_face_spec_3.rb
|
require "spec_helper"
describe "font-face" do
before(:all) do
ParserSupport.parse_file("library/font-face-3")
end
context "called with defaults" do
it "outputs defaults" do
ruleset = 'font-family: "pitch";' +
'src: font-url("/fonts/pitch.woff2") format("woff2");'
expect("@font-face").to have_ruleset(ruleset)
end
end
end
|
sous-chefs/dpkg_autostart
|
metadata.rb
|
name 'dpkg_autostart'
maintainer '<NAME>'
maintainer_email '<EMAIL>'
description 'Control service actions initialized from dpkg'
license 'Apache-2.0'
source_url 'https://github.com/sous-chefs/dpkg_autostart'
issues_url 'https://github.com/sous-chefs/dpkg_autostart/issues'
version '0.4.4'
chef_version '>= 12.15'
supports 'ubuntu'
supports 'debian'
|
superfeedr/superfeedr-rb
|
spec/spec_helper.rb
|
require 'rubygems'
require 'minitest/spec'
require 'mocha'
$:.unshift File.expand_path(File.join(File.dirname(__FILE__), '..'))
$:.unshift File.expand_path(File.join(File.dirname(__FILE__), *%w[.. lib]))
require 'superfeedr-rb'
def message_node
Blather::XMPPNode.import(Nokogiri::XML(<<-XML).root)
<message from="firehoser.superfeedr.com" to="<EMAIL>">
<event xmlns="http://jabber.org/protocol/pubsub#event">
<status xmlns="http://superfeedr.com/xmpp-pubsub-ext" feed="http://superfeedr.com/dummy.xml">
<http code="200">957 bytes fetched in 0.228013s</http>
<next_fetch>2009-11-05T16:34:12+00:00</next_fetch>
</status>
<items node="http://superfeedr.com/dummy.xml">
<item xmlns="http://jabber.org/protocol/pubsub" chunks="1" chunk="1">
<entry xmlns="http://www.w3.org/2005/Atom">
<title>16:32:41</title>
<id>tag:superfeedr.com,2005:String/1257438761</id>
<published>2009-11-05T16:32:41+00:00</published>
<summary>sprsquish wanted to know what time it was.</summary>
<content>Thursday November 05 16:32:41 UTC 2009 sprsquish wanted to know what time it was.</content>
<category term="tag" scheme="http://www.sixapart.com/ns/types#tag" />
<category term="category" scheme="http://www.sixapart.com/ns/types#tag" />
<link type="text/html" href="http://superfeedr.com/?1257438761" title="superfeedr" rel="alternate"/>
<author>
<name>Superfeedr</name>
<uri>http://superfeedr.com/</uri>
<email><EMAIL></email>
</author>
</entry>
</item>
</items>
</event>
</message>
XML
end
MiniTest::Unit.autorun
|
superfeedr/superfeedr-rb
|
lib/superfeedr/entry.rb
|
module Superfeedr
class Entry < Blather::Stanza::PubSubItem
NS = 'http://www.w3.org/2005/Atom'.freeze
def self.parse(node)
node.find('//ns:event/ns:items/ns2:item', :ns => Blather::Stanza::PubSub::Event.registered_ns,
:ns2 => Blather::Stanza::PubSub.registered_ns).map do |item|
Entry.new('item').inherit(item)
end
end
def chunks
self[:chunks].to_i
end
def chunk
self[:chunk].to_i
end
def id
self.entry.content_from 'ns:id', :ns => NS
end
def title
self.entry.content_from 'ns:title', :ns => NS
end
def published
if published = self.entry.content_from('ns:published', :ns => NS)
DateTime.parse published
end
end
def content
self.entry.content_from 'ns:content', :ns => NS
end
def summary
self.entry.content_from 'ns:summary', :ns => NS
end
def categories
self.entry.find('ns:category', :ns => NS).map { |cat| cat[:term] }
end
def links
self.entry.find('ns:link', :ns => NS).map { |l| Link.new.inherit(l) }
end
def authors
self.entry.find('ns:author', :ns => NS).map { |l| Author.new.inherit(l) }
end
def entry
Blather::XMPPNode.import(super)
end
class Link < Blather::XMPPNode
def href
self[:href]
end
def rel
self[:rel]
end
def type
self[:type]
end
def title
self[:title]
end
end
class Author < Blather::XMPPNode
def name
self.content_from 'ns:name', :ns => NS
end
def email
self.content_from 'ns:email', :ns => NS
end
def uri
self.content_from 'ns:uri', :ns => NS
end
end
end
end
|
superfeedr/superfeedr-rb
|
spec/superfeedr/entry_spec.rb
|
require 'spec_helper'
# <item chunks="1" chunk="1">
# <entry xmlns="http://www.w3.org/2005/Atom">
# <title>16:32:41</title>
# <id>tag:superfeedr.com,2005:String/1257438761</id>
# <published>2009-11-05T16:32:41+00:00</published>
# <summary>sprsquish wanted to know what time it was.</summary>
# <content>Thursday November 05 16:32:41 UTC 2009 sprsquish wanted to know what time it was.</content>
# <category term="tag" scheme="http://www.sixapart.com/ns/types#tag" />
# <category term="category" scheme="http://www.sixapart.com/ns/types#tag" />
# <link type="text/html" href="http://superfeedr.com/?1257438761" title="superfeedr" rel="alternate"/>
# <author>
# <name>Superfeedr</name>
# <uri>http://superfeedr.com/</uri>
# <email><EMAIL></email>
# </author>
# </entry>
# </item>
describe Superfeedr::Entry do
before do
@events = Superfeedr::Entry.parse message_node
@event = @events.first
end
it 'parses apart a list of items' do
@events.must_be_kind_of Array
@events.size.must_equal 1
end
it 'knows how many chunks it has' do
@event.chunks.must_equal 1
end
it 'knows what chunk it is' do
@event.chunk.must_equal 1
end
it 'knows its title' do
@event.title.must_equal '16:32:41'
end
it 'knows its id' do
@event.id.must_equal 'tag:superfeedr.com,2005:String/1257438761'
end
it 'knows when it was published' do
@event.published.must_equal DateTime.parse('2009-11-05T16:32:41+00:00')
end
it 'has content' do
@event.content.must_equal 'Thursday November 05 16:32:41 UTC 2009 sprsquish wanted to know what time it was.'
end
it 'has a summary' do
@event.summary.must_equal 'sprsquish wanted to know what time it was.'
end
it 'knows its categories' do
@event.categories.must_equal %w[tag category]
end
it 'has a set of links' do
@event.links.size.must_equal 1
@event.links.first.must_be_kind_of Superfeedr::Entry::Link
end
it 'has a set of authors' do
@event.authors.size.must_equal 1
@event.authors.first.must_be_kind_of Superfeedr::Entry::Author
end
end
# <link type="text/html" href="http://superfeedr.com/?1257438761" title="superfeedr" rel="alternate"/>
describe Superfeedr::Entry::Link do
before do
@link = Superfeedr::Entry.parse(message_node).first.links.first
end
it 'knows its href' do
@link.href.must_equal 'http://superfeedr.com/?1257438761'
end
it 'knows its rel' do
@link.rel.must_equal 'alternate'
end
it 'knows its type' do
@link.type.must_equal 'text/html'
end
it 'knows its title' do
@link.title.must_equal 'superfeedr'
end
end
# <author>
# <name>Superfeedr</name>
# <uri>http://superfeedr.com/</uri>
# <email><EMAIL></email>
# </author>
describe Superfeedr::Entry::Author do
before do
@author = Superfeedr::Entry.parse(message_node).first.authors.first
end
it 'knows its name' do
@author.name.must_equal 'Superfeedr'
end
it 'knows its uri' do
@author.uri.must_equal 'http://superfeedr.com/'
end
it 'knows its email' do
@author.email.must_equal '<EMAIL>'
end
end
|
superfeedr/superfeedr-rb
|
spec/superfeedr/status_spec.rb
|
require 'spec_helper'
# <status xmlns="http://superfeedr.com/xmpp-pubsub-ext" feed="http://superfeedr.com/dummy.xml">
# <http code="200">957 bytes fetched in 0.228013s</http>
# <next_fetch>2009-11-05T16:34:12+00:00</next_fetch>
# </status>
describe Superfeedr::Status do
before do
@status = Superfeedr::Status.parse message_node
end
it 'knows the feed it belongs to' do
@status.feed.must_equal 'http://superfeedr.com/dummy.xml'
end
it 'knows its status code' do
@status.code.must_equal 200
end
it 'has more info about the http code' do
@status.http.must_equal '957 bytes fetched in 0.228013s'
end
it 'knows when the next fetch will be' do
@status.next_fetch.must_equal DateTime.parse('2009-11-05T16:34:12+00:00')
end
end
|
superfeedr/superfeedr-rb
|
example/subscribe_superfeedr.rb
|
require 'superfeedr-rb'
channel = EM::Channel.new
Thread.new do
Superfeedr::Client.connect('<EMAIL>', '<PASSWORD>', :subscribe_channel => channel ) do |client|
client.register_handler(:pubsub_event) do |evt|
pp evt
end
client.register_handler(:disconnected){ client.connect }
end
end
sleep 10
channel.push('http://github.com/shingara.atom')
sleep 10
|
superfeedr/superfeedr-rb
|
spec/spec_superfeedr-rb_spec.rb
|
require 'spec_helper'
describe Superfeedr::Client do
it 'should inherit from Blather::Client' do
Superfeedr::Client.new.must_be_kind_of Blather::Client
end
describe '#initialize' do
it 'should initialize the @deferred attribute as an array' do
client = Superfeedr::Client.new
client.instance_variable_get(:@deferred).must_equal []
end
end
describe '#subscribe' do
it 'should write Subscribe' do
client = Superfeedr::Client.setup '<EMAIL>', '<PASSWORD>'
mock_stanza = mock()
Blather::Stanza::PubSub::Subscribe.expects(:new).returns(mock_stanza)
client.expects(:write).with(mock_stanza)
client.subscribe('http://github.com/shingara.atom')
end
end
end
|
superfeedr/superfeedr-rb
|
example/superfeedr.rb
|
require 'rubygems'
require 'superfeedr-rb'
require 'pp'
Blather.logger.level = Logger::DEBUG
Superfeedr::Client.connect('n@d/r', 'password') do |client|
# Automatically subscribes to the feed
# If already subscribed it simply catches the events coming in.
client.feed('http://superfeedr.com/dummy.xml') do |status, entries|
return if status.failed?
pp({
:status => {
:feed => status.feed,
:code => status.code,
:http => status.http,
:next_fetch => status.next_fetch
},
:entries => entries.map { |entry| {
:id => entry.id,
:chunks => entry.chunks,
:chunk => entry.chunk,
:title => entry.title,
:published => entry.published,
:content => entry.content,
:summary => entry.summary,
:categories => entry.categories,
:links => entry.links.map { |link| {
:href => link.href,
:rel => link.rel,
:type => link.type,
:title => link.title
}},
:authors => entry.authors.map { |author| {
:name => author.name,
:email => author.email,
:uri => author.uri
}}
}}
})
end
# client.feed('http://github.com/superfeedr.atom') do |notification|
# pp notification
# end
# Catch all notifications
# This works because Superfeedr::Client is just a subclass of Blather::Client
client.register_handler(:pubsub_event) do |evt|
pp evt
end
end
|
superfeedr/superfeedr-rb
|
lib/superfeedr/status.rb
|
require 'date'
module Superfeedr
class Status < Blather::XMPPNode
NS = 'http://superfeedr.com/xmpp-pubsub-ext'.freeze
def self.parse(node)
self.new('status').inherit node.find_first('//ns:status', :ns => NS)
end
def failed?
false
end
def feed
self[:feed]
end
def code
self.http_node[:code].to_i
end
def http
self.http_node.content
end
def next_fetch
if next_fetch = self.find_first('//ns:next_fetch', :ns => NS).content
DateTime.parse next_fetch
end
end
protected
def http_node
self.find_first('//ns:http', :ns => NS)
end
end
end
|
superfeedr/superfeedr-rb
|
lib/superfeedr-rb.rb
|
%w[
blather
blather/client/client
superfeedr/entry
superfeedr/status
].each { |r| require r }
module Superfeedr
class Client < Blather::Client
def self.connect(jid, pass, options={} )
if block_given?
client = self.setup jid, pass, options[:host], options[:port]
EM.run {
yield client
if options[:subscribe_channel]
options[:subscribe_channel].subscribe {|url| client.subscribe(url) }
end
client.connect
}
else
super
end
end
def initialize # :nodoc:
super
@deferred = []
end
def subscribe(url)
self.write Blather::Stanza::PubSub::Subscribe.new(:set, 'firehoser.superfeedr.com', url, self.jid.stripped)
end
def feed(url, &block)
return if defer(:feed, url, &block)
subscribe(url)
self.register_handler(:pubsub_event, "//ns:items[@node='#{url}']", :ns => Blather::Stanza::PubSub::Event.registered_ns) do |evt, _|
block.call Status.parse(evt), Entry.parse(evt)
end
end
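# Sketch of a #feed call (mirrors example/superfeedr.rb):
#   client.feed('http://superfeedr.com/dummy.xml') do |status, entries|
#     puts "#{status.feed}: #{entries.size} new entries"
#   end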
def client_post_init # :nodoc:
# overwrite the default actions to take after a client is setup
status = Blather::Stanza::Presence::Status.new
status.priority = 100
write status
end
# Allow users to setup callbacks before the connection is setup
def defer(*args, &block) # :nodoc:
if @stream
false
else
@deferred << [args, block]
true
end
end
# Run all deferred commands after the connection is established
def post_init(stream, jid = nil) # :nodoc:
super
until @deferred.empty?
args = @deferred.pop
self.__send__(*args[0], &args[1])
end
end
end
end
|
isabella232/over_board
|
app/models/database.rb
|
class Database
module ClassMethods
def lvarayut
User.new(1, '<NAME>', 'lvarayut', 'https://github.com/lvarayut/relay-fullstack')
end
def features
@features ||= [
Feature.new(1, 'React', 'A JavaScript library for building user interfaces.', 'https://facebook.github.io/react'),
Feature.new(2, 'Relay', 'A JavaScript framework for building data-driven react applications.', 'https://facebook.github.io/relay'),
Feature.new(3, 'GraphQL', 'A reference implementation of GraphQL for JavaScript.', 'http://graphql.org'),
Feature.new(4, 'Express', 'Fast, unopinionated, minimalist web framework for Node.js.', 'http://expressjs.com'),
Feature.new(5, 'Webpack', 'Webpack is a module bundler that packs modules for the browser.', 'https://webpack.github.io'),
Feature.new(6, 'Babel', 'Babel is a JavaScript compiler. Use next generation JavaScript, today.', 'https://babeljs.io'),
Feature.new(7, 'PostCSS', 'PostCSS. A tool for transforming CSS with JavaScript.', 'http://postcss.org'),
Feature.new(8, 'MDL', 'Material Design Lite lets you add a Material Design to your websites.', 'http://www.getmdl.io')
]
end
def add_feature(name, description, url)
@curr_features ||= 9
new_feature = Feature.new(@curr_features, name, description, url)
features.push(new_feature)
@curr_features += 1
new_feature
end
def get_user(id)
id == lvarayut.id ? lvarayut : nil
end
def get_feature(id)
features.detect do |feature|
feature.id == id
end
end
def get_features
features
end
end
extend ClassMethods
end
|
isabella232/over_board
|
app/graphql/types/mutation_type.rb
|
Types::MutationType = GraphQL::ObjectType.define do
name "Mutation"
# Add the mutation's derived field to the mutation type
field :addFeature, field: Mutations::AddFeatureMutation.field
end
|
isabella232/over_board
|
app/graphql/mutations/add_feature_mutation.rb
|
Mutations::AddFeatureMutation = GraphQL::Relay::Mutation.define do
# Used to name derived types, eg `"AddFeatureInput"`:
name 'AddFeature'
# Accessible from `inputs` in the resolve function:
input_field :name, !types.String
input_field :description, !types.String
input_field :url, !types.String
# The result has access to these fields,
# resolve must return a hash with these keys.
# On the client-side this would be configured
# as RANGE_ADD mutation, so our returned fields
# must conform to that API.
return_field :featuresConnection, Types::FeatureType.connection_type
return_field :featureEdge, Types::FeatureType.edge_type
return_field :viewer, Types::UserType
resolve ->(object, inputs, ctx) {
viewer = Database.get_user(1)
new_feature = Database.add_feature(inputs[:name], inputs[:description], inputs[:url])
# Use this helper to create the response that a
# client-side RANGE_ADD mutation would expect.
range_add = GraphQL::Relay::RangeAdd.new(
parent: viewer,
collection: Database.get_features,
item: new_feature,
context: ctx,
)
response = {
viewer: viewer,
featuresConnection: range_add.connection,
featureEdge: range_add.edge,
}
}
end
|
isabella232/over_board
|
app/models/feature.rb
|
Feature = Struct.new(:id, :name, :description,:url)
|
isabella232/over_board
|
app/graphql/types/feature_type.rb
|
Types::FeatureType = GraphQL::ObjectType.define do
name "Feature"
global_id_field :id
field :id, !types.ID
field :name, !types.String
field :description, !types.String
field :url, !types.String
implements GraphQL::Relay::Node.interface
end
|
isabella232/over_board
|
app/graphql/types/query_type.rb
|
Types::QueryType = GraphQL::ObjectType.define do
name "Query"
# Used by Relay to lookup objects by UUID:
field :node, GraphQL::Relay::Node.field
# Fetches a list of objects given a list of IDs
field :nodes, GraphQL::Relay::Node.plural_field
field :viewer do
type Types::UserType
resolve ->(obj, args, ctx) { Database.get_user(1) }
end
end
|
isabella232/over_board
|
app/models/user.rb
|
User = Struct.new(:id, :name, :username, :website)
|
isabella232/over_board
|
app/graphql/over_board_schema.rb
|
OverBoardSchema = GraphQL::Schema.define do
query Types::QueryType
mutation Types::MutationType
id_from_object ->(object, type_definition, query_ctx) {
# Call your application's UUID method here
# It should return a string
GraphQL::Schema::UniqueWithinType.encode(type_definition.name, object.id)
}
object_from_id ->(id, query_ctx) {
type_name, item_id = GraphQL::Schema::UniqueWithinType.decode(id)
Database.send("get_#{type_name}", item_id)
}
resolve_type ->(obj, ctx) {
case obj
when User
Types::UserType
when Feature
Types::FeatureType
else
raise("Unexpected object: #{obj}")
end
}
end
|
isabella232/over_board
|
app/graphql/types/user_type.rb
|
Types::UserType = GraphQL::ObjectType.define do
name "User"
global_id_field :id
field :id, !types.ID
field :username, !types.String
field :website, !types.String
connection :features, Types::FeatureType.connection_type do
resolve ->(user, args, ctx) {
Database.get_features
}
end
implements GraphQL::Relay::Node.interface
end
|
gnufied/merb_ajax
|
lib/merb_ajax/javascript_helper.rb
|
module Merb
module Helpers
# Provides functionality for working with JavaScript in your views.
#
# == Ajax, controls and visual effects
#
# * For information on using Ajax, see
# Merb::Helpers::PrototypeHelper.
# * For information on using controls and visual effects, see
# Merb::Helpers::ScriptaculousHelper.
#
# == Including the JavaScript libraries into your pages
#
# merb_ajax provides helpers for the Prototype JavaScript framework and the
# Scriptaculous JavaScript controls and visual effects library. If you wish
# to use these libraries and their helpers (Merb::Helpers::PrototypeHelper
# and Merb::Helpers::ScriptaculousHelper), you must include the respective
# JavaScript files.
#
# Merb provides some helper methods for you:
# Merb::Assets::AssetHelpers or Merb::ViewContextMixin.
module JavaScriptHelper
# Returns a link that will trigger a JavaScript +function+ using the
# onclick handler and return false after the fact.
#
# The +function+ argument can be omitted in favor of an +update_page+
# block, which evaluates to a string when the template is rendered
# (instead of making an Ajax request first).
#
# Examples:
# link_to_function "Greeting", "alert('Hello world!')"
# Produces:
# <a onclick="alert('Hello world!'); return false;" href="#">Greeting</a>
#
# link_to_function(image_tag("delete"), "if (confirm('Really?')) do_delete()")
# Produces:
# <a onclick="if (confirm('Really?')) do_delete(); return false;" href="#">
# <img src="/images/delete.png?" alt="Delete"/>
# </a>
#
# link_to_function("Show me more", nil, :id => "more_link") do |page|
# page[:details].visual_effect :toggle_blind
# page[:more_link].replace_html "Show me less"
# end
# Produces:
# <a href="#" id="more_link" onclick="try {
# $("details").visualEffect("toggle_blind");
# $("more_link").update("Show me less");
# }
# catch (e) {
# alert('RJS error:\n\n' + e.toString());
# alert('$(\"details\").visualEffect(\"toggle_blind\");
# \n$(\"more_link\").update(\"Show me less\");');
# throw e
# };
# return false;">Show me more</a>
#
def link_to_function(name, *args, &block)
html_options = args.extract_options!
function = args[0] || ''
html_options.symbolize_keys!
function = update_page(&block) if block_given?
content_tag(
"a", name,
html_options.merge({
:href => html_options[:href] || "#",
:onclick => (html_options[:onclick] ? "#{html_options[:onclick]}; " : "") + "#{function}; return false;"
})
)
end
# Returns a button that'll trigger a JavaScript +function+ using the
# onclick handler.
#
# The +function+ argument can be omitted in favor of an +update_page+
# block, which evaluates to a string when the template is rendered
# (instead of making an Ajax request first).
#
# Examples:
# button_to_function "Greeting", "alert('Hello world!')"
# button_to_function "Delete", "if (confirm('Really?')) do_delete()"
# button_to_function "Details" do |page|
# page[:details].visual_effect :toggle_slide
# end
# button_to_function "Details", :class => "details_button" do |page|
# page[:details].visual_effect :toggle_slide
# end
def button_to_function(name, *args, &block)
html_options = args.extract_options!
function = args[0] || ''
html_options.symbolize_keys!
function = update_page(&block) if block_given?
tag("button", name, html_options.merge({
:onclick => (html_options[:onclick] ? "#{html_options[:onclick]}; " : "") + "#{function};"
}))
end
# Escape carrier returns and single and double quotes for JavaScript segments.
def escape_javascript(javascript)
(javascript || '').gsub('\\','\0\0').gsub('</','<\/').gsub(/\r\n|\n|\r/, "\\n").gsub(/["']/) { |m| "\\#{m}" }
end
# Returns a JavaScript tag with the +content+ inside. Example:
# javascript_tag "alert('All is good')"
#
# Returns:
# <script type="text/javascript">
# //<![CDATA[
# alert('All is good')
# //]]>
# </script>
#
# +html_options+ may be a hash of attributes for the <script> tag. Example:
# javascript_tag "alert('All is good')", :defer => 'defer'
# # => <script defer="defer" type="text/javascript">alert('All is good')</script>
#
# Instead of passing the content as an argument, you can also use a block
# in which case, you pass your +html_options+ as the first parameter.
# <% javascript_tag :defer => 'defer' do -%>
# alert('All is good')
# <% end -%>
def javascript_tag(content_or_options_with_block = nil, html_options = {}, &block)
if block_given?
html_options = content_or_options_with_block if content_or_options_with_block.is_a?(Hash)
content = capture(&block)
else
content = content_or_options_with_block
end
javascript_tag = content_tag("script", javascript_cdata_section(content), html_options.merge(:type => Merb.available_mime_types[:js].first))
if block_given? && block_is_within_action_view?(block)
concat(javascript_tag, block.binding)
else
javascript_tag
end
end
def javascript_cdata_section(content) #:nodoc:
"\n//#{cdata_section("\n#{content}\n//")}\n"
end
protected
def options_for_javascript(options)
'{' + options.map {|k, v| "#{k}:#{v}"}.sort.join(', ') + '}'
end
def array_or_string_for_javascript(option)
js_option = if option.kind_of?(Array)
"['#{option.join('\',\'')}']"
elsif !option.nil?
"'#{option}'"
end
js_option
end
private
def block_is_within_action_view?(block)
eval("defined? _erbout", block.binding)
end
end
JavascriptHelper = JavaScriptHelper unless const_defined? :JavascriptHelper
end
end
class Merb::ViewContext #:nodoc:
include Merb::Helpers::JavascriptHelper
end
|
gnufied/merb_ajax
|
lib/mixins/render.rb
|
module Merb
module RenderMixin
# Uses the JavaScriptGenerator to render a JavaScript block, updating the current view.
# (similar to Rails' render :update)
#
# render_js_block do |page|
# page.replace_html 'user_list', :partial => 'user', :collection => @users
# page.visual_effect :highlight, 'user_list'
# end
def render_js_block(&blk)
generator = Merb::Helpers::PrototypeHelper::JavaScriptGenerator.new(@template, &blk)
return render(:js => generator.to_s, :layout => :none)
end
end
end
|
gnufied/merb_ajax
|
lib/core_ext/hash.rb
|
class Hash #:nodoc:
# Return a new hash with all keys converted to symbols.
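# Example:
#   { 'name' => 'merb', 'version' => 1 }.symbolize_keys
#   # => { :name => 'merb', :version => 1 }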
def symbolize_keys
inject({}) do |options, (key, value)|
options[key.to_sym] = value
options
end
end
end
|
gnufied/merb_ajax
|
lib/core_ext/array.rb
|
class Array #:nodoc:
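# Example:
#   args = ['Greeting', { :class => 'btn' }]
#   args.extract_options! # => { :class => 'btn' }; args is now ['Greeting']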
def extract_options!
last.is_a?(::Hash) ? pop : {}
end
end
|
gnufied/merb_ajax
|
lib/merb_ajax/form_tag_helper.rb
|
module Merb
module Helpers
# Provides a number of methods for creating form tags that doesn't rely on an object assigned to the template like
# FormHelper does. Instead, you provide the names and values manually.
#
# NOTE: The HTML options <tt>disabled</tt>, <tt>readonly</tt>, and <tt>multiple</tt> can all be treated as booleans. So specifying
# <tt>:disabled => true</tt> will give <tt>disabled="disabled"</tt>.
module FormTagHelper
# Starts a form tag that points the action to an url configured with <tt>url_for_options</tt> just like
# Merb::Helpers::TagHelper#url_for. The method for the form defaults to POST.
#
# ==== Options
# * <tt>:multipart</tt> - If set to true, the enctype is set to "multipart/form-data".
# * <tt>:method</tt> - The method to use when submitting the form, usually either "get" or "post".
# If "put", "delete", or another verb is used, a hidden input with name _method
# is added to simulate the verb over post.
# * A list of parameters to feed to the URL the form will be posted to.
#
# ==== Examples
# form_tag('/posts')
# # => <form action="/posts" method="post">
#
# form_tag('/posts/1', :method => :put)
# # => <form action="/posts/1" method="put">
#
# form_tag('/upload', :multipart => true)
# # => <form action="/upload" method="post" enctype="multipart/form-data">
#
# <% form_tag '/posts' do -%>
# <div><%= submit_tag 'Save' %></div>
# <% end -%>
# # => <form action="/posts" method="post"><div><input type="submit" name="submit" value="Save" /></div></form>
def form_tag(url_for_options = {}, options = {}, *parameters_for_url, &block)
html_options = html_options_for_form(url_for_options, options, *parameters_for_url)
if block_given?
form_tag_in_block(html_options, &block)
else
form_tag_html(html_options)
end
end
private
def html_options_for_form(url_for_options, options, *parameters_for_url)
returning options.stringify_keys do |html_options|
html_options["enctype"] = "multipart/form-data" if html_options.delete("multipart")
html_options["action"] = url_for(url_for_options, *parameters_for_url)
end
end
def extra_tags_for_form(html_options)
case method = html_options.delete("method").to_s
when /^get$/i # must be case-insensitive, but can't use downcase as the value might be nil
html_options["method"] = "get"
''
when /^post$/i, "", nil
html_options["method"] = "post"
''
else
html_options["method"] = "post"
content_tag(:div, tag(:input, :type => "hidden", :name => "_method", :value => method) + token_tag, :style => 'margin:0;padding:0')
end
end
def form_tag_html(html_options)
extra_tags = extra_tags_for_form(html_options)
open_tag("form", html_options) + extra_tags
end
def form_tag_in_block(html_options, &block)
content = capture(&block)
concat(form_tag_html(html_options), block.binding)
concat(content, block.binding)
concat("</form>", block.binding)
end
def token_tag
unless protect_against_forgery?
''
else
tag(:input, :type => "hidden", :name => request_forgery_protection_token.to_s, :value => form_authenticity_token)
end
end
end
end
end
class Merb::ViewContext #:nodoc:
include Merb::Helpers::FormTagHelper
end
|
gnufied/merb_ajax
|
lib/merb_ajax/merbtasks.rb
|
namespace :merb_ajax do
desc "Do something for merb_ajax"
task :default do
puts "merb_ajax doesn't do anything"
end
end
|
gnufied/merb_ajax
|
lib/merb_ajax.rb
|
module Merb
module Ajax
CORE_EXT_DIR = File.dirname(__FILE__) / 'core_ext'
CORE_EXT_FILES = Dir["#{CORE_EXT_DIR}/*.rb"].collect {|h| h.match(/\/(\w+)\.rb/)[1]}
HELPERS_DIR = File.dirname(__FILE__) / 'merb_ajax'
HELPERS_FILES = Dir["#{HELPERS_DIR}/*_helper.rb"].collect {|h| h.match(/\/(\w+)\.rb/)[1]}
MIXINS_DIR = File.dirname(__FILE__) / 'mixins'
MIXINS_FILES = Dir["#{MIXINS_DIR}/*.rb"].collect {|h| h.match(/\/(\w+)\.rb/)[1]}
def self.load_helpers(helpers = HELPERS_FILES)
helpers.each {|h| Kernel.load(File.join(HELPERS_DIR, "#{h}.rb") )}
end
def self.load_core_ext(core = CORE_EXT_FILES)
core.each {|s| Kernel.load(File.join(CORE_EXT_DIR, "#{s}.rb") )}
end
def self.load_mixins(mixins = MIXINS_FILES)
mixins.each {|s| Kernel.load(File.join(MIXINS_DIR, "#{s}.rb") )}
end
def self.load
load_core_ext
load_mixins
load_helpers
Merb::Plugins.add_rakefiles "merb_ajax/merbtasks"
end
end
end
Merb::Ajax.load if defined?(Merb::Plugins)
|
vcilabs/terradactyl
|
lib/terradactyl/version.rb
|
# frozen_string_literal: true
module Terradactyl
VERSION = '1.1.2'
end
|
vcilabs/terradactyl
|
terradactyl.gemspec
|
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'terradactyl/version'
Gem::Specification.new do |spec|
spec.name = "terradactyl"
spec.version = Terradactyl::VERSION
spec.authors = ["<NAME>"]
spec.email = ["<EMAIL>"]
spec.license = 'MIT'
spec.summary = %{Manage a Terraform monorepo}
spec.description = %{Provides facility to manage a large Terraform monorepo}
spec.homepage = %{https://github.com/vcilabs/terradactyl}
spec.metadata['homepage_uri'] = spec.homepage
spec.metadata['source_code_uri'] = spec.homepage
spec.metadata['changelog_uri'] = %{#{spec.homepage}/CHANGELOG.md}
spec.metadata['allowed_push_host'] = 'https://rubygems.org'
spec.required_ruby_version = '>= 2.5.0'
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.add_dependency 'thor', '~> 0.20'
spec.add_dependency 'colorize', '~> 0.8'
spec.add_dependency 'deepsort', '~> 0.4'
spec.add_dependency 'deep_merge', '~> 1.2'
spec.add_dependency 'bundler', '>= 1.16'
spec.add_dependency 'rake', '>= 10.0'
spec.add_dependency 'terradactyl-terraform', '>= 1.1.0'
spec.add_development_dependency 'rspec', '~> 3.0'
spec.add_development_dependency 'pry', '~> 0.12'
spec.add_development_dependency 'pry-remote', '~> 0.1.8'
spec.add_development_dependency 'rubocop', '~> 0.71.0'
end
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/http4s/coders/scalaxb/Http4sScalaxbEncodersDecoders.scala
|
package com.github.mercurievv.http4s.coders.scalaxb
import cats.Monad
import cats.implicits._
import cats.effect.ConcurrentEffect
import fs2.Chunk
import org.http4s.headers.`Content-Type`
import org.http4s._
import scalaxb.XMLFormat
import scala.xml.NamespaceBinding
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/5/2020
* Time: 6:09 AM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
object Http4sScalaxbEncodersDecoders {
implicit def xmlDecoder[F[_], T](
implicit format: XMLFormat[T],
F: Monad[F],
C: ConcurrentEffect[F]
): EntityDecoder[F, T] = EntityDecoder.decodeBy(MediaType.application.xml) { decodeXml(_) }
def decodeXml[T, F[_]](m: Media[F])(
implicit format: XMLFormat[T],
C: ConcurrentEffect[F]
): DecodeResult[F, T] = {
DecodeResult {
fs2.Stream
.resource(
fs2.io
.toInputStreamResource(m.body)
.map(xml.XML.load)
)
.compile
.last
.map(
_.toRight(MalformedMessageBodyFailure("Invalid XML: empty body"))
.flatMap(scalaxb.fromXMLEither[T](_).leftMap(MalformedMessageBodyFailure(_)))
)
}
}
@SuppressWarnings(Array("org.wartremover.warts.ImplicitConversion"))
implicit def xmlEncoder[F[_], T](elementLabel: String, scope: NamespaceBinding)(
implicit format: scalaxb.CanWriteXML[T],
C: ConcurrentEffect[F]
): EntityEncoder[F, T] =
EntityEncoder
.simple[F, T]() { t =>
val xml = scalaxb
.toXML(t, elementLabel, scope)
.toString()
Chunk.bytes(xml.getBytes())
}
.withContentType(`Content-Type`(MediaType.application.xml))
}
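// --- Added usage sketch (illustration only, not part of the original file) ---
// With an XMLFormat for a scalaxb-generated type (e.g. Rss) in implicit scope,
// the implicit `xmlDecoder` above lets an http4s Client parse XML bodies
// directly, which is how StackowerflowJobsApi and UpworkJobsApi use it:
//
//   def fetch[F[_]: ConcurrentEffect](client: org.http4s.client.Client[F], uri: Uri): F[Rss] =
//     client.expect[Rss](uri)   // resolves EntityDecoder[F, Rss] via xmlDecoder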
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/model/Job.scala
|
<reponame>MercurieVV/jobs-crawler
package com.github.mercurievv.jobsearch.model
import java.time.ZonedDateTime
import org.http4s.Uri
import org.http4s.blaze.http.Url
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/10/2020
* Time: 5:05 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
case class Job(jobsResource: JobsResource,
id: Id,
link: Uri,
title: String,
description: String,
tags: Seq[Tag],
company: Company,
created: ZonedDateTime,
// updated: ZonedDateTime
)
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/businesslogic/JobsStorage.scala
|
<gh_stars>0
package com.github.mercurievv.jobsearch.businesslogic
import com.github.mercurievv.jobsearch.model.Job
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/19/2020
* Time: 9:48 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
trait JobsStorage[F[_], S[_]] {
// def filterNewJobsOnly(allJobs: S[Job]): S[Job]
def saveJobsToDb(jobs: S[Job]): F[Unit]
}
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/model/package.scala
|
package com.github.mercurievv.jobsearch
import shapeless.tag
import shapeless.tag.@@
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/10/2020
* Time: 5:19 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
package object model {
trait IdTag
type Id = String @@ IdTag
object Id{
def apply(o: String): Id = tag[IdTag][String](o)
}
trait TagTag
type Tag = String @@ TagTag
object Tag{
def apply(o: String): Tag = tag[TagTag][String](o)
}
trait CompanyTag
type Company = String @@ CompanyTag
object Company{
def apply(o: String): Company = tag[CompanyTag][String](o)
}
}
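// --- Added illustration (not part of the original file) ---
// The tagged types erase to plain String at runtime but stay distinct at
// compile time, so mixing them up is a type error:
//
//   val id: Id   = Id("396789")
//   val tag: Tag = Tag("scala")
//   // val oops: Tag = id   // does not compile: Id is not a Tag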
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/App.scala
|
package com.github.mercurievv.jobsearch
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/13/2020
* Time: 5:24 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
object App extends zio.App {
import zio._
override def run(args: List[String]): ZIO[zio.ZEnv, Nothing, Int] = {
AppHandler.run(this)
}
}
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/businesslogic/JobsServer.scala
|
<reponame>MercurieVV/jobs-crawler
package com.github.mercurievv.jobsearch.businesslogic
import cats.data.NonEmptyChain
import cats._
import cats.implicits._
import com.github.mercurievv.jobsearch.Errorable
import com.github.mercurievv.jobsearch.model.Job
import com.github.mercurievv.jobsearch.remote.stackowerflow.{RssItemToJobConverter, StackowerflowJobsApi}
import com.github.mercurievv.jobsearch.remote.upwork.UpworkJobsApi
import simulacrum._
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/19/2020
* Time: 10:39 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
import enumeratum._
sealed trait JobsServer[F[_], S[_]] /*extends EnumEntry*/ {
def getJobsFromServer: F[S[Errorable[Job]]]
}
object JobsServer /*extends Enum[JobsServer[*, *]]*/ {
// val values = findValues
final class StackOwerflow[F[_] : Monad, S[_]](api: StackowerflowJobsApi[F], seqToS: Seq ~> S) extends JobsServer[F, S] {
override def getJobsFromServer: F[S[Errorable[Job]]] = api.getJobsRss.map(RssItemToJobConverter(_)).map(seqToS(_))
}
final class Upwork[F[_] : Monad, S[_]](api: UpworkJobsApi[F], seqToS: Seq ~> S) extends JobsServer[F, S] {
override def getJobsFromServer: F[S[Errorable[Job]]] = api.getJobsRss.map(RssItemToJobConverter(_)).map(seqToS(_))
}
}
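// --- Added illustration (not part of the original file) ---
// The `seqToS: Seq ~> S` natural transformation keeps each server agnostic
// about the caller's container type; AppHandler instantiates it for List:
//
//   private val seqToList = λ[Seq ~> List](_.toList)
//   new JobsServer.StackOwerflow(new StackowerflowJobsApi[F](client), seqToList)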
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/persistence/DynamodbJobsStorage.scala
|
package com.github.mercurievv.jobsearch.persistence
import java.time.{ZoneId, ZonedDateTime}
import cats.effect.Async
import cats.kernel.Eq
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync
import com.github.mercurievv.jobsearch.businesslogic.JobsStorage
import com.github.mercurievv.jobsearch.model.{Job, JobsResource}
import fs2._
import org.scanamo._
import org.scanamo.generic.auto._
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/12/2020
* Time: 4:19 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
class DynamodbJobsStorage[F[_]: Async](client: AmazonDynamoDBAsync) extends JobsStorage[F, Stream[F, *]] {
private val scanamo: ScanamoCats[F] = ScanamoCats[F](client)
private val table = Table[Job]("Jobs")
import cats.implicits._
implicit val eeq: Eq[JobsResource] = Eq.by[JobsResource, String](_.toString)
override def saveJobsToDb(jobs: Stream[F, Job]): F[Unit] = {
val ops = for {
byJob <- jobs.groupAdjacentBy(_.jobsResource)
operation <- Stream.emit(saveJobsOps(byJob._1, byJob._2.toList.toSet))
save <- Stream.eval(scanamo.exec(operation))
} yield save
ops.compile.drain
}
private def saveJobsOps(jobsResource: JobsResource, jobs: Set[Job]) = {
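    // Added note: scan a single stored item for this resource and use its
    // `created` timestamp as the high-water mark; only jobs created after it
    // are written, falling back to 2000-01-01 UTC when nothing is stored yet.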
for {
lastJob <- table
.filter("jobsResource" -> jobsResource)
.limit(1)
.scan
jobsIO = jobs.filter(
_.created.isAfter(
lastJob.lastOption
.flatMap(_.toOption)
.map(_.created)
.getOrElse(ZonedDateTime.of(2000, 1, 1, 1, 1, 1, 1, ZoneId.of("UTC")))))
_ <- table.putAll(jobsIO)
} yield ()
}
}
|
MercurieVV/jobs-crawler
|
project/BuildKeys.scala
|
<filename>project/BuildKeys.scala
import sbt.Def._
import sbt.{Def, settingKey}
object BuildKeys {
/**
* When this value is set, it means we want to test and publish a custom Scala.js
* version, therefore we shouldn't re-publish the JVM packages.
*/
lazy val customScalaJSVersion =
Option(System.getenv("SCALAJS_VERSION"))
/**
* Human readable project title.
*
* Examples:
*
* - Cats
* - Cats Effect
* - Monix
*/
lazy val projectTitle =
settingKey[String]("Human readable project title (e.g. 'Cats Effect', 'Monix', etc)")
/**
* Example: alexandru, monix, typelevel, etc.
*/
lazy val githubOwnerID =
settingKey[String]("GitHub owner ID (e.g. user_id, organization_id)")
/**
* Example: alexandru, monix, typelevel, etc.
*/
lazy val githubRelativeRepositoryID =
settingKey[String]("GitHub repository ID (e.g. project_name)")
/**
* Example: `alexandru/my-typelevel-library`
*/
lazy val githubFullRepositoryID =
Def.setting(
s"${githubOwnerID.value}/${githubOwnerID.value}"
)
/**
* Auto-detected by the build process.
*/
lazy val needsScalaMacroParadise =
settingKey[Boolean]("Needs Scala Macro Paradise")
}
|
MercurieVV/jobs-crawler
|
core/src/test/scala/com/github/mercurievv/jobsearch/xml/RssTest.scala
|
package com.github.mercurievv.jobsearch.xml
import com.github.mercurievv.rss.generated.XMLProtocol
import javax.xml.datatype.{DatatypeFactory, XMLGregorianCalendar}
import scalaxb.{DataTypeFactory, ElemName, Helper, XMLCalendar, XMLFormat}
import scala.xml.{Text, XML}
class RssTest extends org.scalatest.FunSuite {
test("decode rss") {
import CustomXmlProtocol._
scalaxb.fromXML[XMLGregorianCalendar]( new Text("Sat, 30 May 2020 11:37:12 Z"))
val rss = scalaxb.fromXML[com.github.mercurievv.rss.generated.Rss](XML.loadString(
"""<?xml version="1.0" encoding="utf-8"?>
| <rss xmlns:a10="http://www.w3.org/2005/Atom" version="2.0">
| <channel xmlns:os="http://a9.com/-/spec/opensearch/1.1/">
| <title>remote scala jobs - Stack Overflow</title>
| <link>https://stackoverflow.com/jobs</link>
| <description>remote scala jobs - Stack Overflow</description>
| <image>
| <url>http://cdn.sstatic.net/Sites/stackoverflow/img/favicon.ico?v=4f32ecc8f43d</url>
| <title>remote scala jobs - Stack Overflow</title>
| <link>https://stackoverflow.com/jobs</link>
| </image>
| <os:totalResults>10</os:totalResults>
| <item>
| <guid isPermaLink="false">396789</guid>
| <link>https://stackoverflow.com/jobs/396789/backend-engineer-ascendify?a=294nRAfUVLos&amp;so_medium=Talent&amp;so_source=TalentApi</link>
| <a10:author>
| <a10:name>Ascendify</a10:name>
| </a10:author>
| <category>scala</category>
| <category>php</category>
| <category>java</category>
| <category>elasticsearch</category>
| <category>postgresql</category>
| <title>Backend Engineer at Ascendify () (allows remote)</title>
| <description></description>
| <pubDate>Sat, 30 May 2020 11:37:12 Z</pubDate>
| <a10:updated>2020-05-30T11:37:12Z</a10:updated>
| </item>
| <item>
| <title><![CDATA[Create Logo and Web Pages for New IT Service - Upwork]]></title>
| <link>
| https://www.upwork.com/jobs/Create-Logo-and-Web-Pages-for-New-Service_%7E0159da9cec51ec4459?source=rss
| </link>
| <description><![CDATA[]]></description>
| <content:encoded><![CDATA[]]></content:encoded>
| <pubDate>Sat, 06 Jun 2020 14:52:58 +0000</pubDate>
| <guid>
| https://www.upwork.com/jobs/Create-Logo-and-Web-Pages-for-New-Service_%7E0159da9cec51ec4459?source=rss
| </guid>
| </item>
|
|
|</channel></rss>
|""".stripMargin))
println(rss)
}
}
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/remote/stackowerflow/RssItemToJobConverter.scala
|
<gh_stars>0
package com.github.mercurievv.jobsearch.remote.stackowerflow
import java.time.ZonedDateTime
import cats.Monad
import cats.data.{Validated, ValidatedNec}
import cats.implicits._
import com.github.mercurievv.jobsearch._
import com.github.mercurievv.jobsearch.model._
import com.github.mercurievv.rss.generated.{Item, Rss}
import org.http4s.Uri
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/10/2020
* Time: 11:49 AM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
object RssItemToJobConverter{
def apply(rss: Rss): Seq[Errorable[Job]] = rss.channel.item.map(mapRssItemToJob)
def mapRssItemToJob(item : Item): Errorable[Job] = {
val ur: String => ValidatedNec[ParsingError, Uri] = (s: String) => Uri
.fromString(s)
.leftMap(new ParsingError(_))
.toValidatedNec
val value: ValidatedNec[ParsingError, Uri] = item
.link
.toValidNec(ParsingError("link"))
.andThen(ur)
val zdt: ValidatedNec[ParsingError, ZonedDateTime] = item
.pubDate
.toValidNec(ParsingError("pubDate"))
.andThen(gc => Validated.catchNonFatal(gc.toGregorianCalendar.toZonedDateTime).leftMap(new ParsingError(_)).toValidatedNec)
(
JobsResource.StackOverflow.validNec,
item.guid.toValidNec(ParsingError("guid")).map(_.value).map(Id(_)),
value,
item.title.toValidNec(ParsingError("title")),
item.description.toValidNec(ParsingError("description")),
item.category.map(_.value).map(Tag(_)).validNec,
item.author.orElse(Some("Unknown")).toValidNec(ParsingError("author")).map(Company(_)),
zdt
).mapN(Job)
}
case class ParsingError(message: String, cause: Option[Throwable] = None) extends Throwable(message){
def this(cause: Throwable) {
this(cause.getMessage, Some(cause))
}
}
}
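// --- Added illustration (not part of the original file) ---
// Because the fields are combined applicatively with mapN over ValidatedNec,
// parsing accumulates every failure instead of stopping at the first one: a
// hypothetical item missing both <guid> and <pubDate> yields
//   Invalid(NonEmptyChain(ParsingError("guid"), ParsingError("pubDate")))
// rather than only the guid error.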
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/Module.scala
|
<filename>core/src/main/scala/com/github/mercurievv/jobsearch/Module.scala
package com.github.mercurievv.jobsearch
import cats.{FunctorFilter, Monad, ~>}
import cats.effect.{Async, ConcurrentEffect}
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsyncClientBuilder
import com.github.mercurievv.jobsearch
import com.github.mercurievv.jobsearch.businesslogic.{CollectJobs, JobsServer}
import com.github.mercurievv.jobsearch.model.Job
import com.github.mercurievv.jobsearch.persistence.DynamodbJobsStorage
import com.github.mercurievv.jobsearch.remote.stackowerflow.{RssItemToJobConverter, StackowerflowJobsApi}
import com.github.mercurievv.jobsearch.remote.upwork.UpworkJobsApi
import fs2.Stream
import org.http4s.client.Client
import org.http4s.client.middleware.{RequestLogger, ResponseLogger}
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/13/2020
* Time: 4:14 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
class Module[F[_]: Monad: Async, S[_]: Monad : FunctorFilter](
httpClient: Client[F],
seqToS: Seq ~> S,
sToFs2: S ~> Stream[F, *])(implicit ce: ConcurrentEffect[F]) {
private val client = RequestLogger(logHeaders = false, logBody = false)(ResponseLogger(logHeaders = false, logBody = true)(httpClient))
private val dbclient = {
val conf = new EndpointConfiguration("https://dynamodb.eu-west-1.amazonaws.com", "eu-west-1")
AmazonDynamoDBAsyncClientBuilder.standard().withEndpointConfiguration(conf).build()
}
private val dynDbStorage = new DynamodbJobsStorage[F](dbclient)
val saveJobs: S[Job] => F[Unit] = s => dynDbStorage.saveJobsToDb (sToFs2(s))
val collectJobs: CollectJobs[F, S] = new CollectJobs(saveJobs)
val jobsServers: List[JobsServer[F, S]] = List(
new JobsServer.StackOwerflow(new StackowerflowJobsApi[F](client), seqToS),
new JobsServer.Upwork(new UpworkJobsApi[F](client), seqToS),
)
}
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/AppHandler.scala
|
<reponame>MercurieVV/jobs-crawler
package com.github.mercurievv.jobsearch
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 5/30/2020
* Time: 6:58 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
import java.io.{InputStream, OutputStream}
import cats._
import cats.implicits._
import com.amazonaws.services.lambda.runtime.{Context, RequestStreamHandler}
import fs2._
import org.http4s.client.blaze.BlazeClientBuilder
import zio.blocking.Blocking
import zio.console.{Console, putStrLn}
import scala.concurrent.ExecutionContext.global
object AppHandler extends RequestStreamHandler {
import zio._
import zio.clock.Clock
import zio.interop.catz._
type AppEnvironment = Clock with Console with Blocking
override def handleRequest(input: InputStream, output: OutputStream, context: Context): Unit = {
println(
Runtime.default
.unsafeRunSync(run(Runtime.default)))
}
private val seqToList = λ[Seq ~> List](_.toList)
private val sToFs2: List ~> Stream[AIO, *] = λ[List ~> fs2.Stream[AIO, *]](fs2.Stream.emits(_))
def run[R <: Console](implicit runtime: Runtime[R]): ZIO[R, Nothing, Int] = {
BlazeClientBuilder[AIO](global).resource.toManagedZIO
.map(client => new Module[AIO, List](client, seqToList, sToFs2))
.use { module =>
module.collectJobs.collectJobsFromServers(module.jobsServers)
}
.foldM(
err => putStrLn(s"Execution failed with: $err").as(1),
_ => ZIO.succeed(0)
)
}
}
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/xml/CustomXmlProtocol.scala
|
<gh_stars>0
package com.github.mercurievv.jobsearch.xml
import java.text.DateFormat
import java.time.{LocalDateTime, OffsetDateTime, ZoneId, ZonedDateTime}
import java.time.format.DateTimeFormatter
import java.util.{Date, Locale}
import com.github.mercurievv.rss.generated.XMLProtocol
import javax.xml.datatype.{DatatypeFactory, XMLGregorianCalendar}
import scalaxb.{ElemName, Helper, XMLFormat}
import scala.util.Try
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 6/2/2020
* Time: 1:31 AM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
object CustomXmlProtocol extends XMLProtocol {
override implicit lazy val __CalendarXMLFormat: XMLFormat[XMLGregorianCalendar] = new XMLFormat[XMLGregorianCalendar] {
def reads(seq: scala.xml.NodeSeq, stack: List[ElemName]): Either[String, XMLGregorianCalendar] = try {
import java.text.SimpleDateFormat
//Sat, 30 May 2020 11:37:12 Z
//Sat, 06 Jun 2020 15:18:24 +0000
val FORMATTER = "EEE, dd MMM yyyy HH:mm:ss 'Z'"
Try(ZonedDateTime.parse(seq.text, DateTimeFormatter.RFC_1123_DATE_TIME))
.orElse(Try(LocalDateTime.parse(seq.text, DateTimeFormatter.ofPattern(FORMATTER, Locale.ENGLISH)).atZone(ZoneId.of("UTC"))))
.map(zdt => {
val date = Date.from(zdt.toInstant)
import java.util.GregorianCalendar
val gregory = new GregorianCalendar
gregory.setTime(date)
DatatypeFactory.newInstance().newXMLGregorianCalendar(gregory)
})
.toEither
.left
.map(_.getMessage)
}
def writes(obj: XMLGregorianCalendar, namespace: Option[String], elementLabel: Option[String],
scope: scala.xml.NamespaceBinding, typeAttribute: Boolean): scala.xml.NodeSeq =
Helper.stringToXML(obj.toXMLFormat, namespace, elementLabel, scope)
}
}
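// --- Added note (not part of the original file) ---
// The override first tries RFC 1123 dates ("Sat, 06 Jun 2020 15:18:24 +0000",
// Upwork's style) and falls back to the literal-'Z' pattern that Stack
// Overflow's feed emits ("Sat, 30 May 2020 11:37:12 Z"), which
// RFC_1123_DATE_TIME rejects. RssTest exercises the fallback directly:
//
//   scalaxb.fromXML[XMLGregorianCalendar](new Text("Sat, 30 May 2020 11:37:12 Z"))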
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/businesslogic/CollectJobs.scala
|
package com.github.mercurievv.jobsearch.businesslogic
import cats._
import cats.data.Validated.{Invalid, Valid}
import cats.implicits._
import com.github.mercurievv.jobsearch.model.Job
import org.slf4j.{Logger, LoggerFactory}
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/19/2020
* Time: 9:52 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
class CollectJobs[F[_]: Monad, S[_]: Monad: FunctorFilter](saveJobs: S[Job] => F[Unit]) {
private val log: Logger = LoggerFactory.getLogger(classOf[CollectJobs[Nothing, Nothing]])
def collectJobsFromServers(jobsServers: List[JobsServer[F, S]]): F[Unit] = {
jobsServers
.map(collectJobFromServer)
.sequence_
}
private def collectJobFromServer(jobsServer: JobsServer[F, S]) =
for {
jobsErr <- jobsServer.getJobsFromServer
jobs = jobsErr.mapFilter {
case Valid(job) => Some(job)
case Invalid(errors) =>
log.error(errors.toString)
None
}
_ <- saveJobs(jobs)
} yield ()
}
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/model/JobsResource.scala
|
<gh_stars>0
package com.github.mercurievv.jobsearch.model
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/12/2020
* Time: 2:29 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
import enumeratum._
sealed trait JobsResource extends EnumEntry {}
object JobsResource extends Enum[JobsResource] {
val values = findValues
case object StackOverflow extends JobsResource
case object Upwork extends JobsResource
}
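// --- Added illustration (not part of the original file) ---
// enumeratum provides a withName/entryName round-trip, which
// persistence/package.scala relies on for DynamoFormat[JobsResource]:
//
//   JobsResource.withName("Upwork") == JobsResource.Upwork
//   JobsResource.Upwork.entryName   == "Upwork"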
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/remote/stackowerflow/StackowerflowJobsApi.scala
|
package com.github.mercurievv.jobsearch.remote.stackowerflow
import cats.effect._
import com.github.mercurievv.http4s.coders.scalaxb.Http4sScalaxbEncodersDecoders._
import com.github.mercurievv.jobsearch.xml.CustomXmlProtocol._
import com.github.mercurievv.rss.generated.Rss
import org.http4s.client.Client
import org.http4s.implicits._
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/4/2020
* Time: 2:21 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
class StackowerflowJobsApi[F[_]](httpClient: Client[F])(
implicit
C: ConcurrentEffect[F]) {
def getJobsRss: F[Rss] = {
val target = uri"https://stackoverflow.com/jobs/feed?q=scala&r=true"
httpClient.expect[Rss](target)
}
}
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/remote/upwork/UpworkJobsApi.scala
|
package com.github.mercurievv.jobsearch.remote.upwork
import cats.effect._
import com.github.mercurievv.http4s.coders.scalaxb.Http4sScalaxbEncodersDecoders._
import com.github.mercurievv.jobsearch.xml.CustomXmlProtocol._
import com.github.mercurievv.rss.generated.Rss
import org.http4s.client.Client
import org.http4s.implicits._
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 6/6/2020
* Time: 8:24 AM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
class UpworkJobsApi [F[_]](httpClient: Client[F])(
implicit
C: ConcurrentEffect[F]) {
def getJobsRss: F[Rss] = {
val target = uri"https://www.upwork.com/ab/feed/jobs/rss?and_terms=scala&exclude_terms=kotlin+java+javascript+python+php+ruby&sort=recency&paging=0%3B10&api_params=1&q=scala+AND+NOT+%28kotlin+OR+java+OR+javascript+OR+python+OR+php+OR+ruby%29"
httpClient.expect[Rss](target)
}
}
|
MercurieVV/jobs-crawler
|
project/plugins.sbt
|
addSbtPlugin("com.github.tkawachi" % "sbt-doctest" % "0.9.6")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0")
addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.0.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0")
addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.1.12")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.0")
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.4.9")
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.16")
addSbtPlugin("net.vonbuchholtz" % "sbt-dependency-check" % "2.0.0")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.1")
addSbtPlugin("io.stryker-mutator" % "sbt-stryker4s" % "0.8.1")
addSbtPlugin("org.scalaxb" % "sbt-scalaxb" % "1.7.3")
addSbtPlugin("com.gilt.sbt" % "sbt-aws-lambda" % "0.7.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7")
resolvers += Resolver.sonatypeRepo("public")
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/package.scala
|
package com.github.mercurievv
import cats.data.{NonEmptyChain, Validated}
import zio.Task
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/19/2020
* Time: 6:34 PM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
package object jobsearch {
type AIO[+A] = Task[A]
type Errorable[A] = Validated[NonEmptyChain[Throwable], A]
}
|
MercurieVV/jobs-crawler
|
core/src/main/scala/com/github/mercurievv/jobsearch/persistence/package.scala
|
<filename>core/src/main/scala/com/github/mercurievv/jobsearch/persistence/package.scala
package com.github.mercurievv.jobsearch
import com.github.mercurievv.jobsearch.model.{Company, Id, JobsResource, Tag}
import enumeratum.NoSuchMember
import org.http4s.Uri
import org.scanamo.DynamoFormat
/**
* Created with IntelliJ IDEA.
* User: <NAME>
* Date: 3/13/2020
* Time: 11:50 AM
* Contacts: email: <EMAIL> Skype: 'grobokopytoff' or 'mercurievv'
*/
package object persistence {
implicit val jobsResourceFormat: DynamoFormat[JobsResource] =
DynamoFormat.coercedXmap[JobsResource, String, NoSuchMember[JobsResource]](JobsResource.withName)(_.entryName)
implicit val urlFormat: DynamoFormat[Uri] =
DynamoFormat.coercedXmap[Uri, String, RuntimeException](Uri.unsafeFromString)(_.renderString)
implicit val idFormat: DynamoFormat[Id] = DynamoFormat.iso[Id, String](Id(_), id => id)
implicit val taggFormat: DynamoFormat[Tag] = DynamoFormat.iso[Tag, String](Tag(_), id => id)
implicit val companyFormat: DynamoFormat[Company] = DynamoFormat.iso[Company, String](Company(_), id => id)
}
|
Liuxg16/BrainMatrix
|
scala-package/core/src/test/scala/ml/dmlc/mxnet/train/ConvSuite.scala
|
<gh_stars>100-1000
package ml.dmlc.mxnet.train
import ml.dmlc.mxnet.optimizer.SGD
import ml.dmlc.mxnet._
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.slf4j.LoggerFactory
import scala.collection.mutable.ListBuffer
import scala.sys.process._
class ConvSuite extends FunSuite with BeforeAndAfterAll {
private val logger = LoggerFactory.getLogger(classOf[ConvSuite])
test("train mnist") {
// symbol net
val batchSize = 100
val data = Symbol.Variable("data")
val conv1 = Symbol.Convolution(name = "conv1")(Map("data" -> data, "num_filter" -> 32,
"kernel" -> (3, 3), "stride" -> (2, 2)))
val bn1 = Symbol.BatchNorm(name = "bn1")(Map("data" -> conv1))
val act1 = Symbol.Activation(name = "relu1")(Map("data" -> bn1, "act_type" -> "relu"))
val mp1 = Symbol.Pooling(name = "mp1")(Map("data" -> act1, "kernel" -> (2, 2),
"stride" -> (2, 2), "pool_type" -> "max"))
val conv2 = Symbol.Convolution(name = "conv2")(Map("data" -> mp1, "num_filter" -> 32,
"kernel" -> (3, 3), "stride" -> (2, 2)))
val bn2 = Symbol.BatchNorm(name = "bn2")(Map("data" -> conv2))
val act2 = Symbol.Activation(name = "relu2")(Map("data" -> bn2, "act_type" -> "relu"))
val mp2 = Symbol.Pooling(name = "mp2")(Map("data" -> act2, "kernel" -> (2, 2),
"stride" -> (2, 2), "pool_type" -> "max"))
val fl = Symbol.Flatten(name = "flatten")(Map("data" -> mp2))
val fc2 = Symbol.FullyConnected(name = "fc2")(Map("data" -> fl, "num_hidden" -> 10))
val softmax = Symbol.SoftmaxOutput(name = "sm")(Map("data" -> fc2))
// get data
"./scripts/get_mnist_data.sh" !
val trainDataIter = IO.MNISTIter(Map(
"image" -> "data/train-images-idx3-ubyte",
"label" -> "data/train-labels-idx1-ubyte",
"data_shape" -> "(1, 28, 28)",
"label_name" -> "sm_label",
"batch_size" -> batchSize.toString,
"shuffle" -> "1",
"flat" -> "0",
"silent" -> "0",
"seed" -> "10"))
val valDataIter = IO.MNISTIter(Map(
"image" -> "data/t10k-images-idx3-ubyte",
"label" -> "data/t10k-labels-idx1-ubyte",
"data_shape" -> "(1, 28, 28)",
"label_name" -> "sm_label",
"batch_size" -> batchSize.toString,
"shuffle" -> "1",
"flat" -> "0", "silent" -> "0"))
val model = FeedForward.newBuilder(softmax)
.setContext(Context.cpu())
.setNumEpoch(1)
.setOptimizer(new SGD(learningRate = 0.1f, momentum = 0.9f, wd = 0.0001f))
.setTrainData(trainDataIter)
.setEvalData(valDataIter)
.build()
logger.info("Finish fit ...")
val probArrays = model.predict(valDataIter)
assert(probArrays.length === 1)
val prob = probArrays(0)
logger.info("Finish predict ...")
valDataIter.reset()
val labels = ListBuffer.empty[NDArray]
while (valDataIter.hasNext) {
val evalData = valDataIter.next()
labels += evalData.label(0).copy()
}
val y = NDArray.concatenate(labels)
val py = NDArray.argmaxChannel(prob)
assert(y.shape === py.shape)
var numCorrect = 0
var numInst = 0
for ((labelElem, predElem) <- y.toArray zip py.toArray) {
if (labelElem == predElem) {
numCorrect += 1
}
numInst += 1
}
val acc = numCorrect.toFloat / numInst
logger.info(s"Final accuracy = $acc")
assert(acc > 0.96)
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/util/CVTool.scala
|
<reponame>Liuxg16/BrainMatrix
package thu.brainmatrix.util
import thu.brainmatrix.NDArray
import com.sksamuel.scrimage.Image
import com.sksamuel.scrimage.Pixel
import com.sksamuel.scrimage.filter.GaussianBlurFilter
import com.sksamuel.scrimage.nio.JpegWriter
object CVTool {
def saveImage(img: NDArray, filename: String, radius: Int): Unit = {
val out = postprocessImage(img)
val gauss = GaussianBlurFilter(radius).op
val result = Image(out.width, out.height)
gauss.filter(out.awt, result.awt)
result.output(filename)(JpegWriter()) // write the blurred copy, not the unfiltered source
}
// can process (n,1,-1,-1) ndarray
def saveFlattenImage(img: NDArray, filename: String): Unit = {
val shape = img.shape
assert(shape.length == 4)
val (n, c, h, w) = (shape(0), shape(1), shape(2), shape(3))
val spatialSize = n*c*h*w
val totals = h * w
img *= 255
val row, col = Math.sqrt(n).toInt
val imgs_rows = (0 until row) map{r =>
val imgs_row = (0 until col) map{c =>
img.slice(r*col+c).reshape(Array(h,w))
}
val temp = NDArray.transpose(NDArray.concatenate(imgs_row:_*))
imgs_row.foreach {_.dispose()}
temp
}
val imgs_nda = NDArray.transpose(NDArray.concatenate(imgs_rows:_*))
imgs_rows.foreach {_.dispose()}
val out = process2DImage(imgs_nda)
imgs_nda.dispose()
out.output(filename)(JpegWriter())
// val lineArrs = rawData.grouped(col * c * totals)
// for (line <- lineArrs) {
// val imgArr = line.grouped(c * totals)
// for(arr <- imgArr)
// src.add(getImg(arr, c, h, w, flip))
//
// }
//
// val pixels = for (i <- 0 until spatialSize)
// yield Pixel(r(i).toInt, g(i).toInt, b(i).toInt, 255)
// Image(img.shape(3), img.shape(2), pixels.toArray)
//
//
// val out = postprocessImage(img)
// val gauss = GaussianBlurFilter(radius).op
// val result = Image(out.width, out.height)
// gauss.filter(out.awt, result.awt)
// out.output(filename)(JpegWriter())
}
def saveGrayImage(img: NDArray, filename: String): Unit = {
val out = processGrayImage(img)
out.output(filename)(JpegWriter())
}
def saveRGBImage(img: NDArray, filename: String): Unit = {
val out = postprocessImage(img)
out.output(filename)(JpegWriter())
}
/**
* @author :<NAME>
* @date
* @brief convert an NDArray whose shape matches (3,m,n) into an RGB Image
* @param
* @return
* @example
* @note
*/
def postprocessImage(img: NDArray): Image = {
val datas = img.toArray
val spatialSize = img.shape(2) * img.shape(3)
val r = clip(datas.take(spatialSize).map(_ + 123.68f))
val g = clip(datas.drop(spatialSize).take(spatialSize).map(_ + 116.779f))
val b = clip(datas.takeRight(spatialSize).map(_ + 103.939f))
val pixels = for (i <- 0 until spatialSize)
yield Pixel(r(i).toInt, g(i).toInt, b(i).toInt, 255)
Image(img.shape(3), img.shape(2), pixels.toArray)
}
/**
* @author
* @date
* @brief convert an NDArray into a gray Image
* @param
* @return
* @example
* @note
*/
def processGrayImage(img: NDArray): Image = {
val datas = img.toArray
val spatialSize = img.shape(2) * img.shape(3)
val r = clip(datas.take(spatialSize))
val g = clip(datas.take(spatialSize))
val b = clip(datas.take(spatialSize))
val pixels = for (i <- 0 until spatialSize)
yield Pixel(r(i).toInt, g(i).toInt, b(i).toInt, 255)
Image(img.shape(3), img.shape(2), pixels.toArray)
}
/**
* @author
* @date
* @brief convert a 2D NDArray into a gray Image
* @param
* @return
* @example
* @note
*/
def process2DImage(img: NDArray): Image = {
val datas = img.toArray
val spatialSize = img.shape(0) * img.shape(1)
val r = clip(datas.take(spatialSize))
val g = clip(datas.take(spatialSize))
val b = clip(datas.take(spatialSize))
val pixels = for (i <- 0 until spatialSize)
yield Pixel(r(i).toInt, g(i).toInt, b(i).toInt, 255)
Image(img.shape(0), img.shape(1), pixels.toArray)
}
/**
* @author
* @date
* @brief clamp every element of the input array into [0,255]
* @param Array[Float]
* @return
* @example
* @note
*/
private def clip(array: Array[Float]): Array[Float] = array.map { a =>
if (a < 0) 0f
else if (a > 255) 255f
else a
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/AttrScope.scala
|
<filename>scalakernel/src/main/java/thu/brainmatrix/AttrScope.scala
package thu.brainmatrix
/**
* Attribute manager for scoping.
* User can also inherit this object to change naming behavior.
* @author <NAME>
*/
class AttrScope(attr: Map[String, String] = Map.empty) {
private var _attr = attr
/**
* Get the attribute dict given the attribute set by the symbol.
* @param userDefinedAttr The attribute passed in by user during symbol creation.
* @return Updated attributes to add other scope related attributes.
*/
def get(userDefinedAttr: Option[Map[String, String]]): Map[String, String] = {
_attr ++ userDefinedAttr.getOrElse(Map.empty[String, String])
}
def withScope[T](body: => T): T = {
val oldAttrScope = AttrScope.current
this._attr = AttrScope.current._attr ++ this._attr
AttrScope.setCurrentAttr(this)
try {
body
} finally {
AttrScope.setCurrentAttr(oldAttrScope)
}
}
}
object AttrScope {
private var _current = new AttrScope()
def current: AttrScope = _current
private def setCurrentAttr(attr: AttrScope): Unit = {
_current = attr
}
def apply(attr: Map[String, String] = Map.empty): AttrScope = new AttrScope(attr)
}
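// --- Added usage sketch (not part of the original file) ---
// Attributes installed via withScope are merged into whatever is created inside
// the block (the key below is hypothetical):
//
//   AttrScope(Map("group" -> "stage1")).withScope {
//     // symbols built here pick up group=stage1 through
//     // AttrScope.current.get(userDefinedAttr)
//   }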
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/util/CodeTrick.scala
|
package thu.brainmatrix.util
object CodeTrick {
def main(args:Array[String]){
ArrayFillTest
}
def ArrayFillTest{
val arr = Array.fill[Float](5)(3f)
(arr).foreach(println)
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/synapse/Dendrite.scala
|
<filename>scalakernel/src/main/java/thu/brainmatrix/synapse/Dendrite.scala
package thu.brainmatrix.synapse
import thu.brainmatrix.NDArray
import thu.brainmatrix.Context
import thu.brainmatrix.Shape
class Dendrite(val ctx: Context = Context.defaultCtx) extends Module{
val onenda = NDArray.ones(Config.SHAPE,ctx)
var currentinput = NDArray.zeros(Config.SHAPE,ctx);
override var variable_table = Array[String]("postVm")
override var variableindices = Array(-1)
//connectivity
var synapses = Vector[Synapse]();
// parameters
var gK :NDArray = onenda //
var Vk :NDArray = onenda * -70f; // reversal potential for K channel
var Cm :NDArray = onenda * 10; // membrane capacitance
// variables
var postVm = onenda * -70f;
def set(gK:NDArray, Vk:NDArray,Cm:NDArray,postVm:NDArray){
this.gK = gK;
this.Vk = Vk;
this.Cm = Cm;
this.postVm = postVm;
}
def getSynapses(idx:Int) :Synapse = {
return synapses(idx);
}
def addSynapse(s:Synapse){
s.dendrite = this
synapses = synapses.:+(s)
}
override def getInitial():Array[NDArray] = {
Array(this.postVm)
}
override def update(t: NDArray,y:Array[NDArray],yDot:Array[NDArray],indices:Array[Int]):Array[NDArray] = {
this.postVm = y(indices(0));
var postI = onenda
if(this.currentinput!=0){
postI = this.currentinput;
}
var tEPSC = NDArray.zeros(Config.SHAPE,ctx);
for(i<- 0 until this.synapses.length){
tEPSC += this.synapses(i).EPSC;
}
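    // Added note: leak-conductance membrane equation,
    // Cm * dV/dt = -(I_EPSC + I_inj + gK * (V - Vk))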
val d_postVm = - (tEPSC+postI+this.gK*(this.postVm-this.Vk))/this.Cm;
yDot(indices(0)) = d_postVm;
yDot
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/synapse_symbol/Synapse.scala
|
<reponame>Liuxg16/BrainMatrix
package thu.brainmatrix.synapse_symbol
import thu.brainmatrix.NDArray
import thu.brainmatrix.Symbol
import thu.brainmatrix.Context
import thu.brainmatrix.Shape
class Synapse(val ctx: Context = Context.defaultCtx,val name:String) extends Module{
// a basic synapse
override var variable_table = Array[String]("preCa","preCaBuff","aPreCDK","preSensor","aPreTrkB","preNR2B","preAbeta","preMg",
"postCa","postCaBuff","aPostCN","aPostTrkB","qNMDAR")
override var variableindices = Array.fill[Int](this.variable_table.length)(-1)
var BDNF :Symbol = null
var tmp :Symbol = null
override def getSymbol() = this.tmp
// connectivity
var axon:Axon = null
var dendrite:Dendrite = null
var Vs = Symbol.CreateVariable(s"Vs_$name") // threshold for the activation of VGCC
var gK = Symbol.CreateVariable(s"gK_$name") //
var Vk = Symbol.CreateVariable(s"Vk_$name") // reversal potential for K channel
var Vr = Symbol.CreateVariable(s"Vr_$name") // resting Vm
var Ve = Symbol.CreateVariable(s"Ve_$name") // reversal potential for AMPA channel
var Vca = Symbol.CreateVariable(s"Vca_$name") // reversal potential for Ca flux though NMDAR
var Cm = Symbol.CreateVariable(s"Cm_$name") // membrane capacitance
var Mgo = Symbol.CreateVariable(s"Mgo_$name") // [Mg]o = 1.2 mM
var kMgNMDAR = Symbol.CreateVariable(s"kMgNMDAR_$name") // NMDAR Mg block
var kNMDARIn = Symbol.CreateVariable(s"kNMDARIn_$name") // constitutive insertion of NMDAR
var kNMDARCa = Symbol.CreateVariable(s"kNMDARCa_$name") // converting Inmda to Ca flux
var CaBasal = Symbol.CreateVariable(s"CaBasal_$name") // basal Ca influx
var Mg50 = Symbol.CreateVariable(s"Mg50_$name") // half-activation [Mg]i (50% point of the Boltzmann sigmoid)
var MgSlope = Symbol.CreateVariable(s"MgSlope_$name")
// sensor
var aCDK50 = Symbol.CreateVariable(s"aCDK50_$name")
var aCDKSlope = Symbol.CreateVariable(s"aCDKSlope_$name")
//presynaptic constants
var preVol = Symbol.CreateVariable(s"preVol_$name") // volume of presynaptic terminals
var a1 = Symbol.CreateVariable(s"a1_$name") // k+ of Ca buffer
var b1 = Symbol.CreateVariable(s"b1_$name") // k- of Ca buffer
var tPreCaBuffer = Symbol.CreateVariable(s"tPreCaBuffer_$name")// presynaptic Ca buffer
var kSensorDeg = Symbol.CreateVariable(s"kSensorDeg_$name") // rate of sensor degradation
var a2 = Symbol.CreateVariable(s"a2_$name") // k+ of CDK
var b2 = Symbol.CreateVariable(s"b2_$name") // k- of CDK
var aCDKNR2B50 = Symbol.CreateVariable(s"aCDKNR2B50_$name")
var aCDKNR2BSlope = Symbol.CreateVariable(s"aCDKNR2BSlope_$name")
var a4 = Symbol.CreateVariable(s"a4_$name") // k+ of pre TrkB
var b4 = Symbol.CreateVariable(s"b4_$name") // k- of pre TrkB
var aPreTrkB50 = Symbol.CreateVariable(s"aPreTrkB50_$name")
var aPreTrkBSlope = Symbol.CreateVariable(s"aPreTrkBSlope_$name")
var kpreNR2BIn = Symbol.CreateVariable(s"kpreNR2BIn_$name")
var kpreAbetaDeg = Symbol.CreateVariable(s"kpreAbetaDeg_$name")
var kMgIn = Symbol.CreateVariable(s"kMgIn_$name") // constitutive Mg influx (TRPM7...)
var kpreMgOut = Symbol.CreateVariable(s"kpreMgOut_$name") // kMgIn/kMgOut = 0.5
var kBDNFMg = Symbol.CreateVariable(s"kBDNFMg_$name") // influence of BDNF to Mg influx
//postsynaptic constants
var postVol = Symbol.CreateVariable(s"postVol_$name") // volume of postsynaptic spine
var qAMPA = Symbol.CreateVariable(s"qAMPA_$name") // AMPAR
var kpostCaOut = Symbol.CreateVariable(s"kpostCaOut_$name") // Ca extrusion from spine
var CN50 = Symbol.CreateVariable(s"CN50_$name")
var CNSlope = Symbol.CreateVariable(s"CNSlope_$name")
var TrkB50 = Symbol.CreateVariable(s"TrkB50_$name")
var TrkBSlope = Symbol.CreateVariable(s"TrkBSlope_$name")
var a5 = Symbol.CreateVariable(s"a5_$name") // k+ of Ca buffer
var b5 = Symbol.CreateVariable(s"b5_$name") // k- of Ca buffer
var tPostCaBuffer = Symbol.CreateVariable(s"tPostCaBuffer_$name")
var a6 = Symbol.CreateVariable(s"a6_$name") // k+ of CN activation
var b6 = Symbol.CreateVariable(s"b6_$name") // k- of CN
var a7 = Symbol.CreateVariable(s"a7_$name") // TrkB activation
var b7 = Symbol.CreateVariable(s"b7_$name") // TrkB deactivation
//presynaptic variables
var preCa :Symbol= Symbol.CreateVariable(s"preCa_$name") // presynaptic [Ca]i
var preCaBuff :Symbol= Symbol.CreateVariable(s"preCaBuff_$name")// presynaptic [Mg]i
var aPreCDK :Symbol= Symbol.CreateVariable(s"aPreCDK_$name")
var preSensor :Symbol= Symbol.CreateVariable(s"preSensor_$name") // presynaptic [sensor]
var aPreTrkB :Symbol= Symbol.CreateVariable(s"aPreTrkB_$name")
var preNR2B :Symbol= Symbol.CreateVariable(s"preNR2B_$name")
var preAbeta :Symbol= Symbol.CreateVariable(s"preAbeta_$name")
var preMg :Symbol= Symbol.CreateVariable(s"preMg_$name")
//postsynaptic variables
var postCa :Symbol= Symbol.CreateVariable(s"postCa_$name")
var postCaBuff :Symbol= Symbol.CreateVariable(s"postCaBuff_$name")
var aPostCN :Symbol= Symbol.CreateVariable(s"aPostCN_$name")
var aPostTrkB :Symbol= Symbol.CreateVariable(s"aPostTrkB_$name")
var qNMDAR :Symbol= Symbol.CreateVariable(s"qNMDAR_$name") // qNMDAR
// other variables for communication between compartments
var EPSC = Config.zero_s
val onenda = NDArray.ones(Config.SHAPE,ctx)
// parameters
// common constants
var Vs_nda :NDArray = onenda * -50f // threshold for the activation of VGCC
var gK_nda :NDArray = onenda * 1; //
var Vk_nda :NDArray = onenda * -70; // reversal potential for K channel
var Vr_nda :NDArray = onenda * -70; // resting Vm
var Ve_nda :NDArray = onenda * 0; // reversal potential for AMPA channel
var Vca_nda :NDArray = onenda * 30; // reversal potential for Ca flux though NMDAR
var Cm_nda :NDArray = onenda * 10; // membrane capacitance
var Mgo_nda :NDArray = onenda * 1.2f; // [Mg]o = 1.2 mM
var kMgNMDAR_nda :NDArray = onenda * 4.5f; // NMDAR Mg block
var kNMDARIn_nda :NDArray = onenda * 0.00004f; // constitutive insertion of NMDAR
var kNMDARCa_nda :NDArray = onenda * -0.2f; // converting Inmda to Ca flux
var CaBasal_nda :NDArray = onenda * 0.001f; // basal Ca influx
// magnesium
var Mg50_nda :NDArray = onenda * 400f; // half-activation [Mg]i (50% point of the Boltzmann sigmoid)
var MgSlope_nda :NDArray = onenda * 50f;
// sensor
var aCDK50_nda :NDArray = onenda * 0.5f;
var aCDKSlope_nda :NDArray = onenda * 0.1f;
//presynaptic constants
var preVol_nda :NDArray = onenda // volume of presynaptic terminals
var a1_nda :NDArray = onenda * 0.02f; // k+ of Ca buffer
var b1_nda :NDArray = onenda * 0.0001f; // k- of Ca buffer
var tPreCaBuffer_nda :NDArray = onenda * 5f; // presynaptic Ca buffer
var kSensorDeg_nda :NDArray = onenda * 0.0001f; // rate of sensor degradation
var a2_nda :NDArray = onenda * 0.01f // k+ of CDK
var b2_nda :NDArray = onenda * 0.0001f; // k- of CDK
var aCDKNR2B50_nda :NDArray = onenda * 0.7f;
var aCDKNR2BSlope_nda :NDArray = onenda * 0.3f;
var a4_nda :NDArray = onenda * 0.00025f; // k+ of pre TrkB
var b4_nda :NDArray = onenda * 0.0002f; // k- of pre TrkB
var aPreTrkB50_nda :NDArray = onenda * 0.4f;
var aPreTrkBSlope_nda :NDArray = onenda * 0.1f;
var kpreNR2BIn_nda :NDArray = onenda * 0.000025f;
var kpreAbetaDeg_nda :NDArray = onenda * 0.0001f;
var kMgIn_nda :NDArray = onenda * 0.004f; // constitutive Mg influx (TRPM7...)
var kpreMgOut_nda :NDArray = onenda * 0.00002f; // kMgIn/kMgOut = 0.5
var kBDNFMg_nda :NDArray = onenda * 0.04f; // influence of BDNF to Mg influx
//postsynaptic constants
var postVol_nda :NDArray = onenda * 1f; // volume of postsynaptic spine
var qAMPA_nda :NDArray = onenda * 0.2f; // AMPAR
var kpostCaOut_nda :NDArray = onenda * 0.1f; // Ca extrusion from spine
var CN50_nda :NDArray = onenda * 0.55f;
var CNSlope_nda :NDArray = onenda * 0.1f;
var TrkB50_nda :NDArray = onenda * 0.4f;
var TrkBSlope_nda :NDArray = onenda * 0.1f;
var a5_nda :NDArray = onenda * 0.005f; // k+ of Ca buffer
var b5_nda :NDArray = onenda * 0.001f; // k- of Ca buffer
var tPostCaBuffer_nda :NDArray = onenda * 1f;
var a6_nda :NDArray = onenda * 0.005f; // k+ of CN activation
var b6_nda :NDArray = onenda * 0.001f; // k- of CN
var a7_nda :NDArray = onenda * 0.0003f; // TrkB activation
var b7_nda :NDArray = onenda * 0.0002f; // TrkB deactivation
//presynaptic variables
var preCa_nda :NDArray = onenda * 0; // presynaptic [Ca]i
var preCaBuff_nda :NDArray = onenda * 5; // presynaptic [Mg]i
var aPreCDK_nda :NDArray = onenda * 0;
var preSensor_nda :NDArray = onenda * 0.7f; // presynaptic [sensor]
var aPreTrkB_nda :NDArray = onenda * 0;
var preNR2B_nda :NDArray = onenda * 1;
var preAbeta_nda :NDArray = onenda * 1;
var preMg_nda :NDArray = onenda * 400;
//postsynaptic variables
var postCa_nda :NDArray = onenda * 0f;
var postCaBuff_nda :NDArray = onenda * 1f;
var aPostCN_nda :NDArray = onenda * 0f;
var aPostTrkB_nda :NDArray = onenda * 0f;
var qNMDAR_nda :NDArray = onenda * 1f; // qNMDAR
// other variables for communication between compartments
var EPSC_nda :NDArray = onenda * 0f;
//initial y
var y_preCa_nda :NDArray = onenda * 0; // presynaptic [Ca]i
var y_preCaBuff_nda :NDArray = onenda * 5; // presynaptic [Mg]i
var y_aPreCDK_nda :NDArray = onenda * 0;
var y_preSensor_nda :NDArray = onenda * 0.7f; // presynaptic [sensor]
var y_aPreTrkB_nda :NDArray = onenda * 0;
var y_preNR2B_nda :NDArray = onenda * 1;
var y_preAbeta_nda :NDArray = onenda * 1;
var y_preMg_nda :NDArray = onenda * 400;
//postsynaptic variables
var y_postCa_nda :NDArray = onenda * 0f;
var y_postCaBuff_nda :NDArray = onenda * 1f;
var y_aPostCN_nda :NDArray = onenda * 0f;
var y_aPostTrkB_nda :NDArray = onenda * 0f;
var y_qNMDAR_nda :NDArray = onenda * 1f; // qNMDAR
override def getSymbolMap():Map[String,NDArray] = {
Map (s"Vs_$name" -> Vs_nda ,
s"gK_$name" -> gK_nda ,
s"Vk_$name" -> Vk_nda ,
s"Vr_$name" -> Vr_nda ,
s"Ve_$name" -> Ve_nda ,
s"Vca_$name" -> Vca_nda ,
s"Cm_$name" -> Cm_nda ,
s"Mgo_$name" -> Mgo_nda ,
s"kMgNMDAR_$name" -> kMgNMDAR_nda,
s"kNMDARIn_$name" -> kNMDARIn_nda,
s"kNMDARCa_$name" -> kNMDARCa_nda,
s"CaBasal_$name" -> CaBasal_nda ,
s"Mg50_$name" -> Mg50_nda ,
s"MgSlope_$name" -> MgSlope_nda ,
s"aCDK50_$name" -> aCDK50_nda ,
s"aCDKSlope_$name" -> aCDKSlope_nda ,
s"preVol_$name" -> preVol_nda ,
s"a1_$name" -> a1_nda ,
s"b1_$name" -> b1_nda ,
s"tPreCaBuffer_$name" -> tPreCaBuffer_nda ,
s"kSensorDeg_$name" -> kSensorDeg_nda ,
s"a2_$name" -> a2_nda ,
s"b2_$name" -> b2_nda ,
s"aCDKNR2B50_$name" -> aCDKNR2B50_nda ,
s"aCDKNR2BSlope_$name" -> aCDKNR2BSlope_nda ,
s"a4_$name" -> a4_nda ,
s"b4_$name" -> b4_nda ,
s"aPreTrkB50_$name" -> aPreTrkB50_nda ,
s"aPreTrkBSlope_$name" -> aPreTrkBSlope_nda ,
s"kpreNR2BIn_$name" -> kpreNR2BIn_nda ,
s"kpreAbetaDeg_$name" -> kpreAbetaDeg_nda ,
s"kMgIn_$name" -> kMgIn_nda ,
s"kpreMgOut_$name" -> kpreMgOut_nda ,
s"kBDNFMg_$name" -> kBDNFMg_nda ,
s"postVol_$name" -> postVol_nda ,
s"qAMPA_$name" -> qAMPA_nda ,
s"kpostCaOut_$name" -> kpostCaOut_nda ,
s"CN50_$name" -> CN50_nda ,
s"CNSlope_$name" -> CNSlope_nda ,
s"TrkB50_$name" -> TrkB50_nda ,
s"TrkBSlope_$name" -> TrkBSlope_nda ,
s"a5_$name" -> a5_nda ,
s"b5_$name" -> b5_nda ,
s"tPostCaBuffer_$name" -> tPostCaBuffer_nda ,
s"a6_$name" -> a6_nda ,
s"b6_$name" -> b6_nda ,
s"a7_$name" -> a7_nda ,
s"b7_$name" -> b7_nda ,
s"preCa_$name" -> y_preCa_nda ,
s"preCaBuff_$name" -> y_preCaBuff_nda ,
s"aPreCDK_$name" -> y_aPreCDK_nda ,
s"preSensor_$name" -> y_preSensor_nda ,
s"aPreTrkB_$name" -> y_aPreTrkB_nda ,
s"preNR2B_$name" -> y_preNR2B_nda ,
s"preAbeta_$name" -> y_preAbeta_nda ,
s"preMg_$name" -> y_preMg_nda ,
s"postCa_$name" -> y_postCa_nda ,
s"postCaBuff_$name" -> y_postCaBuff_nda ,
s"aPostCN_$name" -> y_aPostCN_nda ,
s"aPostTrkB_$name" -> y_aPostTrkB_nda ,
s"qNMDAR_$name" -> y_qNMDAR_nda
)
}
override def getInitialVar():Array[String] = {
Array(s"y${this.variableindices(0)}" ,
s"y${this.variableindices(1)}" ,
s"y${this.variableindices(2)}" ,
s"y${this.variableindices(3)}" ,
s"y${this.variableindices(4)}" ,
s"y${this.variableindices(5)}" ,
s"y${this.variableindices(6)}" ,
s"y${this.variableindices(7)}" ,
s"y${this.variableindices(8)}" ,
s"y${this.variableindices(9)}" ,
s"y${this.variableindices(10)}" ,
s"y${this.variableindices(11)}" ,
s"y${this.variableindices(12)}")
}
override def getInitial(map : Map[String,NDArray]=null): Map[String,NDArray] = {
if(map==null)
Map(s"y${this.variableindices(0)}"->this.preCa_nda,
s"y${this.variableindices(1)}"->this.preCaBuff_nda,
s"y${this.variableindices(2)}"->this.aPreCDK_nda,
s"y${this.variableindices(3)}"->this.preSensor_nda,
s"y${this.variableindices(4)}"->this.aPreTrkB_nda,
s"y${this.variableindices(5)}"->this.preNR2B_nda,
s"y${this.variableindices(6)}"->this.preAbeta_nda,
s"y${this.variableindices(7)}"->this.preMg_nda,
s"y${this.variableindices(8)}"->this.postCa_nda,
s"y${this.variableindices(9)}"->this.postCaBuff_nda,
s"y${this.variableindices(10)}"->this.aPostCN_nda,
s"y${this.variableindices(11)}"->this.aPostTrkB_nda,
s"y${this.variableindices(12)}"->this.qNMDAR_nda)
else {
map
}
}
override def getInitialY():Array[NDArray] = {
Array(y_preCa_nda,
y_preCaBuff_nda,
y_aPreCDK_nda,
y_preSensor_nda,
y_aPreTrkB_nda,
y_preNR2B_nda,
y_preAbeta_nda,
y_preMg_nda,
y_postCa_nda,
y_postCaBuff_nda,
y_aPostCN_nda,
y_aPostTrkB_nda,
y_qNMDAR_nda)
}
override def update(t_onehot: Symbol, y:Array[Symbol],yDot:Array[ Symbol],indices:Array[Int]):Array[Symbol] = {
this.preCa = y(indices(0));
this.preCaBuff = y(indices(1));
this.aPreCDK = y(indices(2));
this.preSensor = y(indices(3));
this.aPreTrkB = y(indices(4));
this.preNR2B = y(indices(5));
this.preAbeta = y(indices(6));
this.preMg = y(indices(7));
this.postCa = y(indices(8));
this.postCaBuff = y(indices(9));
this.aPostCN = y(indices(10));
this.aPostTrkB = y(indices(11));
this.qNMDAR = y(indices(12));
//-------------------------Presynaptic dynamics-----------------------------
var preVm=this.axon.preVm;
val Ivgcc = Symbol.Activation("relu")(Map("data"->(preVm-this.Vs),"act_type"->"relu"))*0.05
var IpreNR2B = Config.zero_s
var IAChR = Config.zero_s
var preCaIn = IpreNR2B*this.kNMDARCa+Ivgcc+IAChR;
var fCaBuff = this.tPreCaBuffer-this.preCaBuff; // Ca buffer
var kpreCaOut = Config.one_s *(0.1f) / (Symbol.exp((this.Mg50-this.preMg)/this.MgSlope)+1); // Ca efflux is a function of [Mg]i (Boltzmann sigmoid function)
var d_preCa = (preCaIn+this.b1 * this.preCaBuff - (kpreCaOut+this.a1*fCaBuff)*this.preCa)/this.preVol; // dx/dt = (Jin-Jout)/vol
var d_preCaBuff = this.a1*fCaBuff*this.preCa-this.b1*this.preCaBuff; // presynaptic Ca buffer
//CDK
var d_aPreCDK = this.a2*(this.aPreCDK*(-1)+1)*this.preCa-this.b2*this.aPreCDK; // presynaptic CDK activation depends on Ca level
// sensor: insertion of sensor is inhibited by Ca-dependent activation of CDK5
var freeSensor = this.axon.freeSensor;
// freeSensor needs to be shared across synapses
var kSensorIn =(Config.one_s * 0.0001f)/(Symbol.exp((this.aPreCDK-this.aCDK50)/this.aCDKSlope)+1);
var d_preSensor = (kSensorIn*freeSensor-this.kSensorDeg*this.preSensor)/this.preVol;
//BDNF=this.matrix.BDNF
// right now BDNF retrograde signalling is synapse specific
this.tmp = this.qNMDAR * Config.one_s
this.BDNF = (this.dendrite.postVm-this.Vr)*this.tmp; // retrograde signalling following coincident detection
// TrkB activation depends on BDNF concentration
var d_aPreTrkB = this.a4*(Config.one_s+this.aPreTrkB*(-1))*BDNF-this.b4*this.aPreTrkB;
// degradation of presynaptic NR2B by a Calpain/CDK5-dependent process
var kpreNR2BDeg = (Config.one_s * 0.0002f)/(Symbol.exp((this.aCDKNR2B50-this.aPreCDK)/this.aCDKNR2BSlope)+1);
var d_preNR2B = (this.kpreNR2BIn - kpreNR2BDeg*this.preNR2B)/this.preVol;
// synthesis of Abeta is inhibited by BDNF
var kpreAbetaIn =(Config.one_s * 0.0001f)/(Symbol.exp((this.aPreTrkB-this.aPreTrkB50)/this.aPreTrkBSlope)+1);
var d_preAbeta = (kpreAbetaIn - this.kpreAbetaDeg*this.preAbeta)/this.preVol;
// Mg
var preMgIn = this.kMgIn+this.kBDNFMg*BDNF; // presynaptic Mg influx = constitutive + retrograde signalling
var d_preMg = (preMgIn-this.kpreMgOut*this.preMg)/this.preVol;
// ------------------postsynaptic--------------------------
var Pr = this.preCa*this.preSensor; // Pr: probability of release
var postVm = this.dendrite.postVm;
var Iampa = Pr*this.qAMPA*(postVm-this.Ve); // EPSCampa
var pMgBlock = Config.one_s /((this.Mgo/this.kMgNMDAR)*Symbol.exp(postVm*(-2f/25.4f))+1); // NMDAR Mg block
var Inmda = Pr*this.qNMDAR*(postVm - this.Vca)*pMgBlock; // EPSCnmda
this.EPSC = Iampa+Inmda;
//Calcium
var postCaIn = Inmda*this.kNMDARCa+this.CaBasal; // total postsynaptic Ca influx = NMDAR + VGCC
var fpostCaBuff = this.tPostCaBuffer-this.postCaBuff;
var d_postCa = (postCaIn+this.b5*this.postCaBuff - (this.kpostCaOut+this.a5*fpostCaBuff)*this.postCa)/this.postVol;
var d_postCaBuff = this.a5*fpostCaBuff*this.postCa-this.b5*this.postCaBuff; // postsynaptic Ca buffer
// degradation of NMDAR is promoted by CN Calcineurin, which is activated by [Ca], and
// protected by BDNF via activation of Src Kinase.
var pCN = Config.one_s /(Symbol.exp((this.CN50-this.aPostCN)/this.CNSlope)+1);
var d_aPostCN = this.a6*(this.aPostCN*(-1)+1)*this.postCa-this.b6*this.aPostCN; // postsynaptic CN activation
var pTrkB = Config.one_s/(Symbol.exp((this.aPostTrkB-this.TrkB50)/this.TrkBSlope)+1);
var d_aPostTrkB = this.a7*(this.aPostTrkB*(-1)+Config.one_s)*BDNF-this.b7*this.aPostTrkB;
var kNMDARdeg = pCN*pTrkB*0.005f; // BDNF/CN
var d_qNMDAR = this.kNMDARIn - kNMDARdeg*this.qNMDAR;
yDot(indices(0)) = d_preCa;
yDot(indices(1)) = d_preCaBuff;
yDot(indices(2)) = d_aPreCDK;
yDot(indices(3)) = d_preSensor;
yDot(indices(4)) = d_aPreTrkB;
yDot(indices(5)) = d_preNR2B;
yDot(indices(6)) = d_preAbeta;
yDot(indices(7)) = d_preMg;
yDot(indices(8)) = d_postCa;
yDot(indices(9)) = d_postCaBuff;
yDot(indices(10)) = d_aPostCN;
yDot(indices(11)) = d_aPostTrkB;
yDot(indices(12)) = d_qNMDAR
yDot
}
//connectivity
def getaxon():Axon = {
this.axon;
}
def getdendrite():Dendrite = {
this.dendrite;
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/gan/DCgan.scala
|
package thu.brainmatrix.gan
class DCgan {
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/OperatorProperty.scala
|
<reponame>Liuxg16/BrainMatrix<filename>scalakernel/src/main/java/thu/brainmatrix/OperatorProperty.scala
package thu.brainmatrix
import thu.brainmatrix.Base._
import scala.collection.mutable.{ListBuffer,ArrayBuffer}
import scala.Vector
/**
* by liuxianggen
* 2015-3-3
 * brief: like OperatorProperty in mxnet; provides the Operator class functionality
 * brief: OperatorPropertyHandle is the same as SymbolHandle; it corresponds to the atomic symbol
*/
class OperatorProperty(val handle:OperatorPropertyHandle,val opName:String) {
/*!
* by liuxianggen
* 2016-3-9
* \brief Initialize the Operator by setting the parameters
* This function need to be called before all other functions.
* \param kwargs the keyword arguments parameters
*/
def Init(paramKeys: Array[String],paramVals: Array[String]){
// call jni operator
checkCall(_LIB.mxScalaOpInit(handle, paramKeys, paramVals))
}
/*!
* by liuxianggen
* 2016-3-9
* \brief Get a map representation of internal parameters.
* This can be used by Init to recover the state of OperatorProperty.
*/
def printParam(){
// call jni operator
checkCall(_LIB.mxScalaOpPrintParam(handle))
}
/**
* by liuxianggen
* 2015-3-4
* brief according to the operator, return the input names
*/
def ListArguments: Vector[String] = {
// call jni operator
val arr = ArrayBuffer.empty[String]
checkCall(_LIB.mxScalaOpListArguments(handle, arr))
val arr_vec = arr.toVector
arr_vec
}
def ListOutputs: Vector[String] = {
Vector("output")
}
def ListAuxiliaryStates():Vector[String] = {
// call jni operator
val arr = ArrayBuffer.empty[String]
checkCall(_LIB.mxScalaOpListAuxiliaryStates(handle, arr))
val arr_vec = arr.toVector
arr_vec
}
// def Forward{
//
// }
def Copy(): OperatorProperty = {
val opHandleRef = new OperatorPropertyHandleRef
checkCall(_LIB.mxScalaOPCopy(handle, opHandleRef))
new OperatorProperty(opHandleRef.value,this.opName)
}
def NumVisibleOutputs(): Int = {
val intref= new MXUintRef
checkCall(_LIB.mxScalaOpNumVisibleOutputs(this.handle, intref))
intref.value
}
}
/*
* by liuxianggen
* 2016-3-20
*
*/
object OperatorProperty{
def apply(name:String):OperatorProperty = {
val opHandleRef = new OperatorPropertyHandleRef
val function = OperatorProperty.initSymbolModule()(name)
require(function != null, s"invalid operator name $name")
/*require(function.keyVarNumArgs == null || function.keyVarNumArgs.isEmpty,
"This function support variable length of Symbol arguments.\n" +
"Please pass all the input Symbols via positional arguments instead of keyword arguments.")*/
checkCall(_LIB.mxScalaCreateOperatorProperty(function.handle, opHandleRef))
new OperatorProperty(opHandleRef.value,name)
}
// List and add all the atomic symbol functions to current module.
private def initSymbolModule(): Map[String, ScalaSymbolFunction] = {
val symbolList = ListBuffer.empty[SymbolHandle]
checkCall(_LIB.mxSymbolListAtomicSymbolCreators(symbolList))
symbolList.map(makeAtomicSymbolFunction).toMap
}
// Create an atomic symbol function by handle and function name.
private def makeAtomicSymbolFunction(handle: SymbolHandle): (String, ScalaSymbolFunction) = {
val name = new RefString
val desc = new RefString
val keyVarNumArgs = new RefString
val numArgs = new MXUintRef
val argNames = ListBuffer.empty[String]
val argTypes = ListBuffer.empty[String]
val argDescs = ListBuffer.empty[String]
checkCall(_LIB.mxSymbolGetAtomicSymbolInfo(
handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs))
val paramStr = ctypes2docstring(argNames, argTypes, argDescs)
val docStr = s"${name.value}\n${desc.value}\n\n$paramStr\n keyVarNumArgs:${keyVarNumArgs.value}"
// println(docStr)
(name.value, new ScalaSymbolFunction(handle, keyVarNumArgs.value))
}
  }
}
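// --- Added usage sketch (not part of the original file; outputs are hypothetical) ---
//   val op = OperatorProperty("Convolution")      // any registered atomic symbol name
//   op.Init(Array("num_filter"), Array("32"))     // must be called before other methods
//   val inputs = op.ListArguments                 // e.g. Vector("data", "weight", "bias")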
private case class ScalaSymbolFunction(handle: ScalaSymbolHandle, keyVarNumArgs: String)
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/lstmbyguo/LSTMModel.scala
|
package thu.brainmatrix.lstmbyguo
import java.io.File
import java.io.FileNotFoundException
import scala.collection.immutable.Set
import scala.io.Source
import thu.brainmatrix.NDArray
import thu.brainmatrix.Random
import thu.brainmatrix.Shape
/*
 * This is a variant of the standard LSTM model; it differs from the standard
 * LSTM in that the input gate, forget gate, and output gate also receive the
 * cell state c(t), the "peephole connections" introduced by Gers & Schmidhuber (2000).
* This means that we let the gate layers look at the cell state.
*
*@author guoshen
*@date 2016/8/11
 *@ introduction: a model that generates text character by character.
* */
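/*
 * Added summary: the peephole-LSTM step computed in lossfunction below
 * (note this code applies a sigmoid, not the usual tanh, to the cell candidate):
 *   i_t  = sigma(Wxi*x_t + Whi*h_{t-1} + Wci*c_{t-1} + bi)
 *   f_t  = sigma(Wxf*x_t + Whf*h_{t-1} + Wcf*c_{t-1} + bf)
 *   c~_t = sigma(Wxc*x_t + Whc*h_{t-1} + bc)
 *   c_t  = f_t . c_{t-1} + i_t . c~_t            (elementwise)
 *   o_t  = sigma(Wxo*x_t + Who*h_{t-1} + Wco*c_t + bo)
 *   h_t  = o_t . tanh(c_t)
 */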
class LSTMModel {
}
object LSTMModel {
private val inputfilepath: String = "./seqData/inputs.txt" // absolute path of the data file
private val outputfilepath: String = "./seqData/outputs.txt"
private val matrixfilepath: String = "./seqData/matrixs.txt"
var outputfile = new File(outputfilepath)
// outputfile.deleteOnExit() // delete the old file
outputfile.createNewFile()
var matrixfile = new File(matrixfilepath)
matrixfile.createNewFile()
def lossfunction(inputs: Array[Int], targets: Array[Int],
hprev: NDArray, cprev: NDArray,
vocab_size: Int, cell_size: Int,
Wxi: NDArray, Whi: NDArray, Wci: NDArray, bi: NDArray,
Wxf: NDArray, Whf: NDArray, Wcf: NDArray, bf: NDArray,
Wxo: NDArray, Who: NDArray, Wco: NDArray, bo: NDArray,
Wxc: NDArray, Whc: NDArray, bc: NDArray,
Why: NDArray, by: NDArray): (NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, NDArray, Double, NDArray, NDArray) = {
val len: Int = inputs.length
var x: Array[NDArray] = new Array(len + 1) //input word vector
var h: Array[NDArray] = new Array(len + 1) //hidden
var i: Array[NDArray] = new Array(len + 1) //input gate
var f: Array[NDArray] = new Array(len + 1) //forget gate
var o: Array[NDArray] = new Array(len + 1) //output gate
var c_in: Array[NDArray] = new Array(len + 1)
var c: Array[NDArray] = new Array(len + 1) //cell
var y: Array[NDArray] = new Array(len + 1) //output used to turn to p
    var p: Array[NDArray] = new Array(len + 1) // softmax probability layer
var loss: Double = 0.0
h(0) = hprev
c(0) = cprev
//forward pass
for (t <- 1 to len) {
x(t) = NDArray.zeros(vocab_size, 1)
x(t)(inputs(t - 1)) = 1
//input gate
i(t) = NDArray.sigmod(NDArray.dot(Wxi, x(t)) + NDArray.dot(Whi, h(t - 1)) + NDArray.dot(Wci, c(t - 1)) + bi)
//forget gate
f(t) = NDArray.sigmod(NDArray.dot(Wxf, x(t)) + NDArray.dot(Whf, h(t - 1)) + NDArray.dot(Wcf, c(t - 1)) + bf)
//cell
c_in(t) = NDArray.sigmod(NDArray.dot(Wxc, x(t)) + NDArray.dot(Whc, h(t - 1)) + bc)
      c(t) = f(t) * c(t - 1) + i(t) * c_in(t) // element-wise products (not dot products): forget old memory, write new memory
//output gate
o(t) = NDArray.sigmod(NDArray.dot(Wxo, x(t)) + NDArray.dot(Who, h(t - 1)) + NDArray.dot(Wco, c(t)) + bo)
//cell output
h(t) = o(t) * NDArray.tanh(c(t))
//softmax
y(t) = NDArray.dot(Why, h(t))
var expy = NDArray.exp(y(t))
p(t) = expy / (NDArray.sum(expy).toScalar)
println("hehe:" + p(t).toArray(targets(t - 1)))
loss += -scala.math.log(p(t).toArray(targets(t - 1))) //损失函数,交叉熵
}
println("loss :" + loss)
    // backward pass
var dWxi = NDArray.zeros(Wxi.shape)
var dWhi = NDArray.zeros(Whi.shape)
var dWci = NDArray.zeros(Wci.shape)
var dbi = NDArray.zeros(bi.shape)
var dWxf = NDArray.zeros(Wxf.shape)
var dWhf = NDArray.zeros(Whf.shape)
var dWcf = NDArray.zeros(Wcf.shape)
var dbf = NDArray.zeros(bf.shape)
var dWxo = NDArray.zeros(Wxo.shape)
var dWho = NDArray.zeros(Who.shape)
var dWco = NDArray.zeros(Wco.shape)
var dbo = NDArray.zeros(bo.shape)
var dWxc = NDArray.zeros(Wxc.shape)
var dWhc = NDArray.zeros(Whc.shape)
var dbc = NDArray.zeros(bc.shape)
var dWhy = NDArray.zeros(Why.shape)
var dby = NDArray.zeros(by.shape)
var hi_next, hf_next, hc_in_next, hc_next, ho_next = NDArray.zeros(cell_size, 1)
var ci_next, cf_next, cc_in_next, cc_next, co_next = NDArray.zeros(cell_size, 1)
var iraw, fraw, c_inraw, craw, oraw = NDArray.zeros(cell_size, 1)
var dyt, dft, dit, dht, dot, dct, dc_int = NDArray.zeros(cell_size, 1)
val ones = NDArray.ones(cell_size, 1)
    for (step <- 0 until len) {
      val t = len - step
dyt = NDArray.copy(p(t)) //(vocab_size , 1)
dyt(targets(t - 1)) -= 1
dWhy += NDArray.dot(dyt, NDArray.transpose(h(t))) //( vocab_size , cell_size )
dby += dyt
dht = NDArray.dot(NDArray.transpose(Why), dyt) + hi_next + hf_next + hc_in_next + ho_next
var tanhct = NDArray.tanh(c(t))
dot = dht * tanhct //( cell_size , 1 )
oraw = (ones - o(t)) * o(t) * dot //( cell_size , 1 )
dct = dht * o(t) * (ones - tanhct * tanhct) + NDArray.dot(Wco, oraw) + cc_next + cf_next + ci_next
dWxo += NDArray.dot(oraw, NDArray.transpose(x(t)))
dWho += NDArray.dot(oraw, NDArray.transpose(h(t - 1)))
dWco += NDArray.dot(oraw, NDArray.transpose(c(t)))
dbo += oraw
dit = dct * c_in(t) //( cell_size , 1 )
dft = dct * c(t - 1) //( cell_size , 1 )
dc_int = dct * i(t) //( cell_size , 1 )
iraw = (ones - i(t)) * i(t) * dit //( cell_size , 1 )
fraw = (ones - f(t)) * f(t) * dft //( cell_size , 1 )
c_inraw = (ones - c_in(t)) * c_in(t) * dc_int //( cell_size , 1 )
dWxc += NDArray.dot(c_inraw, NDArray.transpose(x(t)))
dWhc += NDArray.dot(c_inraw, NDArray.transpose(h(t - 1)))
dbc += c_inraw
dWxf += NDArray.dot(fraw, NDArray.transpose(x(t)))
dWhf += NDArray.dot(fraw, NDArray.transpose(h(t - 1)))
dWcf += NDArray.dot(fraw, NDArray.transpose(c(t - 1)))
dbf += fraw
dWxi += NDArray.dot(iraw, NDArray.transpose(x(t)))
dWhi += NDArray.dot(iraw, NDArray.transpose(h(t - 1)))
dWci += NDArray.dot(iraw, NDArray.transpose(c(t - 1)))
dbi += iraw
hi_next = NDArray.dot(Whi, iraw)
hf_next = NDArray.dot(Whf, fraw)
hc_in_next = NDArray.dot(Whc, c_inraw)
ho_next = NDArray.dot(Who, oraw)
cc_next = dct * f(t)
cf_next = NDArray.dot(Wcf, fraw) * dft
ci_next = NDArray.dot(Wci, iraw) * dit
}
    // clip gradients to [-5, 5] to mitigate exploding gradients
    dWxi = NDArray.clip(dWxi, -5, 5); dWhi = NDArray.clip(dWhi, -5, 5)
    dWci = NDArray.clip(dWci, -5, 5); dbi = NDArray.clip(dbi, -5, 5)
    dWxf = NDArray.clip(dWxf, -5, 5); dWhf = NDArray.clip(dWhf, -5, 5)
    dWcf = NDArray.clip(dWcf, -5, 5); dbf = NDArray.clip(dbf, -5, 5)
    dWxo = NDArray.clip(dWxo, -5, 5); dWho = NDArray.clip(dWho, -5, 5)
    dWco = NDArray.clip(dWco, -5, 5); dbo = NDArray.clip(dbo, -5, 5)
    dWxc = NDArray.clip(dWxc, -5, 5); dWhc = NDArray.clip(dWhc, -5, 5)
    dbc = NDArray.clip(dbc, -5, 5)
    dWhy = NDArray.clip(dWhy, -5, 5); dby = NDArray.clip(dby, -5, 5)
(dWxi, dWhi, dWci, dbi,
dWxf, dWhf, dWcf, dbf,
dWxo, dWho, dWco, dbo,
dWxc, dWhc, dbc,
dWhy, dby,
loss, h(len), c(len))
}
def main(args: Array[String]) {
var data: String = ""
var chars: Array[Char] = Array()
    var data_size, vocab_size = 0; // data_size: length of the input text; vocab_size: size of the character vocabulary
try {
      val tempdata = Source.fromFile(new File(inputfilepath)).getLines().toList // read the file as a list of lines
      var set: Set[Char] = Set() // collect the characters of the data into a set
      for (i <- tempdata) {
        set = set.++(i.toSet); data += i + '\n'
      }
      chars = (set.+('\n')).toArray // minor bug: this is wrong when the input text contains no newline
vocab_size = chars.length; data_size = data.length()
} catch {
case e: FileNotFoundException => { println("File Not Found Exception") } // TODO: handle error
}
    var char_to_ix: Map[Char, Int] = Map() // character -> index
    var ix_to_char: Map[Int, Char] = Map() // index -> character
for (index <- 0 until vocab_size) {
char_to_ix += (chars(index) -> index)
ix_to_char += (index -> chars(index))
}
val cell_size: Int = 128*4
//input gate parameters
var Wxi = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, vocab_size))
var Whi = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, cell_size))
var Wci = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, cell_size))
var bi = NDArray.zeros(cell_size, 1)
//forget gate parameters
var Wxf = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, vocab_size))
var Whf = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, cell_size))
var Wcf = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, cell_size))
var bf = NDArray.zeros(cell_size, 1)
//cell parameters
var Wxc = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, vocab_size))
var Whc = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, cell_size))
var bc = NDArray.zeros(cell_size, 1)
//output gate parameters
var Wxo = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, vocab_size))
var Who = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, cell_size))
var Wco = Random.uniform(0.toFloat, 0.01.toFloat, Shape(cell_size, cell_size))
var bo = NDArray.zeros(cell_size, 1)
//output parameters
    var Why = Random.uniform(0.toFloat, 0.01.toFloat, Shape(vocab_size, cell_size)) // this layer feeds the softmax
var by = NDArray.zeros(vocab_size, 1)
    val seq_length = 25 // number of characters per training sample
val learning_rate = 2e-3.toFloat
    var n: Int = 0 // iteration counter
    var p: Int = 0 // pointer to the start of the current input window
    var hprev = NDArray.zeros(cell_size, 1) // previous hidden state, initialized to 0
    var cprev = NDArray.zeros(cell_size, 1) // previous cell state, initialized to 0
    // Adagrad accumulators: declared outside the loop so the squared-gradient
    // history persists across iterations
    var mWxi = NDArray.zeros(Wxi.shape)
    var mWhi = NDArray.zeros(Whi.shape)
    var mWci = NDArray.zeros(Wci.shape)
    var mbi = NDArray.zeros(bi.shape)
    var mWxf = NDArray.zeros(Wxf.shape)
    var mWhf = NDArray.zeros(Whf.shape)
    var mWcf = NDArray.zeros(Wcf.shape)
    var mbf = NDArray.zeros(bf.shape)
    var mWxc = NDArray.zeros(Wxc.shape)
    var mWhc = NDArray.zeros(Whc.shape)
    var mbc = NDArray.zeros(bc.shape)
    var mWxo = NDArray.zeros(Wxo.shape)
    var mWho = NDArray.zeros(Who.shape)
    var mWco = NDArray.zeros(Wco.shape)
    var mbo = NDArray.zeros(bo.shape)
    var mWhy = NDArray.zeros(Why.shape)
    var mby = NDArray.zeros(by.shape)
    while (n < 1000) {
n += 1
var inputs: Array[Int] = Array();
var targets: Array[Int] = Array()
if (p + seq_length + 1 >= data_size) {
p = 0
hprev = NDArray.zeros(cell_size, 1)
cprev = NDArray.zeros(cell_size, 1)
}
for (index <- p until p + seq_length) {
inputs = inputs :+ (char_to_ix.apply(data(index))) //apply(key) => value
targets = targets :+ (char_to_ix.apply(data(index + 1)))
}
var (dWxi, dWhi, dWci, dbi,
dWxf, dWhf, dWcf, dbf,
dWxo, dWho, dWco, dbo,
dWxc, dWhc, dbc,
dWhy, dby,
smooth_loss, temp_hprev, temp_cprev) = lossfunction(inputs, targets,
hprev, cprev,
vocab_size, cell_size,
Wxi, Whi, Wci, bi,
Wxf, Whf, Wcf, bf,
Wxo, Who, Wco, bo,
Wxc, Whc, bc,
Why, by)
hprev = temp_hprev; cprev = temp_cprev
var zips = Array(
Array(Wxi, dWxi, mWxi), Array(Whi, dWhi, mWhi), Array(Wci, dWci, mWci), Array(bi, dbi, mbi),
Array(Wxf, dWxf, mWxf), Array(Whf, dWhf, mWhf), Array(Wcf, dWcf, mWcf), Array(bf, dbf, mbf),
Array(Wxc, dWxc, mWxc), Array(Whc, dWhc, mWhc), Array(bc, dbc, mbc),
Array(Wxo, dWxo, mWxo), Array(Who, dWho, mWho), Array(Wco, dWco, mWco), Array(bo, dbo, mbo),
Array(Why, dWhy, mWhy), Array(by, dby, mby))
val little = 1e-8.toFloat
      // use Adagrad to adapt the learning rate per parameter
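      // Adagrad update: m += g * g ;  w -= lr * g / sqrt(m + eps)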
for (i <- 0 until zips.length) {
zips(i)(2) += zips(i)(1) * zips(i)(1)
zips(i)(0) += -zips(i)(1) * learning_rate / NDArray.sqrt(zips(i)(2) + NDArray.ones(zips(i)(2).shape) * little)
}
p += seq_length
      println("iteration " + n + " finished")
}
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/test/java/thu/brainmatrix/suite/TestSymbol.scala
|
//
//import ml.dmlc.mxnet.Symbol
//import ml.dmlc.mxnet.Base._
//import scala.collection.mutable.{ArrayBuffer,ListBuffer}
//import ml.dmlc.mxnet.Context
//import ml.dmlc.mxnet.lxg.ScalaSymFunction
//import ml.dmlc.mxnet.NDArray
//import ml.dmlc.mxnet.Random
//import ml.dmlc.mxnet.Executor
//
//
object TestSymbol{
//
//
// def main(args:Array[String]){
// inferShapeTest_mxnet
//// ListAtomicFuncTest
//// simpleBind
//// auxTest
//// addTest
//// printVectorTest
//// symbolOperatorTest
//// addTest
//// simpleBind
//// ElementWiseSumTest
//// convTest
// println("---------------------------------------")
// }
//
// def inferShapeTest_mxnet{
// val data = Symbol.Variable("data")
// val fc1 = Symbol.FullyConnected(Map("data" -> data, "name" -> "fc2", "num_hidden" -> 12))
//
// val kwargs_shape = Map("data"->Vector(200,15))
// val keys = ArrayBuffer.empty[String]
// val indPtr = ArrayBuffer(0)
// val sdata = ArrayBuffer.empty[Int]
// kwargs_shape.foreach { case (key, shape) =>
// keys += key
// sdata ++= shape
// indPtr += sdata.size
// }
// println("keys:")
// keys.foreach {println}
// println("\nsdata:")
// sdata.foreach(println)
// println("\nindPtr:"+indPtr)
//
// println("\n---------------------------------------------------")
// val (argShapes, _ , auxShapes) = fc1.inferShape(keys.toArray, indPtr.toArray, sdata.toArray)
// argShapes.foreach { x => {
// x.foreach{y => print(" "+ y )}
// println
// }
// }
//// fc2.listArguments().foreach { println }
// }
//
//
// def symbolOperatorTest(){
// val data1 = Symbol.Variable("data1")
// val data2 = Symbol.Variable("data2")
////
//
// val sum = data1 + data2
// sum.listArguments().foreach(println)
// }
//
// def printVectorTest(){
// _LIB.mxSymbolPrintVector(3,Array(1,2,3))
// }
//
// def ListAtomicFuncTest{
// val symbolList = ListBuffer.empty[SymbolHandle]
// checkCall(_LIB.mxSymbolListAtomicSymbolCreators(symbolList))
// symbolList.map(makeAtomicSymbolFunction).toMap
//
// def makeAtomicSymbolFunction(handle: SymbolHandle): (String,ScalaSymFunction) = {
// val name = new RefString
// val desc = new RefString
// val keyVarNumArgs = new RefString
// val numArgs = new MXUintRef
// val argNames = ListBuffer.empty[String]
// val argTypes = ListBuffer.empty[String]
// val argDescs = ListBuffer.empty[String]
//
// checkCall(_LIB.mxSymbolGetAtomicSymbolInfo(
// handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs))
// val paramStr = ctypes2docstring(argNames, argTypes, argDescs)
// val docStr = s"${name.value}\n${desc.value}\n\n$paramStr\n"
// println("Atomic Symbol function defination:\n{}", docStr)
// (name.value, new ScalaSymFunction(handle, keyVarNumArgs.value))
// }
//
// }
//
//
// def simpleBind{
// import ml.dmlc.mxnet.Context
// val batchSize = 100
//
// val data = Symbol.Variable("data")
// val conv1 = Symbol.Convolution(Map("data" -> data, "name" -> "conv1",
// "num_filter" -> 32, "kernel" -> (3, 3), "stride" -> (2, 2)))
// val bn1 = Symbol.BatchNorm(Map("data" -> conv1, "name" -> "bn1"))
// val act1 = Symbol.Activation(Map("data" -> bn1, "name" -> "relu1", "act_type" -> "relu"))
// val mp1 = Symbol.Pooling(Map("data" -> act1, "name" -> "mp1",
// "kernel" -> (2, 2), "stride" -> (2, 2), "pool_type" -> "max"))
//
// val conv2 = Symbol.Convolution(Map("data" -> mp1, "name" -> "conv2", "num_filter" -> 32,
// "kernel" -> (3, 3), "stride" -> (2, 2)))
// val bn2 = Symbol.BatchNorm(Map("data" -> conv2, "name" -> "bn2"))
// val act2 = Symbol.Activation(Map("data" -> bn2, "name" -> "relu2", "act_type" -> "relu"))
// val mp2 = Symbol.Pooling(Map("data" -> act2, "name" -> "mp2",
// "kernel" -> (2, 2), "stride" -> (2, 2), "pool_type" -> "max"))
//
// val fl = Symbol.Flatten(Map("data" -> conv1, "name" -> "flatten"))
// val fc2 = Symbol.FullyConnected(Map("data" -> fl, "name" -> "fc2", "num_hidden" -> 10))
// val softmax = Symbol.SoftmaxOutput(Map("data" -> fc2, "name" -> "sm"))
//
// val dataShapes = Map("data" -> Vector(20,1,28, 28))
// println("*****************************************")
//// val dataShapes_ =collection.immutable.Map(dataShapes.toList: _*)
// val exe = softmax.simpleBind(Context.cpu(), "write", shapeDict = dataShapes)
// // val dataArr = Random.normal(0, 1,Vector(100,1,28,28))
// println("*****************************************")
// println("----------------------------")
// println(softmax.debugStr)
// println(exe.debugStr)
//// for(i<-0 until 10){
// exe.forward(isTrain = true)
// exe.backward()
//
//// }
//// println(exe.outputs(0))
//
// }
//
//
// def auxTest{
// val data = Symbol.Variable("data")
// val conv1 = Symbol.Convolution(Map("data" -> data, "name" -> "conv1",
// "num_filter" -> 32, "kernel" -> (3, 3), "stride" -> (2, 2)))
// conv1.listAuxiliaryStates().foreach(println)
//
// }
//
//
// def addTest{
//
// val a = Symbol.Variable("a")
// val b = Symbol.Variable("b")
// val c = a + 2
// val args = c.listArguments()
// args.foreach(println)
// println("-----------------------------------")
//
// }
//
//
// def convTest{
// val shape = Vector(20,1,28, 28)
// val lhs = Symbol.Variable("lhs")
// val rhs = Symbol.Variable("rhs")
// val sum = lhs + rhs
//
// println("++++++++++++++++++++++++++++++++++++++++++++++")
// val conv1 = Symbol.Convolution(Map("data" -> sum, "name" -> "conv1",
// "num_filter" -> 32, "kernel" -> (3, 3), "stride" -> (2, 2)))
//
// val fc = Symbol.FullyConnected(Map("data" -> sum, "name" -> "fc3", "num_hidden" -> 10))
//
// val softmax = Symbol.SoftmaxOutput(Map("data" -> fc, "name" -> "sm"))
// println(softmax.listArguments())
//
// val lhsArr = Random.uniform(-10f, 10f, shape)
// val rhsArr = Random.uniform(-10f, 10f, shape)
// val lhsGrad = NDArray.empty(shape)
// val rhsGrad = NDArray.empty(shape)
//
// val ctxMapKeys = ArrayBuffer.empty[String]
// val ctxMapDevTypes = ArrayBuffer.empty[Int]
// val ctxMapDevIDs = ArrayBuffer.empty[Int]
//
// val args = Array(lhsArr, rhsArr)
// val argsGrad = Array(lhsGrad, rhsGrad)
//
// val execHandle = new ExecutorHandleRef
// println("++++++++++++++++++++++++++++++++++++++++++++++")
// checkCall(_LIB.mxExecutorBindX(sum.handle,
// 1,//1
// 0,//0
// ctxMapKeys.size,//0
// ctxMapKeys.toArray,//null
// ctxMapDevTypes.toArray,//null
// ctxMapDevIDs.toArray,//null
// args.size,
// args.map(_.handle),
// argsGrad.map(_.handle),
// Array(1,1),
// new Array[NDArrayHandle](0),
// execHandle))
//
// val executor = new Executor(execHandle.value,sum)
//
//// val exec3 = ret.bind(Context.cpu(), args = Seq(lhsArr, rhsArr))
//// val exec4 = ret.bind(Context.cpu(), args = Map("rhs" -> rhsArr, "lhs" -> lhsArr),
//// argsGrad = Map("lhs" -> lhsGrad, "rhs" -> rhsGrad))
// val exec5 = softmax.simpleBind(Context.cpu(), "write", shapeDict = Map("lhs" -> shape, "rhs" -> shape))
//
// println("++++++++++++++++++++++++++++++++++++++++++++++")
// executor.forward()
// exec5.forward(true)
//
// println("++++++++++++++++++++++++++++++++++++++++++++++")
// println(executor.outputs(0))
// exec5.outputs.foreach { println }
//// val outGrad = Random.uniform(-10f, 10f, shape)
//// executor.backward(Array(outGrad))
// exec5.backward()
// println("++++++++++++++++++++++++++++++++++++++++++++++")
//
//// println(outGrad)
//
// }
//
//
// def ElementWiseSumTest{
// val data = Symbol.Variable("data")
// val data1 = Symbol.Variable("data1")
// val lat = Symbol.ElementWiseSum(Array(data,data1),"lateralCon")
// println(lat.debugStr)
// }
//
}
|
Liuxg16/BrainMatrix
|
scala-package/spark/src/main/scala/ml/dmlc/mxnet/spark/utils/Network.scala
|
<reponame>Liuxg16/BrainMatrix
package ml.dmlc.mxnet.spark.utils
import java.io.IOException
import java.net.{ServerSocket, NetworkInterface}
import java.util.regex.Pattern
/**
 * Helper functions for choosing an IP address / port
 * @author Yizhi
 */
object Network {
private val IPADDRESS_PATTERN = Pattern.compile(
"^([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
"([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
"([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." +
"([01]?\\d\\d?|2[0-4]\\d|25[0-5])$")
def ipAddress: String = {
val interfaces = NetworkInterface.getNetworkInterfaces
while (interfaces.hasMoreElements) {
val interface = interfaces.nextElement
val addresses = interface.getInetAddresses
while (addresses.hasMoreElements) {
val address = addresses.nextElement
val ip = address.getHostAddress
if (!ip.startsWith("127.") && IPADDRESS_PATTERN.matcher(ip).matches()) {
return ip
}
}
}
"127.0.0.1"
}
def availablePort: Int = {
try {
val serverSocket = new ServerSocket(0)
val port = serverSocket.getLocalPort
try {
serverSocket.close()
} catch {
case _: IOException => // do nothing
}
port
} catch {
case ex: Throwable => throw new IOException("Cannot find an available port")
}
}
}
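// Usage sketch (illustrative), e.g. when choosing where to start a scheduler:
// val ip = Network.ipAddress
// val port = Network.availablePort
// println(s"scheduler at $ip:$port")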
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/cnn/Predition.scala
|
package thu.brainmatrix.cnn
import java.io.File
import com.sksamuel.scrimage.Image
import thu.brainmatrix.NDArray
import thu.brainmatrix.Context
import thu.brainmatrix.Shape
import thu.brainmatrix.Model
import thu.brainmatrix.Symbol
import thu.brainmatrix.util.CVTool
object Predition {
val newWidth = 28
val newHeight = 28
val ctx = Context.cpu(0)
val (_, argParams, _) = Model.loadCheckpoint("lenet", 10)
val sym = TestTraininglxg.getLenet()
val inputShape = Map("data"->Shape(1,1,newWidth,newHeight))
val executor = sym.simpleBind(ctx = ctx, shapeDict = inputShape)
for (key <- executor.argDict.keys) {
if (!inputShape.contains(key) && argParams.contains(key) && key != "sm_label") {
argParams(key).copyTo(executor.argDict(key))
}
}
def pred(picPath:String):Float = {
val img = Image(new File(picPath))
val resizedImg = img.scaleTo(newWidth, newHeight)
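    // invert and normalize each pixel to a single grayscale channel in [0, 1]:
    // dark strokes map near 1, white background near 0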
val rgbs = resizedImg.iterator.toArray.map { p =>
(255*3f-(p.blue+p.red+p.green))/(3.0f*255)
}
val inputData = NDArray.array(rgbs, Shape(1,1,newWidth,newHeight), ctx)
CVTool.saveFlattenImage(inputData, "checkpic")
inputData.copyTo(executor.argDict("data"))
executor.forward()
val prob = executor.outputs(0)
val index = NDArray.argmaxChannel(prob).toScalar
index
}
def main(args:Array[String]){
println(pred("/home/agen/workspace-python/flask/recognizer/output.png"))
}
}
|
Liuxg16/BrainMatrix
|
scala-package/core/src/test/scala/ml/dmlc/mxnet/RandomSuite.scala
|
package ml.dmlc.mxnet
import org.scalatest.{BeforeAndAfterAll, FunSuite}
class RandomSuite extends FunSuite with BeforeAndAfterAll {
test("uniform on cpu") {
Context.cpu().withScope {
val (a, b) = (-10, 10)
val shape = Shape(100, 100)
Random.seed(128)
val un1 = Random.uniform(a, b, shape)
Random.seed(128)
val un2 = Random.uniform(a, b, shape)
assert(un1 === un2)
assert(Math.abs(un1.toArray.sum / un1.size - (a + b) / 2f) < 0.1)
}
}
test("normal on cpu") {
val (mu, sigma) = (10f, 2f)
val shape = Shape(100, 100)
Random.seed(128)
val ret1 = Random.normal(mu, sigma, shape)
Random.seed(128)
val ret2 = Random.normal(mu, sigma, shape)
assert(ret1 === ret2)
val array = ret1.toArray
val mean = array.sum / ret1.size
val devs = array.map(score => (score - mean) * (score - mean))
val stddev = Math.sqrt(devs.sum / ret1.size)
assert(Math.abs(mean - mu) < 0.1)
assert(Math.abs(stddev - sigma) < 0.1)
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/Serializer.scala
|
package thu.brainmatrix
import java.io._
import java.nio.ByteBuffer
import java.nio.charset.Charset
import org.apache.commons.codec.binary.Base64
import scala.reflect.ClassTag
/**
* Serialize & deserialize Java/Scala [[Serializable]] objects
* @author <NAME>
*/
abstract class Serializer {
def serialize[T: ClassTag](t: T): ByteBuffer
def deserialize[T: ClassTag](bytes: ByteBuffer): T
}
object Serializer {
val UTF8 = Charset.forName("UTF-8")
def getSerializer: Serializer = getSerializer(None)
def getSerializer(serializer: Serializer): Serializer = {
    // TODO: dynamically get from brainmatrix env to support other serializers like Kryo
if (serializer == null) new JavaSerializer else serializer
}
def getSerializer(serializer: Option[Serializer]): Serializer = {
    // TODO: dynamically get from brainmatrix env to support other serializers like Kryo
serializer.getOrElse(new JavaSerializer)
}
def encodeBase64String(bytes: ByteBuffer): String = {
new String(Base64.encodeBase64(bytes.array), UTF8)
}
def decodeBase64String(str: String): ByteBuffer = {
ByteBuffer.wrap(Base64.decodeBase64(str.getBytes(UTF8)))
}
}
class JavaSerializer extends Serializer {
override def serialize[T: ClassTag](t: T): ByteBuffer = {
val bos = new ByteArrayOutputStream()
val out = new ObjectOutputStream(bos)
out.writeObject(t)
out.close()
ByteBuffer.wrap(bos.toByteArray)
}
override def deserialize[T: ClassTag](bytes: ByteBuffer): T = {
val byteArray = bytes.array()
val bis = new ByteArrayInputStream(byteArray)
val in = new ObjectInputStream(bis)
in.readObject().asInstanceOf[T]
}
}
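// Round-trip sketch: the Base64 helpers let serialized objects travel through
// string-only channels (e.g. job configuration values).
// val ser = Serializer.getSerializer
// val str = Serializer.encodeBase64String(ser.serialize(Seq(1, 2, 3)))
// val back = ser.deserialize[Seq[Int]](Serializer.decodeBase64String(str))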
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/lstmSort/lstmSortInfer.scala
|
package thu.brainmatrix.lstmSort
import thu.brainmatrix._
import thu.brainmatrix.util.IOHelper
import thu.brainmatrix.lstmSort.RnnModel.Bi_LSTMInferenceModel
object lstmSortInfer {
def main(args:Array[String]){
val path_train = "./data/sort.train.txt"
val path_test = "./data/sort.valid.txt"
val saveModelPath = "./model"
val batch_size = 100
val buckets = List(5)
val num_hidden = 300
val num_embed = 512
val num_lstm_layer = 2
val seqLen = 5
val num_epoch = 1
val learningRate = 0.01f
val momentum = 0.9
val ctx = Context.gpu(0)
    // a map from each word to its index
    val vocab = IOHelper.buildVocab("./data/sort.train.txt")
    val bacov = for ((k, v) <- vocab) yield (v, k) // inverse map: index -> word
// load from check-point
val (_, argParams, _) = Model.loadCheckpoint(s"${saveModelPath}/lstmSort", 1)
val model = new Bi_LSTMInferenceModel(numLstmLayer = 2, inputSize = vocab.size, seq_len = 5,
numHidden = num_hidden,numEmbed = num_embed, numLabel = vocab.size, argParams = argParams)
println("----------------")
// S0V4C 94TMV NDKQ2 NEJVU GW2CJ
// KS51G 1KMG4 2R6OQ NDKQ2 FA4HP
val inputString = "S0V4C 94TMV NDKQ2 NEJVU GW2CJ"
val inputS =inputString.split(" ").map(vocab(_).toFloat)
val data = NDArray.array(inputS,Shape(1,inputS.length))
val prob = model.forward(data)
(NDArray.argmaxChannel(prob)).toArray.map(x => println(bacov(x.toInt)))
}
  /**
   * input : S0V4C 94TMV NDKQ2 NEJVU GW2CJ
   * output: 94TMV
   *         GW2CJ
   *         NEJVU
   *         NEJVU
   *         S0V4C
   * note: the outputs come out sorted, but they are not necessarily all drawn
   * from the input string. The LSTM has not fully learned to sort, but it has
   * basically learned it.
   */
}
|
Liuxg16/BrainMatrix
|
scala-package/examples/src/main/scala/ml/dmlc/mxnet/examples/neuralstyle/ModelVgg19.scala
|
<filename>scala-package/examples/src/main/scala/ml/dmlc/mxnet/examples/neuralstyle/ModelVgg19.scala
package ml.dmlc.mxnet.examples.neuralstyle
import ml.dmlc.mxnet.Context
import ml.dmlc.mxnet.Executor
import ml.dmlc.mxnet.NDArray
import ml.dmlc.mxnet.Symbol
import ml.dmlc.mxnet.Shape
/**
* Definition for the neuralstyle network and initialize it with pretrained weight
* @author <NAME>
*/
object ModelVgg19 {
case class ConvExecutor(executor: Executor, data: NDArray, dataGrad: NDArray,
style: Array[NDArray], content: NDArray, argDict: Map[String, NDArray])
def getSymbol(): (Symbol, Symbol) = {
// declare symbol
val data = Symbol.Variable("data")
val conv1_1 = Symbol.Convolution("conv1_1")(Map("data" -> data , "num_filter" -> 64,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu1_1 = Symbol.Activation("relu1_1")(Map("data" -> conv1_1 , "act_type" -> "relu"))
val conv1_2 = Symbol.Convolution("conv1_2")(Map("data" -> relu1_1 , "num_filter" -> 64,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu1_2 = Symbol.Activation("relu1_2")(Map("data" -> conv1_2 , "act_type" -> "relu"))
val pool1 = Symbol.Pooling("pool1")(Map("data" -> relu1_2 , "pad" -> "(0,0)",
"kernel" -> "(2,2)", "stride" -> "(2,2)", "pool_type" -> "avg"))
val conv2_1 = Symbol.Convolution("conv2_1")(Map("data" -> pool1 , "num_filter" -> 128,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu2_1 = Symbol.Activation("relu2_1")(Map("data" -> conv2_1 , "act_type" -> "relu"))
val conv2_2 = Symbol.Convolution("conv2_2")(Map("data" -> relu2_1 , "num_filter" -> 128,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu2_2 = Symbol.Activation("relu2_2")(Map("data" -> conv2_2 , "act_type" -> "relu"))
val pool2 = Symbol.Pooling("pool2")(Map("data" -> relu2_2 , "pad" -> "(0,0)",
"kernel" -> "(2,2)", "stride" -> "(2,2)", "pool_type" -> "avg"))
val conv3_1 = Symbol.Convolution("conv3_1")(Map("data" -> pool2 , "num_filter" -> 256,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu3_1 = Symbol.Activation("relu3_1")(Map("data" -> conv3_1 , "act_type" -> "relu"))
val conv3_2 = Symbol.Convolution("conv3_2")(Map("data" -> relu3_1 , "num_filter" -> 256,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu3_2 = Symbol.Activation("'relu3_2")(Map("data" -> conv3_2 , "act_type" -> "relu"))
val conv3_3 = Symbol.Convolution("conv3_3")(Map("data" -> relu3_2 , "num_filter" -> 256,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu3_3 = Symbol.Activation("relu3_3")(Map("data" -> conv3_3 , "act_type" -> "relu"))
val conv3_4 = Symbol.Convolution("conv3_4")(Map("data" -> relu3_3 , "num_filter" -> 256,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu3_4 = Symbol.Activation("relu3_4")(Map("data" -> conv3_4 , "act_type" -> "relu"))
val pool3 = Symbol.Pooling("pool3")(Map("data" -> relu3_4 , "pad" -> "(0,0)",
"kernel" -> "(2,2)", "stride" -> "(2,2)", "pool_type" -> "avg"))
val conv4_1 = Symbol.Convolution("conv4_1")(Map("data" -> pool3 , "num_filter" -> 512,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu4_1 = Symbol.Activation("relu4_1")(Map("data" -> conv4_1 , "act_type" -> "relu"))
val conv4_2 = Symbol.Convolution("conv4_2")(Map("data" -> relu4_1 , "num_filter" -> 512,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu4_2 = Symbol.Activation("relu4_2")(Map("data" -> conv4_2 , "act_type" -> "relu"))
val conv4_3 = Symbol.Convolution("conv4_3")(Map("data" -> relu4_2 , "num_filter" -> 512,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu4_3 = Symbol.Activation("relu4_3")(Map("data" -> conv4_3 , "act_type" -> "relu"))
val conv4_4 = Symbol.Convolution("conv4_4")(Map("data" -> relu4_3 , "num_filter" -> 512,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu4_4 = Symbol.Activation("relu4_4")(Map("data" -> conv4_4 , "act_type" -> "relu"))
val pool4 = Symbol.Pooling("pool4")(Map("data" -> relu4_4 , "pad" -> "(0,0)",
"kernel" -> "(2,2)", "stride" -> "(2,2)", "pool_type" -> "avg"))
val conv5_1 = Symbol.Convolution("conv5_1")(Map("data" -> pool4 , "num_filter" -> 512,
"pad" -> "(1,1)", "kernel" -> "(3,3)", "stride" -> "(1,1)",
"no_bias" -> false, "workspace" -> 1024))
val relu5_1 = Symbol.Activation("relu5_1")(Map("data" -> conv5_1 , "act_type" -> "relu"))
// style and content layers
val style = Symbol.Group(relu1_1, relu2_1, relu3_1, relu4_1, relu5_1)
val content = Symbol.Group(relu4_2)
(style, content)
}
def getExecutor(style: Symbol, content: Symbol, modelPath: String,
inputSize: (Int, Int), ctx: Context): ConvExecutor = {
val out = Symbol.Group(style, content)
// make executor
val (argShapes, outputShapes, auxShapes) = out.inferShape(
Map("data" -> Shape(1, 3, inputSize._1, inputSize._2)))
val argNames = out.listArguments()
val argDict = argNames.zip(argShapes.map(NDArray.zeros(_, ctx))).toMap
val gradDict = Map("data" -> argDict("data").copyTo(ctx))
// init with pretrained weight
val pretrained = NDArray.load2Map(modelPath)
argNames.filter(_ != "data").foreach { name =>
val key = s"arg:$name"
if (pretrained.contains(key)) argDict(name).set(pretrained(key))
}
val executor = out.bind(ctx, argDict, gradDict)
val outArray = executor.outputs
ConvExecutor(executor = executor,
data = argDict("data"),
dataGrad = gradDict("data"),
style = outArray.take(outArray.length - 1),
content = outArray(outArray.length - 1),
argDict = argDict)
}
def getModel(modelPath: String, inputSize: (Int, Int), ctx: Context): ConvExecutor = {
val (style, content) = getSymbol()
getExecutor(style, content, modelPath, inputSize, ctx)
}
}
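// Usage sketch (the weight path is illustrative; it should point to a
// pretrained VGG-19 parameter file in NDArray format):
// val vgg = ModelVgg19.getModel("./model/vgg19.params", (224, 224), Context.cpu())
// vgg.executor.forward()
// val contentFeature = vgg.content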
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/lstmSort/ButketIo.scala
|
package thu.brainmatrix.lstmSort
import thu.brainmatrix.{DataBatch, DataIter, NDArray, Shape}
import org.slf4j.LoggerFactory
import scala.io.Source
import scala.util.Random
/**
* @author <NAME>
*/
object ButketIo {
type Text2Id = (String, Map[String, Int]) => Array[Int]
type ReadContent = String => String
def defaultReadContent(path: String): String = {
Source.fromFile(path).mkString
.replaceAll("\n", " <eos> ")
}
def defaultText2Id(sentence: String, theVocab: Map[String, Int]): Array[Int] = {
val words = {
val tmp = sentence.split(" ").filter(_.length() > 0)
for (w <- tmp) yield theVocab(w)
}
words.toArray
}
def defaultGenBuckets(sentences: Array[String], batchSize: Int,
theVocab: Map[String, Int]): List[Int] = {
val lenDict = scala.collection.mutable.Map[Int, Int]()
var maxLen = -1
for (sentence <- sentences) {
val wordsLen = defaultText2Id(sentence, theVocab).length
if (wordsLen > 0) {
if (wordsLen > maxLen) {
maxLen = wordsLen
}
if (lenDict.contains(wordsLen)) {
lenDict(wordsLen) = lenDict(wordsLen) + 1
} else {
lenDict += wordsLen -> 1
}
}
}
var tl = 0
var buckets = List[Int]()
lenDict.foreach {
case (l, n) =>
if (n + tl >= batchSize) {
buckets = buckets :+ l
tl = 0
} else tl += n
}
if (tl > 0) buckets = buckets :+ maxLen
buckets
}
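  // Sketch of the policy above: a bucket boundary is cut whenever the
  // accumulated sentence count reaches batchSize; any remainder is swept
  // into a final bucket of length maxLen.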
class BucketSentenceIter(
path: String, vocab: Map[String, Int], var buckets: List[Int],
_batchSize: Int, initStates: IndexedSeq[(String, (Int, Int))],
seperateChar: String = " <eos> ", text2Id: Text2Id = defaultText2Id,
readContent: ReadContent = defaultReadContent) extends DataIter {
private val logger = LoggerFactory.getLogger(classOf[BucketSentenceIter])
private val content = readContent(path)
private val sentences = content.split(seperateChar)
// println(sentences.length)
if (buckets.length == 0) {
buckets = defaultGenBuckets(sentences, batchSize, vocab)
}
    buckets = buckets.sorted // e.g. List(129)
    // pre-allocate with the largest bucket for better memory sharing
    private val defaultBucketKey = buckets.max
    // sentences longer than the largest bucket are simply ignored here
private val data = buckets.indices.map(x => sentences.map(text2Id(_,vocab).map(_.toFloat))).toArray
// val t = sentences.map(text2Id(_,vocab).map(_.toFloat))
//
// for (sentence <- sentences) {
//// println(sentence)
// val ids = text2Id(sentence, vocab)
// if (ids.length > 0) {
// buckets.indices.foreach { idx =>//data(0) is a collection contains many NDArrays,each is a sentence
// if (buckets(idx) >= ids.length) {
// data(idx) = data(idx) :+
// //ids and ++ Array(129-ids.length) to compose a 129 data
// (ids.map(_.toFloat) ++ Array.fill[Float](buckets(idx) - ids.length)(0f))
// }
// }
// }
// }
// Get the size of each bucket, so that we could sample
// uniformly from the bucket
private val bucketSizes = data.map(_.length)
// println("Summary of dataset ==================")
// buckets.zip(bucketSizes).foreach {
// case (bkt, size) => println(s"bucket of len $bkt : $size samples")
// }
// make a random data iteration plan
// truncate each bucket into multiple of batch-size
private var bucketNBatches = Array[Int]()//Array(1024),the length is the same as the length of data
for (i <- data.indices) {
bucketNBatches = bucketNBatches :+ (data(i).length / _batchSize)
data(i) = data(i).take(bucketNBatches(i) * _batchSize)//delete the redundant bucket
}
private val bucketPlan = {//List(0,0,0,...),length=1024
val plan = bucketNBatches.zipWithIndex.map(x => Array.fill[Int](x._1)(x._2)).flatten
Random.shuffle(plan.toList)
}
private val bucketIdxAll = data.map(_.length).toList
.map(l => Random.shuffle((0 until l).toList))//List(2,5,2,6),a random permutation index
private val bucketCurrIdx = data.map(x => 0)
private var dataBuffer = Array[NDArray]()//length is always 1
private var labelBuffer = Array[NDArray]()//length is always 1
for (iBucket <- data.indices) {
dataBuffer = dataBuffer :+ NDArray.zeros(_batchSize, buckets(iBucket))//(_batchSize,129)
labelBuffer = labelBuffer :+ NDArray.zeros(_batchSize, buckets(iBucket))
}
private val _provideData = {
val tmp = Map("data" -> Shape(_batchSize, defaultBucketKey))
tmp ++ initStates.map(x => x._1 -> Shape(x._2._1, x._2._2))
}
private val _provideLabel = Map("softmax_label" -> Shape(_batchSize, defaultBucketKey))//(batchsize,129)
private var iBucket = 0
override def next(): DataBatch = {
val bucketIdx = bucketPlan(iBucket)//the index for choice data slice,always 0
val dataBuf = dataBuffer(bucketIdx)
val iIdx = bucketCurrIdx(bucketIdx)//the idx that has used
val idx = bucketIdxAll(bucketIdx).drop(iIdx).take(_batchSize)
bucketCurrIdx(bucketIdx) = bucketCurrIdx(bucketIdx) + _batchSize
val datas = idx.map(i => data(bucketIdx)(i)).toArray
for (sentence <- datas) {
assert(sentence.length == buckets(bucketIdx))//129
}
// println(dataBuf.shape)
// val a = datas.flatten
// println(a.length)
dataBuf.set(datas.flatten)
val labelBuf = labelBuffer(bucketIdx)
val labels = idx.map(i => data(bucketIdx)(i).sorted).toArray
labelBuf.set(labels.flatten)
iBucket += 1
new DataBatch(IndexedSeq(dataBuf.copy),//NDArray,(_batchSize,129)
IndexedSeq(labelBuf.copy),//NDArray,(_batchSize,129)
getIndex(),
getPad())
}
/**
* reset the iterator
*/
override def reset(): Unit = {
iBucket = 0
bucketCurrIdx.indices.map(i => bucketCurrIdx(i) = 0)
}
override def batchSize: Int = _batchSize
/**
* get data of current batch
* @return the data of current batch
*/
override def getData(): IndexedSeq[NDArray] = IndexedSeq(dataBuffer(bucketPlan(iBucket)))
/**
* Get label of current batch
* @return the label of current batch
*/
override def getLabel(): IndexedSeq[NDArray] = IndexedSeq(labelBuffer(bucketPlan(iBucket)))
/**
* the index of current batch
* @return
*/
override def getIndex(): IndexedSeq[Long] = IndexedSeq[Long]()
// The name and shape of label provided by this iterator
override def provideLabel: Map[String, Shape] = this._provideLabel
/**
* get the number of padding examples
* in current batch
* @return number of padding examples in current batch
*/
override def getPad(): Int = 0
// The name and shape of data provided by this iterator
override def provideData: Map[String, Shape] = this._provideData
override def hasNext: Boolean = {
if (iBucket < bucketPlan.length) true else false
}
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/synapse_symbol/Module.scala
|
package thu.brainmatrix.synapse_symbol
import thu.brainmatrix.NDArray
import thu.brainmatrix.Symbol
abstract class Module {
var variable_table:Array[String]
var variableindices:Array[Int]
def getSymbol():Symbol = {
null
}
def getInitialY():Array[NDArray] = {
Array[NDArray]()
}
def getInitialVar():Array[String] = {
Array[String]()
}
def getInitial(map : Map[String,NDArray]=null): Map[String,NDArray] = {
Map[String,NDArray]()
}
def getSymbolMap():Map[String,NDArray] = {
Map[String,NDArray]()
}
def setIndices(indices:Array[Int]){
this.variableindices=indices;
}
def setIndices(startIndex:Int){
var index = startIndex;
val numvariables=this.variable_table.length;
this.variableindices = Array.fill[Int](numvariables)(0)
for(i <- 0 until numvariables){
this.variableindices(i) = index;
index = index + 1
}
}
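  // e.g. setIndices(4) on a module with three variables assigns
  // variableindices = Array(4, 5, 6)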
def getVarIndices():Array[Int] = {
this.variableindices;
}
def getVarNumber():Int = {
this.variable_table.length;
}
def getVarsName():Array[String] = {
this.variable_table;
}
  /**
   * @param name the variable name to look up
   * @return the variable's index, or -1 if the name is not found
   */
def getResindex(name:String):Int = {
var res = -1;
for(i <- 0 until this.variable_table.length){
if(name.equals(this.variable_table(i)))
res = this.variableindices(i);
}
return res;
}
  // t_onehot: not the raw time value but a one-hot encoding of the time step
def update(t_onehot: Symbol, y:Array[Symbol],yDot:Array[ Symbol],indices:Array[Int]):Array[Symbol] = {
Array.fill[Symbol](y.length)(null)
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/util/RK4.scala
|
<reponame>Liuxg16/BrainMatrix
package thu.brainmatrix.util
import scala.collection.mutable.ArrayBuffer
import thu.brainmatrix.NDArray
import thu.brainmatrix.Shape
import thu.brainmatrix.Context
//import util.Draw
//import util.ArrOps
/**
 * @Author: <NAME>
 * Fourth-order Runge-Kutta method.
 * function: f(t, y0, y1, y2, ...) = (dy0, dy1, dy2, ...); each element is a
 * vector and all elements share the same shape.
 * Usage: new RK4(f).solve(t0, Array(y0_init, y1_init, ...), stepSize, numSteps)
 * The length of the function's return value equals the number of state
 * variables, i.e. y.length == dy.length.
 * note: unlike the class RKM4, this class takes a single function as its only
 * constructor parameter.
 */
class RK4(val function:((NDArray,Array[NDArray]) => Array[NDArray])) {
var ytmp = Array[NDArray]()
/**
* num_parallel
* num_funcions
* inputs:
* t0: 1 * num_parallel
* y_init: num_functions [1 * num_parallel]
* a array from y(0)-y(n), y(i) is a vector for parallel
* h : 1 * num_parallel
* Num_Step
*
   * note: not very precise, because it only integrates to k*delta, which can be slightly smaller than the requested end time
* returns:
* 1: times => record of the step each episode
* 2: record of the y(0)-y(n), y(i) is matrix ,each line restores the episode of y(i)
*/
def solve(t0:NDArray, y_init:Array[NDArray],h:NDArray,Num_Step:Int)(ctx:Context=Context.cpu()) :(NDArray,Array[NDArray])= {
/**
* ts Num_Step * num_parallel
* res: num_functions * [Num_Step * num_parallel]
*/
this.ytmp = y_init.map(_.copy())
var yt = y_init;
var t = t0
var delta = h
val ts = NDArray.zeros(Shape(Num_Step,t0.shape(1)),ctx)
val res = Array.fill(y_init.length)(NDArray.zeros(Shape(Num_Step,t0.shape(1)), ctx))
var step = 0
while(step < Num_Step){
println("step:"+step)
t.copyTo(ts.slice(step))
// println("lemonman")
this.right_calculate(yt, t, delta)
// println("lemonman")
yt.indices.foreach {i => yt(i).copyTo(res(i).slice(step)) }
t += h
step += 1
}
this.ytmp.foreach {_.dispose()}
yt.foreach {_.dispose()}
delta.dispose()
t.dispose()
(ts,res)
}
/**
* t: 1 * num_parallel
* y: num_functions * [1 * num_parallel]
* delta: 1 * num_parallel
*/
def right_calculate(y:Array[NDArray],t:NDArray,delta:NDArray){
/**
* The fourth-order Runge-Kutta method requires four evaluations of the
* right-hand side per step h
*/
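    // Classic RK4 scheme implemented below (each k_i includes the step h):
    //   k1 = h f(t, y)
    //   k2 = h f(t + h/2, y + k1/2)
    //   k3 = h f(t + h/2, y + k2/2)
    //   k4 = h f(t + h,   y + k3)
    //   y <- y + (k1 + 2 k2 + 2 k3 + k4) / 6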
    val delta05 = delta * 0.5f
    val function = this.function
    // each k_i already includes the factor h (delta), so the trial states are
    // y + k1/2, y + k2/2 and y + k3
    val k1 = function(t, y).map(_ * delta).toArray
    this.ytmp zip y foreach { case (yt, y1) => y1.copyTo(yt) }
    ytmp.indices.foreach(i => ytmp(i) += k1(i) * 0.5f)
    t += delta05
    val k2 = function(t, ytmp).map(_ * delta).toArray
    this.ytmp zip y foreach { case (yt, y1) => y1.copyTo(yt) }
    ytmp.indices.foreach(i => ytmp(i) += k2(i) * 0.5f)
    val k3 = function(t, ytmp).map(_ * delta).toArray
    t += delta05
    this.ytmp zip y foreach { case (yt, y1) => y1.copyTo(yt) }
    ytmp.indices.foreach(i => ytmp(i) += k3(i))
    val k4 = function(t, ytmp).map(_ * delta).toArray
    t -= delta
    y.indices.foreach(i => y(i) += (k1(i) + k2(i) * 2f + k3(i) * 2f + k4(i)) / 6f)
delta05.dispose()
k1.foreach { _.dispose()}
k2.foreach { _.dispose()}
k3.foreach { _.dispose()}
k4.foreach { _.dispose()}
// y .foreach { _.dispose()}
// y
}
}
object RK4{
  // sketch a circle
def main(args:Array[String]){
def xdot(t:NDArray,y:Array[NDArray]) = NDArray.cos(t)
def ydot(t:NDArray,y:Array[NDArray]) = -y(0) //cos
type f_Norm = (Double, Array[Double]) => Double
def f(t:NDArray,y:Array[NDArray]) = Array(xdot(t,y),ydot(t,y))
//
val ctx = Context.gpu();
val rkm4 = new RK4(f)
val num_funs = 2
val num_parallel = 10
val y0_0 = NDArray.array(Array(0f,1f,0f,1f,0f,1f,0f,1f,0f,1f), Shape(num_parallel,1), ctx)
val y0_1 = NDArray.array(Array(1f,0f,1f,0f,1f,0f,1f,0f,1f,0f), Shape(num_parallel,1), ctx)
val h = NDArray.ones(Shape(num_parallel,1),ctx) * (0.01 * 2* math.Pi).toFloat
val t0 = NDArray.zeros(Shape(num_parallel,1),ctx)
val y_init = Array(y0_0,y0_1)
val (t,y) = rkm4.solve(t0, y_init,h ,100)(ctx)
val rest = NDArray.transpose(t).slice(0).toArray
val resx = NDArray.transpose(y(0)).slice(0).toArray
val resy = NDArray.transpose(y(1)).slice(0).toArray
val draw = new Draw()
// draw.add_line(res,resx)
draw.add_line(resx,resy)
draw.draw()
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/Context.scala
|
package thu.brainmatrix
object Context {
val devtype2str = Map(1 -> "cpu", 2 -> "gpu", 3 -> "cpu_pinned")
val devstr2type = Map("cpu" -> 1, "gpu" -> 2, "cpu_pinned" -> 3)
private var _defaultCtx = new Context("cpu", 0)
def defaultCtx: Context = _defaultCtx
def cpu(deviceId: Int = 0): Context = {
new Context("cpu", deviceId)
}
def gpu(deviceId: Int = 0): Context = {
new Context("gpu", deviceId)
}
implicit def ctx2Array(ctx: Context): Array[Context] = Array(ctx)
}
/**
* Constructing a context.
* @author <NAME>
* @param deviceTypeName {'cpu', 'gpu'} String representing the device type
* @param deviceId (default=0) The device id of the device, needed for GPU
*/
class Context(deviceTypeName: String, val deviceId: Int = 0) extends Serializable {
val deviceTypeid: Int = Context.devstr2type(deviceTypeName)
def this(context: Context) = {
this(context.deviceType, context.deviceId)
}
def withScope[T](body: => T): T = {
val oldDefaultCtx = Context.defaultCtx
Context._defaultCtx = this
try {
body
} finally {
Context._defaultCtx = oldDefaultCtx
}
}
/**
* Return device type of current context.
* @return device_type
*/
def deviceType: String = Context.devtype2str(deviceTypeid)
override def toString: String = {
s"$deviceType($deviceId)"
}
}
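// withScope sketch: temporarily switch the default device for a block.
// Context.gpu(0).withScope {
//   // NDArrays allocated here default to gpu(0)
// }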
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/synapse_symbol/Engine.scala
|
package thu.brainmatrix.synapse_symbol
import thu.brainmatrix.util.RK4
import scala.util.parsing.json._
import thu.brainmatrix.Symbol
import thu.brainmatrix.Visualization
import thu.brainmatrix.Executor
import thu.brainmatrix.NDArray
import thu.brainmatrix.Symbol
import thu.brainmatrix.Context
import thu.brainmatrix.Shape
class Engine(ctx:Context = Context.defaultCtx,val model:Model) {
var executor:Executor = null
var executor1:Executor = null
val t_onehot :NDArray = NDArray.zeros(Shape(Config.NUMBER,Config.SPIKENUM), ctx)
var in_args = Map[String, NDArray]()
def run(t0:NDArray, y0:Array[NDArray], h:NDArray, stepSize:Int):(NDArray,Array[NDArray]) = {
val rk4 = new RK4(functions)
val (t, y) = rk4.solve(t0, y0, h, stepSize)(ctx)
(t,y)
}
def build(module:Module = null) {
this.in_args = Map("t_onehot"->t_onehot) ++ this.model.symbolMap ++ this.model.getInitialMap() ++ Config.MAP
// model.update().listArguments().foreach{z => println(in_args(z).shape)}
// in_args.keySet.foreach {println}
val arg_grad_store = Map("t_onehot"->NDArray.zeros(Shape(1), ctx))
this.executor = this.model.update().easy_bind(ctx,in_args, arg_grad_store)
if(module !=null)
this.executor1 = module.getSymbol().easy_bind(ctx,in_args, arg_grad_store)
}
def plot(){
val netName = "synapse_net"
val sym = this.model.update()
val in_args = Map("t_onehot"->t_onehot) ++ this.model.symbolMap ++ this.model.getInitialMap() ++ Config.MAP
val shape_init = in_args.map(arg => (arg._1,arg._2.shape))
val dot = Visualization.plotNetwork(symbol = sym,
title =netName , shape = shape_init,
nodeAttrs = Map("shape" -> "rect", "fixedsize" -> "false"))
dot.render(engine = "dot", format = "pdf", fileName = netName, path = "output/")
}
def functions(t: NDArray,y:Array[NDArray]):Array[NDArray] ={
val t_1 = NDArray.array(t.toArray,Shape(Config.NUMBER),ctx)
NDArray.onehotEncode(t_1, t_onehot)
this.model.getInitialY() zip y map{case (a,b) =>{
b.copyTo(a)
}}
// this.executor1.forward()
// println(this.executor1.outputs(0))
this.executor.forward()
this.executor.outputs
}
def dispose(){
this.in_args.values.foreach { x => x.dispose() }
this.t_onehot.dispose()
this.executor.dispose()
}
}
|
Liuxg16/BrainMatrix
|
scala-package/core/src/main/scala/ml/dmlc/mxnet/io/MXDataIter.scala
|
package ml.dmlc.mxnet.io
import ml.dmlc.mxnet.Base._
import ml.dmlc.mxnet.{DataPack, DataBatch, DataIter, NDArray, Shape}
import ml.dmlc.mxnet.IO._
import org.slf4j.LoggerFactory
import scala.collection.mutable.ListBuffer
/**
* DataIter built in MXNet.
* @param handle the handle to the underlying C++ Data Iterator
*/
// scalastyle:off finalize
class MXDataIter private[mxnet](private[mxnet] val handle: DataIterHandle,
private val dataName: String = "data",
private val labelName: String = "label") extends DataIter {
private val logger = LoggerFactory.getLogger(classOf[MXDataIter])
  // use currentBatch to implement hasNext
  // (maybe not the best way to do this; fix me if a better way is found)
private var currentBatch: DataBatch = null
private val (_provideData: Map[String, Shape],
_provideLabel: Map[String, Shape],
_batchSize: Int) =
if (hasNext) {
iterNext()
val data = currentBatch.data(0)
val label = currentBatch.label(0)
// properties
val res = (Map(dataName -> data.shape), Map(labelName -> label.shape), data.shape(0))
currentBatch.dispose()
reset()
res
} else {
(null, null, 0)
}
private var disposed = false
override protected def finalize(): Unit = {
dispose()
}
/**
* Release the native memory.
* The object shall never be used after it is disposed.
*/
def dispose(): Unit = {
if (!disposed) {
_LIB.mxDataIterFree(handle)
disposed = true
}
}
/**
* reset the iterator
*/
override def reset(): Unit = {
currentBatch = null
checkCall(_LIB.mxDataIterBeforeFirst(handle))
}
@throws(classOf[NoSuchElementException])
override def next(): DataBatch = {
if (currentBatch == null) {
iterNext()
}
if (currentBatch != null) {
val batch = currentBatch
currentBatch = null
batch
} else {
throw new NoSuchElementException
}
}
/**
* Iterate to next batch
* @return whether the move is successful
*/
private def iterNext(): Boolean = {
val next = new RefInt
checkCall(_LIB.mxDataIterNext(handle, next))
currentBatch = null
if (next.value > 0) {
currentBatch = new DataBatch(data = getData(), label = getLabel(),
index = getIndex(), pad = getPad())
}
next.value > 0
}
/**
* get data of current batch
* @return the data of current batch
*/
override def getData(): IndexedSeq[NDArray] = {
val out = new NDArrayHandleRef
checkCall(_LIB.mxDataIterGetData(handle, out))
IndexedSeq(new NDArray(out.value, writable = false))
}
/**
* Get label of current batch
* @return the label of current batch
*/
override def getLabel(): IndexedSeq[NDArray] = {
val out = new NDArrayHandleRef
checkCall(_LIB.mxDataIterGetLabel(handle, out))
IndexedSeq(new NDArray(out.value, writable = false))
}
/**
* Get the index of current batch
* @return the index of current batch
*/
override def getIndex(): IndexedSeq[Long] = {
val outIndex = new ListBuffer[Long]
val outSize = new RefLong
checkCall(_LIB.mxDataIterGetIndex(handle, outIndex, outSize))
outIndex.toIndexedSeq
}
/**
* get the number of padding examples
* in current batch
* @return number of padding examples in current batch
*/
override def getPad(): MXUint = {
val out = new MXUintRef
checkCall(_LIB.mxDataIterGetPadNum(handle, out))
out.value
}
// The name and shape of data provided by this iterator
override def provideData: Map[String, Shape] = _provideData
// The name and shape of label provided by this iterator
override def provideLabel: Map[String, Shape] = _provideLabel
override def hasNext: Boolean = {
if (currentBatch != null) {
true
} else {
iterNext()
}
}
override def batchSize: Int = _batchSize
}
// scalastyle:on finalize
class MXDataPack(val iterName: String,
val params: Map[String, String]) extends DataPack {
/**
* get data iterator
* @return DataIter
*/
override def iterator: DataIter = {
createIterator(iterName, params)
}
}
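// Usage sketch ("MNISTIter" and its parameters are illustrative; any iterator
// name registered in the backend can be used):
// val pack = new MXDataPack("MNISTIter", Map(
//   "image" -> "data/train-images-idx3-ubyte",
//   "label" -> "data/train-labels-idx1-ubyte",
//   "batch_size" -> "100"))
// val iter = pack.iterator
// while (iter.hasNext) { val batch = iter.next() /* train on the batch */ }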
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/synapse/Synapse.scala
|
package thu.brainmatrix.synapse
import thu.brainmatrix.NDArray
import thu.brainmatrix.Context
import thu.brainmatrix.Shape
class Synapse(val ctx: Context = Context.defaultCtx) extends Module{
// a basic synapse
override var variable_table = Array[String]("preCa","preCaBuff","aPreCDK","preSensor","aPreTrkB","preNR2B","preAbeta","preMg",
"postCa","postCaBuff","aPostCN","aPostTrkB","qNMDAR")
override var variableindices = Array.fill[Int](this.variable_table.length)(-1)
// connectivity
var axon:Axon = null
var dendrite:Dendrite = null
val onenda = NDArray.ones(Config.SHAPE,ctx)
// parameters
// common constants
var Vs :NDArray = onenda * -50f // threshold for the activation of VGCC
var gK :NDArray = onenda * 1; //
var Vk :NDArray = onenda * -70; // reversal potential for K channel
var Vr :NDArray = onenda * -70; // resting Vm
var Ve :NDArray = onenda * 0; // reversal potential for AMPA channel
var Vca :NDArray = onenda * 30; // reversal potential for Ca flux though NMDAR
  var Cm :NDArray = onenda * 10; // membrane capacitance
var Mgo :NDArray = onenda * 1.2f; // [Mg]o = 1.2 mM
var kMgNMDAR :NDArray = onenda * 4.5f; // NMDAR Mg block
var kNMDARIn :NDArray = onenda * 0.00004f; // constitutive insertion of NMDAR
var kNMDARCa :NDArray = onenda * -0.2f; // converting Inmda to Ca flux
var CaBasal :NDArray = onenda * 0.001f; // basal Ca influx
// magnesium
var Mg50 :NDArray = onenda * 400f; // 50//
var MgSlope :NDArray = onenda * 50f;
// sensor
var aCDK50 :NDArray = onenda * 0.5f;
var aCDKSlope :NDArray = onenda * 0.1f;
//presynaptic constants
var preVol :NDArray = onenda // volume of presynaptic terminals
var a1 :NDArray = onenda * 0.02f; // k+ of Ca buffer
var b1 :NDArray = onenda * 0.0001f; // k- of Ca buffer
var tPreCaBuffer :NDArray = onenda * 5f; // presynaptic Ca buffer
var kSensorDeg :NDArray = onenda * 0.0001f; // rate of sensor degradation
var a2 :NDArray = onenda * 0.01f // k+ of CDK
var b2 :NDArray = onenda * 0.0001f; // k- of CDK
var aCDKNR2B50 :NDArray = onenda * 0.7f;
var aCDKNR2BSlope :NDArray = onenda * 0.3f;
var a4 :NDArray = onenda * 0.00025f; // k+ of pre TrkB
var b4 :NDArray = onenda * 0.0002f; // k- of pre TrkB
var aPreTrkB50 :NDArray = onenda * 0.4f;
var aPreTrkBSlope :NDArray = onenda * 0.1f;
var kpreNR2BIn :NDArray = onenda * 0.000025f;
var kpreAbetaDeg :NDArray = onenda * 0.0001f;
  var kMgIn :NDArray = onenda * 0.004f; // constitutive Mg influx (TRPM7...)
var kpreMgOut :NDArray = onenda * 0.00002f; // kMgIn/kMgOut = 0.5
var kBDNFMg :NDArray = onenda * 0.04f; // influence of BDNF to Mg influx
//postsynaptic constants
var postVol :NDArray = onenda * 1f; // volume of postsynaptic spine
var qAMPA :NDArray = onenda * 0.2f; // AMPAR
var kpostCaOut :NDArray = onenda * 0.1f; // Ca extrusion from spine
var CN50 :NDArray = onenda * 0.55f;
var CNSlope :NDArray = onenda * 0.1f;
var TrkB50 :NDArray = onenda * 0.4f;
var TrkBSlope :NDArray = onenda * 0.1f;
var a5 :NDArray = onenda * 0.005f; // k+ of Ca buffer
var b5 :NDArray = onenda * 0.001f; // k- of Ca buffer
var tPostCaBuffer :NDArray = onenda * 1f;
var a6 :NDArray = onenda * 0.005f; // k+ of CN activation
var b6 :NDArray = onenda * 0.001f; // k- of CN
var a7 :NDArray = onenda * 0.0003f; // TrkB activation
var b7 :NDArray = onenda * 0.0002f; // TrkB deactivation
//presynaptic variables
var preCa :NDArray = onenda * 0; // presynaptic [Ca]i
  var preCaBuff :NDArray = onenda * 5; // presynaptic Ca buffer
var aPreCDK :NDArray = onenda * 0;
var preSensor :NDArray = onenda * 0.7f; // presynaptic [sensor]
var aPreTrkB :NDArray = onenda * 0;
var preNR2B :NDArray = onenda * 1;
var preAbeta :NDArray = onenda * 1;
var preMg :NDArray = onenda * 400;
//postsynaptic variables
var postCa :NDArray = onenda * 0f;
var postCaBuff :NDArray = onenda * 1f;
var aPostCN :NDArray = onenda * 0f;
var aPostTrkB :NDArray = onenda * 0f;
var qNMDAR :NDArray = onenda * 1f; // qNMDAR
// other variables for communication between compartments
var EPSC :NDArray = onenda * 0f;
override def getInitial():Array[NDArray] = {
Array(preCa,preCaBuff,aPreCDK,preSensor,aPreTrkB,preNR2B,preAbeta,preMg,
postCa,postCaBuff,aPostCN,aPostTrkB,qNMDAR)
}
//cache variable
// var preVm = NDArray.empty(shape,ctx)
// //Calcium NDArray.empty(shape,ctx)
var Ivgcc_arr = Array.fill[Float](Config.NUMBER)(0f)
// var Ivgcc = NDArray.empty(shape,ctx)
// pMgBlock = NDArray.empty(shape,ctx)
// var IpreNR2B = NDArray.empty(shape,ctx)
// var IAChR = NDArray.empty(shape,ctx)
// var preCaIn = NDArray.empty(shape,ctx)
// var fCaBuff = NDArray.empty(shape,ctx)
// var kpreCaOut = NDArray.empty(shape,ctx)
// var d_preCa = NDArray.empty(shape,ctx)
// var d_preCaBuff = NDArray.empty(shape,ctx)
// //CDK NDArray.empty(shape,ctx)
// var d_aPreCDK = NDArray.empty(shape,ctx)
// // sensor: insert NDArray.empty(shape,ctx)
// var freeSensor = NDArray.empty(shape,ctx)
// // freeSensor nee NDArray.empty(shape,ctx)
// var kSensorIn = NDArray.empty(shape,ctx)
// var d_preSensor = NDArray.empty(shape,ctx)
// //BDNF=this.matri NDArray.empty(shape,ctx)
// // right now BDNF NDArray.empty(shape,ctx)
// var BDNF = NDArray.empty(shape,ctx)
// var d_aPreTrkB = NDArray.empty(shape,ctx)
// var kpreNR2BDeg = NDArray.empty(shape,ctx)
// var d_preNR2B = NDArray.empty(shape,ctx)
// var kpreAbetaIn = NDArray.empty(shape,ctx)
// var d_preAbeta = NDArray.empty(shape,ctx)
// var preMgIn = NDArray.empty(shape,ctx)
// var d_preMg = NDArray.empty(shape,ctx)
// var Pr = NDArray.empty(shape,ctx)
// var postVm = NDArray.empty(shape,ctx)
// var Iampa = NDArray.empty(shape,ctx)
// var pMgBlock = NDArray.empty(shape,ctx)
// var Inmda = NDArray.empty(shape,ctx)
// var postCaIn = NDArray.empty(shape,ctx)
// var fpostCaBuff = NDArray.empty(shape,ctx)
// var d_postCa = NDArray.empty(shape,ctx)
// var d_postCaBuff= NDArray.empty(shape,ctx)
// var pCN = NDArray.empty(shape,ctx)
// var d_aPostCN = NDArray.empty(shape,ctx)
// var pTrkB = NDArray.empty(shape,ctx)
// var d_aPostTrkB = NDArray.empty(shape,ctx)
// var kNMDARdeg = NDArray.empty(shape,ctx)
// var d_qNMDAR = NDArray.empty(shape,ctx)
//bak
val preVmbak = NDArray.ones(Config.SHAPE,Context.defaultCtx)
val vsbak = NDArray.ones(Config.SHAPE,ctx)
override def update(t: NDArray,y:Array[NDArray],yDot:Array[NDArray],indices:Array[Int]):Array[NDArray] = {
this.preCa = y(indices(0));
this.preCaBuff = y(indices(1));
this.aPreCDK = y(indices(2));
this.preSensor = y(indices(3));
this.aPreTrkB = y(indices(4));
this.preNR2B = y(indices(5));
this.preAbeta = y(indices(6));
this.preMg = y(indices(7));
this.postCa = y(indices(8));
this.postCaBuff = y(indices(9));
this.aPostCN = y(indices(10));
this.aPostTrkB = y(indices(11));
this.qNMDAR = y(indices(12));
//-------------------------Presyanptic dynamics-----------------------------
this.axon.preVm.copyTo(this.preVmbak)
this.Vs.copyTo(this.vsbak)
// var preVm=this.axon.preVm;
//Calcium
// for(i<-0 until Config.NUMBER){
// if(this.preVmbak(0,i) > this.vsbak(0,i)){
// Ivgcc_arr(i) = 0.05f * (this.preVmbak(0,i)-this.vsbak(0,i)) // simply assume the size of Ivgcc linearly correlated with Vm after it pass V threshold
// }
// }
var Ivgcc = NDArray.array(Ivgcc_arr, Config.SHAPE, ctx)
// pMgBlock = 1/(1+(this.Mgo/this.kMgNMDAR)*Math.exp(-2*preVm/25.4)); // NMDAR Mg block
var IpreNR2B = NDArray.zeros(Config.SHAPE,ctx) //Pr1*preNR2B1*(preVm1 - Vca)*pMgBlock1; // EPSCnmda
var IAChR = NDArray.zeros(Config.SHAPE,ctx);
var preCaIn = IpreNR2B*this.kNMDARCa+Ivgcc+IAChR;
var fCaBuff = this.tPreCaBuffer-this.preCaBuff; // Ca buffer
    var kpreCaOut = onenda *(0.1f) / (NDArray.exp((this.Mg50-this.preMg)/this.MgSlope)+1); // Ca efflux is a function of [Mg]i (Boltzmann sigmoid function)
var d_preCa = (preCaIn+this.b1 * this.preCaBuff - (kpreCaOut+this.a1*fCaBuff)*this.preCa)/this.preVol; // dx/dt = (Jin-Jout)/vol
var d_preCaBuff = this.a1*fCaBuff*this.preCa-this.b1*this.preCaBuff; // presynaptic Ca buffer
//CDK
var d_aPreCDK = this.a2*(-this.aPreCDK+1)*this.preCa-this.b2*this.aPreCDK; // presynaptic CDK activation depends on Ca level
// sensor: insertion of sensor is inhibited by Ca-dependent activation of CDK5
var freeSensor = this.axon.freeSensor;
// freeSensor needs to be shared across synapses
var kSensorIn =(onenda * 0.0001f)/(NDArray.exp((this.aPreCDK-this.aCDK50)/this.aCDKSlope)+1);
var d_preSensor = (kSensorIn*freeSensor-this.kSensorDeg*this.preSensor)/this.preVol;
//BDNF=this.matrix.BDNF
// right now BDNF retrograde signalling is synapse specific
var BDNF = (this.dendrite.postVm-this.Vr)*this.qNMDAR; // retrograde signalling following coincident detection
// TrkB activation depends on BDNF concentration
var d_aPreTrkB = this.a4*(-this.aPreTrkB+1)*BDNF-this.b4*this.aPreTrkB;
// degradation of presynaptic NR2B by Calpain/CDK5 dependent process
var kpreNR2BDeg = (onenda * 0.0002f)/(NDArray.exp((this.aCDKNR2B50-this.aPreCDK)/this.aCDKNR2BSlope)+1);
var d_preNR2B = (this.kpreNR2BIn - kpreNR2BDeg*this.preNR2B)/this.preVol;
// synthesis of Abeta is inhibited by BDNF
var kpreAbetaIn =(onenda * 0.0001f)/(NDArray.exp((this.aPreTrkB-this.aPreTrkB50)/this.aPreTrkBSlope)+1);
var d_preAbeta = (kpreAbetaIn - this.kpreAbetaDeg*this.preAbeta)/this.preVol;
// Mg
var preMgIn = this.kMgIn+this.kBDNFMg*BDNF; // presynaptic Mg influx = constitutive + retrograde signalling
var d_preMg = (preMgIn-this.kpreMgOut*this.preMg)/this.preVol;
// ------------------postsynaptic--------------------------
var Pr = this.preCa*this.preSensor; // Pr: probability of transmitter release
var postVm = this.dendrite.postVm;
var Iampa = Pr*this.qAMPA*(postVm-this.Ve); // EPSCampa
var pMgBlock = onenda /((this.Mgo/this.kMgNMDAR)*NDArray.exp(postVm*(-2f/25.4f))+1); // NMDAR Mg block
var Inmda = Pr*this.qNMDAR*(postVm - this.Vca)*pMgBlock; // EPSCnmda
this.EPSC = Iampa+Inmda;
//Calcium
var postCaIn = Inmda*this.kNMDARCa+this.CaBasal; // total postsynaptic Ca influx = NMDAR-mediated + basal
var fpostCaBuff = this.tPostCaBuffer-this.postCaBuff;
var d_postCa = (postCaIn+this.b5*this.postCaBuff - (this.kpostCaOut+this.a5*fpostCaBuff)*this.postCa)/this.postVol;
var d_postCaBuff = this.a5*fpostCaBuff*this.postCa-this.b5*this.postCaBuff; // postsynaptic Ca buffer
// degradation of NMDAR is promoted by CN (calcineurin), which is activated
// by [Ca], while BDNF protects the receptor via activation of Src kinase.
var pCN = onenda /(NDArray.exp((this.CN50-this.aPostCN)/this.CNSlope)+1);
var d_aPostCN = this.a6*(-this.aPostCN+1)*this.postCa-this.b6*this.aPostCN; // postsynaptic CN activation
var pTrkB = onenda/(NDArray.exp((this.aPostTrkB-this.TrkB50)/this.TrkBSlope)+1);
var d_aPostTrkB = this.a7*(-this.aPostTrkB+1)*BDNF-this.b7*this.aPostTrkB;
var kNMDARdeg = pCN*pTrkB*0.005f; // degradation rate: promoted by CN, suppressed by TrkB activation
var d_qNMDAR = this.kNMDARIn - kNMDARdeg*this.qNMDAR;
yDot(indices(0)) = d_preCa;
yDot(indices(1)) = d_preCaBuff;
yDot(indices(2)) = d_aPreCDK;
yDot(indices(3)) = d_preSensor;
yDot(indices(4)) = d_aPreTrkB;
yDot(indices(5)) = d_preNR2B;
yDot(indices(6)) = d_preAbeta;
yDot(indices(7)) = d_preMg;
yDot(indices(8)) = d_postCa;
yDot(indices(9)) = d_postCaBuff;
yDot(indices(10)) = d_aPostCN;
yDot(indices(11)) = d_aPostTrkB;
yDot(indices(12)) = d_qNMDAR
// free the per-step temporaries (native NDArray memory is not garbage collected)
Ivgcc.dispose()   // re-created every call via NDArray.array; disposing it here avoids a leak
IpreNR2B.dispose()
IAChR.dispose()
preCaIn.dispose()
fCaBuff.dispose()
kpreCaOut.dispose()
kSensorIn.dispose()
BDNF.dispose()
kpreNR2BDeg.dispose()
kpreAbetaIn.dispose()
preMgIn.dispose()
Pr.dispose()
Iampa.dispose()
pMgBlock.dispose()
Inmda.dispose()
postCaIn.dispose()
fpostCaBuff.dispose()
pCN.dispose()
pTrkB.dispose()
kNMDARdeg.dispose()
yDot
}
//connectivity
def getaxon():Axon = {
this.axon;
}
def getdendrite():Dendrite = {
this.dendrite;
}
}
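// Hedged integration sketch: only update()'s signature above comes from this
// class; the surrounding solver loop is an assumption for illustration.
//   synapse.update(t, y, yDot, indices)
//   for (i <- indices) y(i) += yDot(i) * dt   // explicit Euler step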
|
Liuxg16/BrainMatrix
|
scalakernel/src/main/java/thu/brainmatrix/char_rnn_symbol/Lstm.scala
|
package thu.brainmatrix.char_rnn_symbol
import thu.brainmatrix.Symbol
import thu.brainmatrix.Shape
import Config._
object Lstm {
/**
* @author liuxianggen
* @date 20160718
* @brief builds the unrolled training symbol of the character-level LSTM
* @param n_layer number of stacked LSTM layers
* @param seq_len the length of the unrolled sequence
* @param dim_hidden hidden-state dimension of each LSTM cell
* @param dim_embed dimension of the input embedding
* @param n_alphabet size of the alphabet (width of the softmax output)
* @param dropout dropout ratio between stacked layers (0 disables it)
* @param output_states reserved flag; unused in this implementation
* @return the "softmax" output symbol of the unrolled network
*/
def LSTM(n_layer:Int,seq_len:Int,dim_hidden:Int,dim_embed:Int,
n_alphabet:Int,dropout:Float=0, output_states:Boolean=false):Symbol = {
val embed_W = Symbol.CreateVariable("_embed_weight")
val pred_W = Symbol.CreateVariable("_pred_weight")
val pred_b = Symbol.CreateVariable("_pred_bias")
var layer_param_states_ = for(i<- 0 until n_layer) yield{
val param = new LSTMParam(Symbol.CreateVariable(s"_l${i}_i2h_weight"),
Symbol.CreateVariable(s"_l${i}_h2h_weight"),
Symbol.CreateVariable(s"_l${i}_i2h_bias"),
Symbol.CreateVariable(s"_l${i}_h2h_bias"))
val state = new LSTMState(Symbol.CreateVariable(s"_l${i}_init_c"),
Symbol.CreateVariable(s"_l${i}_init_h"))
(param,state)
}
val layer_param_states = layer_param_states_.toArray
var data = Symbol.CreateVariable("data")
var label = Symbol.CreateVariable("label")
// val embed = Symbol.Embedding("embed")(Map("data" -> data, "input_dim" -> inputSize,
// "weight" -> embedWeight, "output_dim" -> numEmbed))
val inputs = Symbol.SliceChannel()(Array(data),Map("num_outputs" -> seq_len, "squeeze_axis" -> true))
var dpRatio = 0f
var hiddens = Array[Symbol]()
var hidden: Symbol = null
var input :Symbol = null
for(t<-0 until seq_len){
input = inputs.get(t)
hidden = Symbol.FullyConnected(s"fully_$t")(Map("data"->input,"weight"->embed_W,"no_bias"->"true","num_hidden"->dim_embed))
//stack LSTM cells
for(i<-0 until n_layer){
if (i == 0) dpRatio = 0f else dpRatio = dropout
val (l_param,l_state) = layer_param_states(i)
//val dp = if(i==1) 1 else dropout //not do dropout for input layer
val next_state = lstmCell(s"_lstm_$t",hidden, l_state,l_param, num_hidden=dim_hidden, dropout=dpRatio)
hidden = next_state.h
layer_param_states(i) = (l_param,next_state)
}
if (dropout > 0f) hidden = Symbol.Dropout()(Map("data" -> hidden, "p" -> dropout))
hiddens = hiddens :+ hidden
}
val hiddenConcat = Symbol.Concat()(hiddens, Map("dim" -> 0))
val pred = Symbol.FullyConnected("pred")(Map("data"->hiddenConcat,"weight"->pred_W,"bias"->pred_b,"num_hidden"->n_alphabet))
val label1 = Symbol.Reshape()(Map("data" -> label, "target_shape" -> "(0,)"))
val smax = Symbol.SoftmaxOutput("softmax")(Map("data" -> pred,"label"->label1))
smax
}
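// Hedged usage sketch (the concrete dimensions below are illustrative
// assumptions, not values taken from Config):
// val net = LSTM(n_layer = 2, seq_len = 32, dim_hidden = 64,
//                dim_embed = 24, n_alphabet = 87, dropout = 0.5f)
// net.listArguments().foreach(println)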
//test
def LSTMNet(n_layer:Int,seq_len:Int,dim_hidden:Int,dim_embed:Int,
n_alphabet:Int,dropout:Float=0, output_states:Boolean=false):Symbol = {
val embed_W = Symbol.CreateVariable("_embed_weight")
val pred_W = Symbol.CreateVariable("_pred_weight")
val pred_b = Symbol.CreateVariable("_pred_bias")
var layer_param_states_ = for(i<- 0 until n_layer) yield{
val param = new LSTMParam(Symbol.CreateVariable(s"_l${i}_i2h_weight"),
Symbol.CreateVariable(s"_l${i}_h2h_weight"),
Symbol.CreateVariable(s"_l${i}_i2h_bias"),
Symbol.CreateVariable(s"_l${i}_h2h_bias"))
val state = new LSTMState(Symbol.CreateVariable(s"_l${i}_init_c"),
Symbol.CreateVariable(s"_l${i}_init_h"))
(param,state)
}
val layer_param_states = layer_param_states_.toArray
var outputs = Array[Symbol]()
for(t<-0 until seq_len){
var data_i = Symbol.CreateVariable(s"data_$t")
var label_i = Symbol.CreateVariable(s"label_$t")
var hidden = Symbol.FullyConnected(s"fully_$t")(Map("data"->data_i,"weight"->embed_W,"no_bias"->"true","num_hidden"->dim_embed))
//stack LSTM cells
for(i<-0 until n_layer){
val (l_param,l_state) = layer_param_states(i)
//val dp = if(i==1) 1 else dropout //not do dropout for input layer
val next_state = lstmCell(s"_lstm_$t",hidden, l_state,l_param, num_hidden=dim_hidden, dropout=dropout)
hidden = next_state.h
layer_param_states(i) = (l_param,next_state)
}
val pred = Symbol.FullyConnected(s"_pred_$t")(Map("data"->hidden,"weight"->pred_W,"bias"->pred_b,"num_hidden"->n_alphabet))
val smax = Symbol.SoftmaxOutput(s"_softmax_$t")(Map("data" -> pred,"label"->label_i))
outputs = outputs :+ smax
}
Symbol.Group(outputs: _*)
}
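// Unlike LSTM above, LSTMNet declares a separate data_t/label_t variable
// pair per timestep and groups one softmax output per step, so it is fed
// already-unrolled inputs rather than a single tensor split by SliceChannel.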
def lstmGenerator(n_layer:Int,seq_len:Int,dim_hidden:Int,dim_embed:Int,
n_alphabet:Int,dropout:Float=0, output_states:Boolean=false):Symbol = {
var layer_param_states_ = for(i<- 0 until n_layer) yield{
val param = new LSTMParam(Symbol.CreateVariable(s"_l${i}_i2h_weight"),
Symbol.CreateVariable(s"_l${i}_h2h_weight"),
Symbol.CreateVariable(s"_l${i}_i2h_bias"),
Symbol.CreateVariable(s"_l${i}_h2h_bias"))
val state = new LSTMState(Symbol.CreateVariable(s"_l${i}_init_c"),
Symbol.CreateVariable(s"_l${i}_init_h"))
(param,state)
}
val layer_param_states = layer_param_states_.toArray
var outputs = Array[Symbol]()
for(t<-0 until seq_len){
var data_i = Symbol.CreateVariable(s"data_$t")
var hidden = Symbol.FullyConnected(s"fully_$t")(Map("data"->data_i,"num_hidden"->dim_embed))
//stack LSTM cells
for(i<-0 until n_layer){
val (l_param,l_state) = layer_param_states(i)
val next_state = lstmCell(s"_lstm_$t",hidden, l_state,l_param, num_hidden=dim_hidden, dropout=dropout)
hidden = next_state.h
layer_param_states(i) = (l_param,next_state)
}
if(dropout>0)
hidden = Symbol.Dropout()(Map("data"->hidden,"p"->dropout))
val pred = Symbol.FullyConnected(s"_pred_$t")(Map("data"->hidden,"num_hidden"->n_alphabet))
val smax = Symbol.SoftmaxOutput(s"_softmax_$t")(Map("data" -> pred))
outputs = outputs :+ smax
}
Symbol.Group(outputs: _*)
}
def lstmCell(name:String,input:Symbol, prev_state:LSTMState, param:LSTMParam,
num_hidden:Int=512, dropout:Float=0):LSTMState = {
var x = {
if (dropout > 0f) Symbol.Dropout()(Map("data" -> input, "p" -> dropout))
else input
}
val i2h = Symbol.FullyConnected(s"${name}_i2h")(Map("data"->x,"weight"->param.i2h_W,"num_hidden"->num_hidden*4,"bias"->param.i2h_b))
val h2h = Symbol.FullyConnected(s"${name}_h2h")(Map("data"->prev_state.h,"weight"->param.h2h_W,"num_hidden"->num_hidden*4,"bias"->param.h2h_b))
val gates = Symbol.SliceChannel(s"${name}_gates")(Array(i2h+h2h),Map("num_outputs"->4))
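// i2h and h2h each emit 4*num_hidden units; slicing their sum into 4 parts
// yields, in order, the input gate, input transform, forget gate and
// output gate used below.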
val in_gate = Symbol.Activation()(Map("data"->gates.get(0),"name" -> "sig_in_gate", "act_type" -> "sigmoid"))
val in_trans = Symbol.Activation()(Map("data"->gates.get(1),"name" -> "sig_in_trans", "act_type" -> "tanh"))
val forget_gate = Symbol.Activation()(Map("data"->gates.get(2),"name" -> "sig_f_gate", "act_type" -> "sigmoid"))
val out_gate = Symbol.Activation()(Map("data"->gates.get(3),"name" -> "sig_out_gate", "act_type" -> "sigmoid"))
val next_c = (forget_gate * prev_state.c) + (in_gate * in_trans)
val next_h = out_gate * Symbol.Activation()(Map("data"->next_c, "act_type"->"tanh"))
// val (a,b,_) = next_c.inferShape(Map("data_1"-> Vector(32,24),"_l0_init_h"->Vector(32,64),"_l0_init_c"->Vector(32,64)))
// val (a1,b1,_) = (next_c).inferShape(Map("data_1"-> Vector(32,24),"_l0_init_c"->Vector(32,64),"_l0_init_h"->Vector(32,64)))
new LSTMState(next_c,next_h)
}
/**
* @author liuxianggen
* @date 20160718
* @brief builds a single-step forward symbol for generation; despite the
* seq_len parameter, only one timestep ("fully_0"/"_lstm_0") is unrolled
* @param n_layer number of stacked LSTM layers
* @param seq_len accepted for symmetry with LSTM but unused here
* @param dim_hidden hidden-state dimension of each LSTM cell
* @param dim_embed dimension of the input embedding
* @param n_alphabet size of the alphabet (width of the softmax output)
* @param dropout dropout ratio between stacked layers (0 disables it)
* @param output_states reserved flag; unused in this implementation
* @return the "softmax" output symbol
*/
def LSTM_forward(n_layer:Int,seq_len:Int,dim_hidden:Int,dim_embed:Int,
n_alphabet:Int,dropout:Float=0, output_states:Boolean=false):Symbol = {
val embed_W = Symbol.CreateVariable("_embed_weight")
val pred_W = Symbol.CreateVariable("_pred_weight")
val pred_b = Symbol.CreateVariable("_pred_bias")
var layer_param_states_ = for(i<- 0 until n_layer) yield{
val param = new LSTMParam(Symbol.CreateVariable(s"_l${i}_i2h_weight"),
Symbol.CreateVariable(s"_l${i}_h2h_weight"),
Symbol.CreateVariable(s"_l${i}_i2h_bias"),
Symbol.CreateVariable(s"_l${i}_h2h_bias"))
val state = new LSTMState(Symbol.CreateVariable(s"_l${i}_init_c"),
Symbol.CreateVariable(s"_l${i}_init_h"))
(param,state)
}
val layer_param_states = layer_param_states_.toArray
var data = Symbol.CreateVariable("data")
var label = Symbol.CreateVariable("label")
// val embed = Symbol.Embedding("embed")(Map("data" -> data, "input_dim" -> inputSize,
// "weight" -> embedWeight, "output_dim" -> numEmbed))
var dpRatio = 0f
var hiddens = Array[Symbol]()
var hidden = Symbol.FullyConnected(s"fully_0")(Map("data"->data,"weight"->embed_W,"no_bias"->"true","num_hidden"->dim_embed))
//stack LSTM cells
for(i<-0 until n_layer){
if (i == 0) dpRatio = 0f else dpRatio = dropout
val (l_param,l_state) = layer_param_states(i)
//val dp = if(i==1) 1 else dropout //not do dropout for input layer
val next_state = lstmCell("_lstm_0",hidden, l_state,l_param, num_hidden=dim_hidden, dropout=dpRatio)
hidden = next_state.h
layer_param_states(i) = (l_param,next_state)
}
if (dropout > 0f) hidden = Symbol.Dropout()(Map("data" -> hidden, "p" -> dropout))
hiddens = hiddens :+ hidden
val hiddenConcat = Symbol.Concat()(hiddens, Map("dim" -> 0))
val pred = Symbol.FullyConnected("pred")(Map("data"->hiddenConcat,"weight"->pred_W,"bias"->pred_b,"num_hidden"->n_alphabet))
val label1 = Symbol.Reshape()(Map("data" -> label, "target_shape" -> "(0,)"))
val smax = Symbol.SoftmaxOutput("softmax")(Map("data" -> pred,"label"->label1))
smax
}
def main(args:Array[String]){
// var layer_param_states = for(i<- 0 until 2) yield{
// val param = new LSTMParam(Symbol.CreateVariable(s"_l${i}_i2h_weight"),
// Symbol.CreateVariable(s"_l${i}_h2h_weight"),
// Symbol.CreateVariable(s"_l${i}_i2h_bias"),
// Symbol.CreateVariable(s"_l${i}_h2h_bias"))
// val state = new LSTMState(Symbol.CreateVariable(s"_l${i}_init_c"),
// Symbol.CreateVariable(s"_l${i}_init_h"))
// (param,state)
// }
//
val train_data_shape_map = (0 until SEQ_LENGTH).map(x => {
(s"data_$x",Shape(BATCH_SIZE,1))
}).toMap
val label_data_shape_map = (0 until SEQ_LENGTH).map(x => {
(s"label_$x",Shape(BATCH_SIZE))
}).toMap
val init_state_map = Map("_l0_init_h"->Shape(16,64),"_l0_init_c"->Shape(16,64),"_l1_init_h"->Shape(16,64),"_l1_init_c"->Shape(16,64))
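// NOTE: batch size 16 and hidden size 64 are hard-coded in the init-state
// shapes above; they must match BATCH_SIZE and the hidden dimension used
// for the data shapes for inferShape below to succeed.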
val input_shape = train_data_shape_map ++ label_data_shape_map ++ init_state_map
//
val lstm = Lstm.LSTMNet(LSTM_N_LAYER, SEQ_LENGTH, DIM_HIDDEN, DIM_EMBED, 87, DROPOUT)
lstm.listArguments().foreach(println)
val (a,b,_) = lstm.inferShape(input_shape)
println("**********LSTM**************")
// b.foreach {println}
// var data_i = Symbol.CreateVariable(s"data_1")
// var label_i = Symbol.CreateVariable(s"label_1")
// val h = Symbol.CreateVariable("h")
//
//
// var i2h_W = Symbol.CreateVariable("i2h_W")
// var h2h_W = Symbol.CreateVariable("h2h_W")
// var h2h_b = Symbol.CreateVariable("h2h_b")
// var i2h_b = Symbol.CreateVariable("i2h_b")
//
// var x = data_i
// val name = "1"
// val num_hidden = 64
// val i2h = Symbol.FullyConnected(s"${name}_i2h")(Map("data"->x,"weight"->i2h_W,"num_hidden"->num_hidden*4,"bias"->i2h_b))
// val h2h = Symbol.FullyConnected(s"${name}_h2h")(Map("data"->h,"weight"->h2h_W,"num_hidden"->num_hidden*4,"bias"->h2h_b))
//// val (a,b,_) = i2h.inferShape(Map("data_12"-> Vector(32,24)))
//// b.foreach(println)
// val gates = Symbol.SliceChannel(s"${name}_gates")(Array(i2h+h2h),Map("num_outputs"->4))
// val in_gate = Symbol.Activation()(Map("data"->gates.get(0),"name" -> "sig_in_gate", "act_type" -> "sigmoid"))
// val in_trans = Symbol.Activation()(Map("data"->gates.get(1),"name" -> "sig_in_trans", "act_type" -> "tanh"))
// val forget_gate = Symbol.Activation()(Map("data"->gates.get(2),"name" -> "sig_f_gate", "act_type" -> "sigmoid"))
// val out_gate = Symbol.Activation()(Map("data"->gates.get(3),"name" -> "sig_out_gate", "act_type" -> "sigmoid"))
// val fg = Symbol.FullyConnected(s"${name}_h22h")(Map("data"->h2h,"num_hidden"->64))
// val c = Symbol.CreateVariable("c")
// val next_c = (forget_gate *c )
// println(next_c.debug())
// val (a,b,_) = (next_c).inferShape(Map("data_1"-> Vector(32,24),"c"->Vector(32,64),"h"->Vector(32,64)))
// b.foreach {println}
//
// val c = lstmCell(s"_lstm_1",data_i, l_state,l_param, num_hidden=64, dropout=0)
}
}
class LSTMState(val c:Symbol,val h:Symbol)
class LSTMParam(val i2h_W:Symbol,val h2h_W:Symbol,val i2h_b:Symbol,val h2h_b:Symbol)
class LSTMFEDState(val input:Symbol)
|
Liuxg16/BrainMatrix
|
scalakernel/src/test/java/thu/brainmatrix/suite/kvstoreTest.scala
|
package thu.brainmatrix.suite
import thu.brainmatrix.Base._
import thu.brainmatrix.KVStore
import thu.brainmatrix.MXKVStoreUpdater
import thu.brainmatrix.NDArray
import thu.brainmatrix.Shape
import scala.Vector
object kvstoreTest {
def main(args:Array[String]){
test1
// val kv = KVStore.create()
// val shape:Shape =Vector(2,3,4)
// val shape1 = Vector(1,2)
//// val shape1 = Shape (3,4,5)
// val ndArray = NDArray.ones(shape)
// val ndArray1 = NDArray.ones(1,2)*4
//
// val keys = Array(2,3)
// val values = Array(ndArray,ndArray1)
// kv.init(keys, values)
// kv.pull(2, ndArray)
//// kv.init(2, NDArray.ones(shape1))
// kv.pull(3, ndArray1)
// println(kv.numWorkers)
}
def test {
val kv = KVStore.create()
val shape = Shape(2, 1)
val ndArray = NDArray.zeros(shape)
// kv.init(3, NDArray.ones(shape))
kv.push(3, NDArray.ones(shape) * 4)
kv.pull(3, ndArray)
// println(ndArray.toString)
// assert(ndArray.toArray === Array(4f, 4f))
}
def test1{
val kv = KVStore.create()
val updater = new MXKVStoreUpdater {
override def update(key: Int, input: NDArray, stored: NDArray): Unit = {
// scalastyle:off println
// println(s"update on key $key")
// scalastyle:on println
stored += input * 2
}
override def dispose(): Unit = {}
}
kv.setUpdater(updater)
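// With stored += input * 2, every push of ones adds 2 to each element, so
// the three pulls below should print all-4s, all-6s, then all-8s (assuming
// the default local KVStore applies the updater on push).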
val shape = Shape(2, 2)
val ndArray = NDArray.zeros(shape)
kv.init(3, NDArray.ones(shape) * 4)
kv.pull(3, ndArray)
println(ndArray)
// assert(ndArray.toArray === Array(4f, 4f))
kv.push(3, NDArray.ones(shape))
kv.pull(3, ndArray)
println(ndArray)
kv.push(3, NDArray.ones(shape))
kv.pull(3, ndArray)
println(ndArray)
// kv.finalize()
}
def test2{
val kv = KVStore.create("local")
}
def test3{
val kv = KVStore.create("local")
// assert(kv.numWorkers === 1)
// assert(kv.rank === 0)
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/test/java/thu/brainmatrix/suite/OPTest.scala
|
//import
package thu.brainmatrix.suite
import thu.brainmatrix.OperatorProperty
/***
* by liuxianggen
* 2016-3-9
* brief to test the functions in OperatorProperty class
*/
object OPTest {
def main(args:Array[String]){
println("<--------------TEST OperatorProperty----------------->")
// mapTest
// ClassTest
// InitTest
// ListArgsTest
// copyTest
// ListAuxsTest
NumOutputTest
}
/**
* 2016-3-20:Failed to write core dump. Core dumps have been disabled
* solved use : OperatorProperty *op = static_cast<OperatorProperty*>(opHandle);
* to replace :
* OperatorPropertyReg *e = static_cast<OperatorPropertyReg *>(creator);
* op = e->body();
*/
def ListArgsTest{
val op = OperatorProperty("FullyConnected")//FullyConnected,Activation
op.ListArguments.foreach {println}
}
def ListAuxsTest{
val op = OperatorProperty("FullyConnected")//FullyConnected,Activation
op.ListAuxiliaryStates().foreach {println}
}
//by liuxianggen
//2016-3-9
def InitTest(){
val keys = Array("act_type")
val values = Array("relu")
val op = OperatorProperty("Activation")//FullyConnected,Activation
op.Init(keys,values)
op.printParam()
}
/**
* 2016-3-20:Failed to write core dump. Core dumps have been disabled
* solved use : OperatorProperty *op = static_cast<OperatorProperty*>(opHandle);
* to replace :
* OperatorPropertyReg *e = static_cast<OperatorPropertyReg *>(creator);
* op = e->body();
*/
def NumOutputTest{
val op = OperatorProperty("FullyConnected")//FullyConnected,Activation
println(op.NumVisibleOutputs())
}
//by liuxianggen
//2016-3-9
def ClassTest(){
var keys = Array("act_type")
var values = Array("relu")
var op = OperatorProperty("Activation")//FullyConnected,Activation
op.Init(keys,values)
op.printParam()
val keys1 = Array("act_type")
val values1 = Array("sigmoid")
val op1 = OperatorProperty("Activation")//FullyConnected,Activation
op1.Init(keys1,values1)
op1.printParam()
}
/**
* 2016-3-15
* by liuxianggen
*/
// def SymbolBaseTest(){
// var node_order:Vector[NodeRef] = Vector()
// val oprf = new OperatorPropertyRef
// val node = new Node(oprf)
// val noderef = new NodeRef
// node_order :+= noderef
//// node.inputs :+= new DataEntry(noderef,2)
// println(node_order.size)
// }
//
/**
* 2016-3-15
* by liuxianggen
*
*/
def mapTest{
var map1 = Map("a"-> 1,"b"-> 3)
var map2 = map1
map2 += "c"-> 4
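// Map is immutable here; += rebinds the var map2 to a new map, so map1
// still prints Map(a -> 1, b -> 3) while map2 also contains c -> 4.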
println(map1)
println(map2)
}
/**
* 2016-3-23
* by liuxianggen
* succeed!
*/
def copyTest{
val op = OperatorProperty("FullyConnected")
val op_1 = op.Copy()
op_1.ListArguments.foreach(println)
}
}
|
Liuxg16/BrainMatrix
|
scalakernel/src/test/java/thu/brainmatrix/suite/TestModelParallel.scala
|
//import thu.brainmatrix.Symbol
//import thu.brainmatrix.AttrScope
//import thu.brainmatrix.Context
//import thu.brainmatrix.NDArray
//import thu.brainmatrix.Base._
//
//
object ModelParallel {
//
// def main(args:Array[String]){
// testChain
//// testChain_simple
}
//
//
// def testChain {
// val n = 2
//
// val data1 = Symbol.Variable("data1")
// val data2 = Symbol.Variable("data2")
// println("-------------------------------------------")
// var net: Symbol = null
// new AttrScope(Map("ctx_group" -> "dev1")).withScope {
// println("here!!")
// net = (data1 + data2) * 3
// println("here!!")
// }
// new AttrScope(Map("ctx_group" -> "dev2")).withScope {
// net = net + data1
// }
// println("-------------------------------------------")
// val shape:Shape = Vector(4, 5)
// val (arr, arrGrad) =
// new Context(Context.cpu(1)).withScope {
// val arr = (0 until n).map(_ => NDArray.ones(shape))
// val arrGrad = (0 until n).map(_ => NDArray.empty(shape))
// (arr, arrGrad)
// }
//
// println("-------------------------------------------")
// val exec1 = net.bind(Context.cpu(),
// args = arr,
// argsGrad = arrGrad,
// gradReq = "write",
// auxStates = Nil,
// group2ctx = Map("dev1" -> Context.cpu(0), "dev2" -> Context.cpu(1)))
//
// println("-------------------------------------------")
// print(exec1.debugStr)
// println("-------------------------------------------")
//
//// arr(0).set(1f)
//// arr(1).set(2f)
//
// val arr2 = arr.map(_.copyTo(Context.cpu()))
// val arrGrad2 = arrGrad.map(_.copyTo(Context.cpu()))
//
//
// val exec2 = net.bind(Context.cpu(), args = arr2, argsGrad = arrGrad2)
//
// // Show the execution plan that involves copynode
// // scalastyle:off println
// print(exec1.debugStr)
// // scalastyle:on println
//
// exec1.forward()
// exec2.forward()
// println(exec1.outputs(0))
// println(exec2.outputs(0))
//
//
// val outGrad = NDArray.ones(shape, Context.cpu(1))
// exec1.backward(Array(outGrad))
// exec2.backward(Array(outGrad.copyTo(Context.cpu())))
//
// (arrGrad zip arrGrad2) foreach { case (a, b) =>
//// assert(reldiff(a, b) < 1e-6f)
// println(a-b)
// }
// }
//
//
// def testChain_simple{
// val n = 2
// val data1 = Symbol.Variable("data1")
// val data2 = Symbol.Variable("data2")
// var net: Symbol = null
// net = data1 + (data2 * 3)
//// net = net + data1
// val shape:Shape = Vector(4, 5)
//
// val (arr, arrGrad) =
// new Context(Context.cpu(0)).withScope {
// val arr = (0 until n).map(_ => NDArray.ones(shape))
// val arrGrad = (0 until n).map(_ => NDArray.empty(shape))
// (arr, arrGrad)
// }
//
//
// val exec1 = net.bind(Context.cpu(),
// args = arr,
// argsGrad = arrGrad,
// gradReq = "write",
// auxStates = Nil,
// group2ctx = Map("dev1" -> Context.cpu(0), "dev2" -> Context.cpu(1)))
//
// println("-------------------------------------------")
// print(exec1.debugStr)
// println("-------------------------------------------")
//
//
////
//// val arr2 = arr.map(_.copyTo(Context.cpu()))
//// val arrGrad2 = arrGrad.map(_.copyTo(Context.cpu()))
//// val exec2 = net.bind(Context.cpu(), args = arr2, argsGrad = arrGrad2)
////
//// // Show the execution plan that involves copynode
//// // scalastyle:off println
//// print(exec1.debugStr)
//// // scalastyle:on println
////
//// exec1.forward()
//// exec2.forward()
//// println(exec1.outputs(0))
//// println(exec2.outputs(0))
////
////
//// val outGrad = NDArray.ones(shape, Context.cpu(1))
//// exec1.backward(Array(outGrad))
//// exec2.backward(Array(outGrad.copyTo(Context.cpu())))
////
//// (arrGrad zip arrGrad2) foreach { case (a, b) =>
////// assert(reldiff(a, b) < 1e-6f)
//// println(a-b)
//// }
//
// }
//
//// assert(reldiff(exec1.outputs(0), exec2.outputs(0)) < 1e-6f)
//
//// (arrGrad zip arrGrad2) foreach { case (a, b) =>
////   assert(reldiff(a, b) < 1e-6f)
//// }
//}
|