text stringlengths 1 1.05M |
|---|
require 'spec_helper'
describe 'Stage administration using external data' do
# --- Fixture helpers: stable external-data identifiers shared by all contexts ---

# PubMed id assigned to canonical document 1 in pre_context.
def pmid_ex
28501917
end
# Free-text reference that resolves (via the mocked crossref query) to doi_ref.
def reference_text_1
"<NAME>. (2002) Personalised revision of ‘failed’ questions. Discourse Studies 4(4), 411–428."
end
# Free-text reference that resolves to doi_ref_2.
def reference_text_2
"<NAME>, Salcedo & Díaz (2018). Buhos: A web-based systematic literature review management software"
end
# DOI matching reference_text_2 (Buhos / SoftwareX).
def doi_ref_2
"10.1016/j.softx.2018.10.004"
end
# Looks up the Reference row created by pre_context for reference_text_1.
# Re-queried on every call, so it always reflects the latest DB state.
def reference_1
Reference.get_by_text(reference_text_1)
end
# Looks up the Reference row created by pre_context for reference_text_2.
def reference_2
Reference.get_by_text(reference_text_2)
end
# DOI assigned to canonical document 1 itself.
def doi_ex
"10.1007/s00204-017-1980-3"
end
# DOI matching reference_text_1 (Discourse Studies article).
def doi_ref
"10.1177/14614456020040040101"
end
# Shared per-context setup. Builds the review/search/record scaffold
# (sr_for_report), gives canonical document 1 a real title/doi/pmid, marks
# search 1 valid, and attaches the two free-text references to record 1
# without linking them to any canonical document (cd_id nil).
# Unless NO_CROSSREF_MOCKUP is set, the crossref DOI and query caches are
# primed from local JSON fixtures so the specs never hit the network.
def pre_context
sr_for_report
CanonicalDocument[1].update(:title=>"Using Framework Analysis in nursing research: a worked example.", :doi=>doi_ex, :pmid=>pmid_ex)
Search[1].update(:valid=>true)
create_references(texts: [reference_text_1, reference_text_2],
cd_id:[nil,nil],
record_id:1)
unless ENV["NO_CROSSREF_MOCKUP"]
# Cached crossref responses keyed by DOI...
$db[:crossref_dois].insert(:doi=>doi_ex,:json=>read_fixture("10.1007___s00204-017-1980-3.json"))
$db[:crossref_dois].insert(:doi=>doi_ref,:json=>read_fixture("10.1177___14614456020040040101.json"))
$db[:crossref_dois].insert(:doi=>doi_ref_2, :json=>read_fixture("10.1016___j.softx.2018.10.004.json"))
# ...and cached free-text query results keyed by query hash.
$db[:crossref_queries].insert(:id=>'32e989d317ea4172766cc80e484dceaebd67dd7a962a15891ad0bd1eef6428af',:json=>read_fixture('32e989d317ea4172766cc80e484dceaebd67dd7a962a15891ad0bd1eef6428af.json'))
$db[:crossref_queries].insert(:id=>'b853d71e3273321a0423a6b4b4ebefb313bfdef4c3d133f219c1e8cb0ef35398',:json=>read_fixture('b853d71e3273321a0423a6b4b4ebefb313bfdef4c3d133f219c1e8cb0ef35398.json'))
end
end
# Tears down everything pre_context created. The truncation order is kept
# exactly as in the original (join/detail tables before their parents) so
# foreign-key constraints are never violated.
def after_context
SystematicReview[1].delete
[:records_references, :records_searches, :records,
 :bib_references, :canonical_documents,
 :crossref_dois, :crossref_queries, :searches].each do |table|
  $db[table].delete
end
end
# One-time suite setup: mix the rack-test helpers into the example group,
# point the app at a fresh empty sqlite database, and log in as admin so
# the admin-only routes under test are reachable.
before(:all) do
RSpec.configure { |c| c.include RSpecMixin }
@temp=configure_empty_sqlite
login_admin
end
context "when /references/search_crossref_by_doi is used with a couple of dois assigned" do
before(:context) do
pre_context
# Both references carry DOIs, but any canonical documents for those DOIs
# are removed first, so the endpoint must create them from crossref data.
reference_1.update(doi:doi_ref)
reference_2.update(doi:doi_ref_2)
CanonicalDocument.where(doi:[doi_ref,doi_ref_2]).delete
post "/references/search_crossref_by_doi", doi: [doi_ref, doi_ref_2]
end
it "should redirect" do
#p last_response.body
expect(last_response).to be_redirect
end
it "should create a canonical document with correct doi" do
# Exactly one canonical document per DOI -- no duplicates.
expect(CanonicalDocument.where(doi:doi_ref).count).to eq(1)
expect(CanonicalDocument.where(doi:doi_ref_2).count).to eq(1)
end
after(:context) do
after_context
end
end
context "when add_doi on a valid reference" do
before(:context) do
pre_context
CanonicalDocument.where(:doi=>doi_ref).delete
reference_1.add_doi(doi_ref)
end
let(:cd_assoc) {CanonicalDocument.where(:doi=>doi_ref).first}
it "should create a canonical document with correct information" do
expect(cd_assoc).to be_truthy
end
it "should link reference to canonical document with correct information" do
expect(reference_1.canonical_document_id).to eq(cd_assoc.id)
end
after(:context) do
after_context
end
end
context "when /record/:id/search_crossref is called with a record" do
before(:context) do
pre_context
Record[1].update(title:'Using Framework Analysis')
get "/record/1/search_crossref"
end
it "should show a page including the name of reference" do
expect(last_response.body).to include "Using Framework Analysis"
end
after(:context) do
after_context
end
end
context "when /reference/:id/search_crossref is called with a ref without doi" do
before(:context) do
pre_context
ref_id=reference_1.id
get "/reference/#{ref_id}/search_crossref"
end
it "should show a page including the name of reference" do
expect(last_response.body).to include reference_text_1
end
after(:context) do
after_context
end
end
context "when /reference/:id/search_crossref is called with a ref with a doi" do
before(:context) do
pre_context
CanonicalDocument.where(doi:doi_ref).delete
reference_1.update(doi:doi_ref)
get "/reference/#{reference_1.id}/search_crossref"
end
it "should redirect" do
expect(last_response).to be_redirect
end
it "should create a canonical document with correct doi" do
expect(CanonicalDocument.where(doi:doi_ref).count).to eq(1)
end
after(:context) do
after_context
end
end
context "when /search/:id/records/complete_doi is called" do
before(:context) do
pre_context
$db[:records_references].delete
$db[:bib_references].delete
Record[1].update(:title=>"Using Framework Analysis in nursing research: a worked example", :year=>2013, :author=>"<NAME>. and <NAME>. and <NAME>. and <NAME>.", :canonical_document_id=>1, :doi=>doi_ex)
CanonicalDocument.insert(:title=>'dummy', :year=>0, :doi=>"10.1289/ehp.1307893")
get '/search/1/records/complete_doi'
end
let(:cd_on_ref) {CanonicalDocument.where(:doi=>"10.1289/ehp.1307893").first}
it "should create a correct crossref_integrator on Canonical Document" do
expect(Record[1].canonical_document.crossref_integrator).to be_truthy
end
it "should create references assigned to record" do
expect(Record[1].references.count).to eq(Record[1].canonical_document.crossref_integrator.references.count)
end
it "at least one reference have a doi assigned" do
#$log.info(Record[1].references.all)
expect(Record[1].references.any? {|v| !v[:doi].nil?}).to be_truthy
end
it "asign canonical document to reference with proper doi" do
expect(Reference.where(:doi=>"10.1289/ehp.1307893").first[:canonical_document_id]).to eq(cd_on_ref[:id])
end
after(:context) do
after_context
$db[:crossref_dois].delete
end
end
context "when /canonical_document/:id/search_crossref_references used" do
before(:context) do
pre_context
CanonicalDocument.where(doi:doi_ref).delete
reference_1.update(doi:doi_ref, canonical_document_id:nil)
get '/canonical_document/1/search_crossref_references'
end
let(:cd_assoc) {CanonicalDocument.where(:doi=>doi_ref).first}
it "should create a canonical document with correct information" do
expect(cd_assoc).to be_truthy
end
it "should link reference to canonical document with correct information" do
expect(reference_1.canonical_document_id).to eq(cd_assoc.id)
end
after(:context) do
after_context
end
end
context "when canonical_documents/review/:rev_id/complete_pubmed_pmid is called" do
before(:context) do
pre_context
CanonicalDocument[1].update(pmid:nil)
get '/canonical_documents/review/1/complete_pubmed_pmid'
end
it "should response be ok" do
expect(last_response).to be_redirect
end
it "should pmid be correct" do
expect(CanonicalDocument[1].pmid.to_s).to eq(pmid_ex.to_s)
end
after(:context) do
after_context
end
end
context "when /review/:rev_id/stage/:stage/complete_empty_abstract_scopus is called" do
before(:context) do
pre_context
Scopus_Abstract.insert(:id=>"2-s2.0-85019269575", :doi=>"10.1007/s00204-017-1980-3", :xml=>read_fixture("scopus_ex_1.xml"))
CanonicalDocument[1].update(abstract:nil)
get '/review/1/stage/review_full_text/complete_empty_abstract_scopus'
end
it "should response be ok" do
expect(last_response).to be_redirect
end
it "should include correct abstract on canonical document" do
expect(CanonicalDocument[1].abstract).to include("pioneered in the clinical field,")
end
after(:context) do
$db[:scopus_abstracts].delete
after_context
end
end
context "when /canonical_documents/review/:review_id/complete_abstract_scopus is called" do
before(:context) do
pre_context
Scopus_Abstract.insert(:id=>"2-s2.0-85019269575", :doi=>"10.1007/s00204-017-1980-3", :xml=>read_fixture("scopus_ex_1.xml"))
CanonicalDocument[1].update(abstract:nil)
get '/canonical_documents/review/1/complete_abstract_scopus'
end
it "should response be ok" do
expect(last_response).to be_redirect
end
it "should include correct abstract on canonical document" do
expect(CanonicalDocument[1].abstract).to include("pioneered in the clinical field,")
end
after(:context) do
$db[:scopus_abstracts].delete
after_context
end
end
context "when /canonical_document/:id/search_abstract_scopus is called" do
before(:context) do
pre_context
Scopus_Abstract.insert(:id=>"2-s2.0-85019269575", :doi=>"10.1007/s00204-017-1980-3", :xml=>read_fixture("scopus_ex_1.xml"))
CanonicalDocument[1].update(abstract:nil)
get '/canonical_document/1/search_abstract_scopus'
end
it "should response be ok" do
expect(last_response).to be_redirect
end
it "should include correct abstract on canonical document" do
expect(CanonicalDocument[1].abstract).to include("pioneered in the clinical field,")
end
after(:context) do
$db[:scopus_abstracts].delete
after_context
end
end
context "when /review/:rev_id/stage/:stage/complete_empty_abstract_pubmed is called" do
before(:context) do
pre_context
CanonicalDocument[1].update(abstract:nil)
get '/review/1/stage/review_full_text/complete_empty_abstract_pubmed'
end
it "should response be ok" do
expect(last_response).to be_redirect
end
it "should include correct abstract on canonical document" do
expect(CanonicalDocument[1].abstract).to include("pioneered in the clinical field,")
end
after(:context) do
after_context
end
end
context "when search for crossref references" do
before(:context) do
pre_context
get '/canonical_document/1/search_crossref_references'
end
it {expect(last_response).to be_redirect}
after(:context) do
after_context
end
end
context "when search for crossref data" do
before(:context) do
pre_context
get '/canonical_document/1/get_external_data/crossref'
end
it {expect(last_response).to be_redirect}
after(:context) do
after_context
end
end
context "when search for pubmed data" do
before(:context) do
pre_context
CanonicalDocument[1].update(:pmid=>pmid_ex)
get '/canonical_document/1/get_external_data/pubmed'
end
it {expect(last_response).to be_redirect}
after(:context) do
after_context
end
end
context "when update information of a canonical document using crossref" do
before(:context) do
pre_context
CanonicalDocument[1].update(title:nil, author: nil)
get '/canonical_document/1/update_using_crossref_info'
end
let(:cd) {CanonicalDocument[1]}
let(:cr) {CanonicalDocument[1].crossref_integrator}
it "expect last response to be redirect" do
#$log.info(last_response.body)
expect(last_response).to be_redirect
end
it "should update correct title and author" do
#$log.info(cd)
expect(cd.title).to eq cr.title
expect(cd.author).to eq cr.author
end
after(:context) do
after_context
end
end
context "when /search/:id/references/generate_canonical_doi/:n called using crossref" do
before(:context) do
pre_context
get '/search/1/references/generate_canonical_doi/20'
end
it "expect last response to be redirect" do
expect(last_response).to be_redirect
end
it "should update correct title and author" do
end
after(:context) do
after_context
end
end
end
|
const User = require("./User");
const Role = require("./Role");
module.exports = {
User,
Role
};
|
package cn.edu.sjtu.road;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Created by jason on 5/5/2015.
*/
/**
 * Plain data holder for one accelerometer sample plus device/location
 * metadata collected alongside it.
 */
public class AccelerometerModel {
    public String device;
    public double longitudinal;
    public double transverse;
    public long timeUTC;
    public double longitude;
    public double latitude;
    public double x;
    public double y;
    public double z;
    // Upload bookkeeping flag; samples start out unsent.
    public boolean sent = false;

    /**
     * Serializes this sample for transmission. Note that only device,
     * timeUTC and the raw x/y/z axes are emitted; the derived
     * (longitudinal/transverse) and location fields are intentionally not
     * part of the payload, matching the original behavior.
     *
     * @return JSON object with keys device, timeUTC, x, y, z
     * @throws JSONException if a value cannot be stored
     */
    public JSONObject toJson() throws JSONException {
        // org.json's put() returns the receiver, so the payload can be built fluently.
        return new JSONObject()
                .put("device", device)
                .put("timeUTC", timeUTC)
                .put("x", x)
                .put("y", y)
                .put("z", z);
    }
}
|
<filename>kernel-d-log/log-api/src/main/java/cn/stylefeng/roses/kernel/log/api/factory/appender/AuthedLogAppender.java
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
*
* 1.请不要删除和修改根目录下的LICENSE文件。
* 2.请不要删除和修改Guns源码头部的版权声明。
* 3.请保留源码和相关描述文件的项目出处,作者声明等。
* 4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns
* 5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns
* 6.若您的项目无法满足以上几点,可申请商业授权
*/
package cn.stylefeng.roses.kernel.log.api.factory.appender;
import cn.stylefeng.roses.kernel.auth.api.context.LoginContext;
import cn.stylefeng.roses.kernel.auth.api.pojo.login.LoginUser;
import cn.stylefeng.roses.kernel.log.api.pojo.record.LogRecordDTO;
/**
 * Log-record appender that enriches a log entry with the current
 * authenticated user's information.
 *
 * @author fengshuonan
 * @date 2020/10/27 17:45
 */
public class AuthedLogAppender {
/**
 * Fills the token and userId fields of the given record.
 * <p>
 * This method depends on the auth-api module, so callers must have the
 * auth module on the classpath.
 *
 * @author fengshuonan
 * @date 2020/10/27 18:22
 */
public static void appendAuthedHttpLog(LogRecordDTO logRecordDTO) {
// Populate details of the currently logged-in user.
try {
// Token of the logged-in user.
logRecordDTO.setToken(LoginContext.me().getToken());
// userId of the logged-in user.
LoginUser loginUser = LoginContext.me().getLoginUser();
logRecordDTO.setUserId(loginUser.getUserId());
} catch (Exception ignored) {
// No login info available (e.g. unauthenticated request): leave the fields unset.
}
}
}
|
<filename>app/controllers/cages_controller.rb
class CagesController < ApplicationController
def index
redirect_to :action => 'list'
end
# GETs should be safe (see http://www.w3.org/2001/tag/doc/whenToUseGet.html)
verify :method => :post, :only => [ :destroy, :create, :update ],
:redirect_to => { :action => :list }
def list
if params[:cages]
@cages = Cage.find(params[:cages])
else
@cages = Cage.find(:all, :conditions => 'date_destroyed is null', :order => 'name')
end
@show_leave_date_and_reason = false
end
def list_deactivated
@cages = Cage.find(:all, :conditions => 'date_destroyed is not null', :order => 'name')
@show_leave_date_and_reason = false
render :action => 'list'
end
def list_all
@cages = Cage.find(:all, :order => 'name')
@show_leave_date_and_reason = true
render :action => 'list'
end
def change_cages_list
if params[:protocol] && params[:protocol][:id] != ""
cages = Protocol.find(params[:protocol][:id]).cages
elsif params[:room] && params[:room][:id] != ""
cages = Room.find(params[:room][:id]).cages.active
elsif params[:species] && params[:species][:id] != ""
bats = Species.find(params[:species][:id]).bats.active
cages = bats.collect(&:cage).uniq.sort_by{|c| c.name}
elsif params[:user] && params[:user][:id] != ""
cages = User.find(params[:user][:id]).cages.active
elsif params[:option]
if params[:option]=='med'
cages = Cage.sick
elsif params[:option]=='flight'
cages = Cage.find(:all,:conditions=>'date_destroyed is null and flight_cage is true',
:order=>'name')
end
else
cages = []
end
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_name
cages = Cage.find(params[:ids], :order => 'name')
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_room
cages = Cage.find(params[:ids])
cages = cages.sort_by{|cage| [cage.room.name, cage.name]}
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_owner
cages = Cage.find(params[:ids])
cages = cages.sort_by{|cage| [cage.user.name, cage.name]}
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_bats
cages = Cage.find(params[:ids], :order => 'user_id, name')
cages = cages.sort_by{|cage| [-cage.bats.count, cage.name]}
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_bat_weight
cages = Cage.find(params[:ids], :order => 'user_id, name')
cages = cages.sort_by{|cage| [cage.average_bat_weight, cage.name]}
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_weigh_date
cages = Cage.find(params[:ids], :order => 'user_id, name')
cages = cages.sort_by{|cage| [cage.last_weigh_date.to_f, cage.name]}
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_flown
cages = Cage.find(params[:ids], :order => 'user_id, name')
cages = cages.sort_by{|cage|
[(cage.last_flown == nil ? 0 : cage.last_flown.to_time.to_f), cage.name]}
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_feed_tasks
cages = Cage.find(params[:ids], :order => 'user_id, name')
cages = cages.sort_by{|cage| cage.tasks.feeding_tasks.length}
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_flight_cage
cages = Cage.find(params[:ids], :order => 'flight_cage desc, user_id, name')
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
def list_by_med
cages = Cage.find(params[:ids], :order => 'user_id, name')
cages = cages.sort_by{|cage| -cage.current_medical_problems.length}
render :partial => 'cage_list',
:locals => {:cage_list => cages,:weighing=>params[:weighing],
:show_leave_date_and_reason=>params[:show_leave_date_and_reason]}
end
# Cage detail page: the cage, its tasks, the cage-in history entry for each
# current occupant, and the departure history of former occupants.
def show
  @cage = Cage.find(params[:id])
  @tasks = @cage.tasks #should be the list of weighing tasks
  # collect replaces the original for-loop + manual << accumulation (same
  # result, no leaked loop variable). cage_in_histories[0] is assumed to be
  # the bat's current/latest entry into this cage -- TODO confirm ordering
  # against the association definition.
  @current_bats_cihs = @cage.bats.collect { |bat| bat.cage_in_histories[0] }
  @old_bats_cohs = @cage.cage_out_histories
end
def new
@cage = Cage.new
@deactivating = false
@rooms = Room.find(:all, :order => 'name')
if @rooms.length == 0
flash[:notice] = 'New cages need a room. Create a room before creating a cage.'
redirect_to :controller => 'rooms', :action => :new
end
end
def create
@cage = Cage.new(params[:cage])
@cage.date_destroyed = nil
if @cage.save
flash[:notice] = 'Cage was successfully created.'
redirect_to :controller => 'cages', :action => :show, :id => @cage
else
render :action => 'new'
end
end
def edit
@cage = Cage.find(params[:id])
@deactivating = false
@rooms = Room.find(:all, :order => 'name')
end
def update
@cage = Cage.find(params[:id])
#we don't want the name change propagated on an edit so we remove that from the hash
params[:cage].delete "name"
@deactivating = params[:deactivating]
old_owner_id = @cage.user_id
if @cage.update_attributes(params[:cage])
if old_owner_id != @cage.user_id #owner change requres a new bat changes entry
for bat in @cage.bats
bat_change = BatChange.new
bat_change.date = Date.today
bat_change.bat = bat
bat_change.note = ''
bat_change.user = User.find(session[:person])
bat_change.owner_new_id = @cage.user.id
bat_change.owner_old_id = old_owner_id
bat_change.note = "Owner Change"
bat_change.save
end
end
if @deactivating
for task in @cage.tasks
task.deactivate
end
end
flash[:notice] = 'Cage was successfully updated.'
if params[:redirectme] == 'list'
redirect_to :action => 'list'
else
redirect_to :action => 'show', :id => @cage
end
else
render :action => 'edit'
end
end
def destroy
Cage.find(params[:id]).destroy
redirect_to :action => 'list'
end
# Deactivation form. A cage can only be deactivated when it holds no bats
# and has no current tasks; otherwise the user is redirected away with an
# explanatory flash message.
def deactivate
  @cage = Cage.find(params[:id])
  @rooms = Room.find(:all, :order => 'name')
  if @cage.bats.length > 0
    flash[:notice] = 'Deactivation failed. ' + @cage.name + ' is not empty.'
    redirect_to :controller => :bats, :action => :move_bats
    # Early return: the original fell through after redirect_to and kept
    # executing the action body.
    return
  elsif @cage.tasks.current.length > 0
    flash[:notice] = 'Deactivation failed. ' + @cage.name + ' still has feeding or weighing tasks.'
    redirect_to :action => :show, :id => @cage
    return
  end
  @deactivating = true
end
def reactivate
@cage = Cage.find(params[:id])
@cage.date_destroyed = nil
@cage.save
redirect_to :controller => :cages, :action => :list
end
# Pick-list for weighing: either the explicitly requested cages, or every
# active cage that currently holds at least one bat.
def choose_cage_to_weigh
  if params[:cages]
    @cages = Cage.find(params[:cages], :order => "name")
  else
    # @all_cages is still assigned because the view may reference it.
    @all_cages = Cage.active
    # select states the intent directly, replacing the original ternary
    # that was used purely for its << side effect.
    @cages = @all_cages.select { |cage| cage.bats.count > 0 }
  end
end
def weigh_cage
@cage = Cage.find(params[:id])
@cages = Cage.find(:all, :conditions => "date_destroyed is null", :order => "name" )
@bats = @cage.bats
end
def move_cage
@cages = Cage.active
end
def choose_room
cage = Cage.find(params[:cage])
rooms = Room.find(:all, :conditions => "id != " + cage.room.id.to_s)
render :partial=>'choose_room', :locals => {:cage=>cage, :rooms=>rooms}
end
def move_cage_summary
cage = Cage.find(params[:cage])
new_room = Room.find(params[:room])
render :partial => 'move_cage_summary', :locals=>{:cage=>cage, :new_room=>new_room}
end
def submit_move_cage
cage = Cage.find(params[:cage])
old_room = Room.find(cage.room)
cage.room = Room.find(params[:room])
cage.save
old_census = Census.find_or_create_by_date_and_room_id(Date.today, old_room)
old_census.tally(-cage.bats.length, Date.today, old_room)
for bat in cage.bats
old_census.bats_removed ? old_census.bats_removed = old_census.bats_removed + bat.band + ' ' : old_census.bats_removed = bat.band + ' '
end
old_census.save
new_census = Census.find_or_create_by_date_and_room_id(Date.today, cage.room)
new_census.tally(cage.bats.length, Date.today, cage.room)
for bat in cage.bats
new_census.bats_added ? new_census.bats_added = new_census.bats_added + bat.band + ' ' : new_census.bats_added = bat.band + ' '
end
new_census.save
for task in cage.tasks
task.room = cage.room
task.save
end
cage.tasks.today.each{|task| TaskCensus.room_swap(cage.room,task)}
flash[:notice] = 'Cage ' + cage.name + ' was moved from ' + old_room.name + ' to ' + cage.room.name
redirect_to :action => 'move_cage'
end
end
|
require 'thread'
module EMRPC
# Sends all the messages to a specified backend
# FIXME: deal with Object's methods gracefully.
# Transparent proxy that forwards every message to a wrapped backend
# object. Inherits from BlankSlate so that (almost) no Object methods get
# in the way of method_missing-based forwarding.
class MethodProxy < BlankSlate
EMPTY_ARGS = [ ].freeze
attr_reader :__emrpc_backend
# backend: the object (typically a remote endpoint) that receives all calls.
def initialize(backend)
@__emrpc_backend = backend
end
# Generic forwarding path for any method not explicitly defined below.
def method_missing(meth, *args, &blk)
@__emrpc_backend.send(meth, *args, &blk)
end
# The methods below exist on Object / are special-cased by the VM, so
# method_missing never sees them; each must be forwarded explicitly.
def id
@__emrpc_backend.send(:id)
end
def to_i
@__emrpc_backend.send(:to_i)
end
def to_s
@__emrpc_backend.send(:to_s)
end
def to_str
@__emrpc_backend.send(:to_str)
end
def is_a?(type)
@__emrpc_backend.send(:is_a?, type)
end
#alias :__class__ :class
# Even #class is forwarded, so the proxy reports the backend's class.
def class
@__emrpc_backend.send(:class)
end
# Marshalling - just return a backend
def marshal_dump
@__emrpc_backend
end
# Restoring from a dump simply re-wraps the dumped backend.
def marshal_load(data)
initialize(data)
end
# Note: __id__ here is the proxy's own id; the backend's inspect output is
# embedded so both sides are visible.
def inspect
"#<MethodProxy:0x#{__id__.to_s(16)} remote:#{@__emrpc_backend.send(:inspect)}>"
end
end
end
|
<gh_stars>0
// Epoch time (in milliseconds) at which a rate-limit window resets.
export type ResetTime = number; //millsec
// In-memory rate limiter keyed by K. Window size and capacity come from the
// injected limitAndExpire callback; state is kept in a Map and pruned both
// by a timer and lazily on access.
export class RateLimit<K> {
// Per-key state: [tokens remaining, window reset time].
private readonly _queue = new Map<K, [number, ResetTime]>();
constructor(
// Returns [token budget, reset time] for a key's new window.
private readonly limitAndExpire: (
k: K,
now: number
) => Promise<[number, ResetTime]>
) {}
// Commit helper: stores the new count unless the budget went negative,
// and reports [allowed?, resetTime].
private cf(k: K, nn: number, rt: ResetTime): [boolean, ResetTime] {
if (nn < 0) {
return [false, rt];
}
this._queue.set(k, [nn, rt]);
return [true, rt];
}
// Attempts to consume n tokens for key k. Resolves to [allowed?, resetTime].
public async consume(
k: K,
n = 1,
now = Date.now()
): Promise<[boolean, ResetTime]> {
let v = this._queue.get(k);
if (!v) {
// First hit in this window: ask for a fresh budget and schedule cleanup.
v = await this.limitAndExpire(k, now);
const [cnt, rt] = v;
setTimeout(() => {
// Only delete if the entry still belongs to this window (the reset
// time is used as a window identity check).
const v = this._queue.get(k);
if (v && v[1] === rt) {
this._queue.delete(k);
}
}, rt - now);
const nn = cnt - n;
return this.cf(k, nn, rt);
}
const [cnt, rt] = v;
if (rt < now) {
// Stale window that the timer has not collected yet: drop and retry.
this._queue.delete(k);
return this.consume(k, n, now);
}
const nn = cnt - n;
return this.cf(k, nn, rt);
}
}
|
<reponame>mscherotter/Microsoft.Mixer.MixPlay<filename>Microsoft.Mixer/LaunchEventArgs.h
#pragma once
namespace Microsoft
{
namespace Mixer
{
namespace MixPlay
{
/// Event arguments carrying the activation URI the application was
/// launched with (constructor is defined in the matching .cpp file).
public ref class LaunchEventArgs sealed
{
public:
/// Constructs the event args from the launch URI.
LaunchEventArgs(Windows::Foundation::Uri^ uri);
/// The URI the application was launched with.
property Windows::Foundation::Uri^ Uri;
};
}
}
}
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
calling_dir() {
echo "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
}
# Builds the JVM classpath from ../lib. hadoop* jars are collected
# separately so that, when a real $HADOOP_HOME installation is present,
# its reported classpath replaces the bundled hadoop jars.
classpath() {
  DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
  # Iterate the files directly with a glob; unlike parsing `ls` output,
  # this is safe when paths contain whitespace.
  for path in "$DIR"/../lib/*
  do
    i=$(basename "$path")
    if [[ $i != hadoop* ]]
    then
      CLASSPATH=${CLASSPATH:+${CLASSPATH}:}$DIR/../lib/$i
    else
      HADOOP_CLASSPATH=${HADOOP_CLASSPATH:+${HADOOP_CLASSPATH}:}$DIR/../lib/$i
    fi
  done
  # Prefer the classpath reported by the local hadoop installation.
  if [ ! -z "$HADOOP_HOME" ] && [ -f "$HADOOP_HOME/bin/hadoop" ]
  then
    HADOOP_CLASSPATH=$("$HADOOP_HOME/bin/hadoop" classpath)
  fi
  CLASSPATH=$CLASSPATH:$HADOOP_CLASSPATH
  # User-supplied jars go first so they can override bundled classes.
  if [ ! -z "$GOBBLIN_ADDITIONAL_JARS" ]
  then
    CLASSPATH=$GOBBLIN_ADDITIONAL_JARS:$CLASSPATH
  fi
  echo $CLASSPATH
}
# If any argument is "classpath", print the classpath and exit.
# Bug fix: the original tested "$1" inside the loop, so "classpath" was
# only ever honored when it was the first argument.
for i in "$@"
do
  case "$i" in
  "classpath")
    classpath
    exit
  esac
done
CLASSPATH=$(classpath)
# Fall back to the bundled log4j config when none is supplied.
if [ -z "$GOBBLIN_LOG4J_CONFIGURATION" ]
then
  GOBBLIN_LOG4J_CONFIGURATION=$(calling_dir)/../conf/log4j.properties
fi
# "$@" (quoted) preserves each original argument; the original unquoted $@
# re-split arguments containing whitespace.
java -Dlog4j.configuration=file:$GOBBLIN_LOG4J_CONFIGURATION -cp "$CLASSPATH" $GOBBLIN_OPTS gobblin.runtime.cli.GobblinCli "$@"
|
<gh_stars>1-10
// Copyright (c) Microsoft Corporation.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
// test <random> C++11 header, part 4
#define TEST_NAME "<random>, part 4"
#include "tdefs.h"
#include <cmath>
#include <random>
#include <sstream>
typedef int Int32;
typedef unsigned int Uint32;
// Exercises an engine's global operators: operator==/operator!= reflect
// internal state, and stream insertion/extraction round-trips the exact
// engine state (the restored engine produces the same next value).
template <class Eng>
struct test_globals { // tests engine global functions
static void test() { // test globals
Eng rng0, rng1, rng2;
// Advance rng2 once so it differs from the default-constructed pair.
(void) rng2();
CHECK(rng0 == rng1);
CHECK(rng0 != rng2);
// Advancing rng1 once brings it level with rng2 again.
(void) rng1();
CHECK(rng1 == rng2);
STD stringstream str;
// Writing the state out must not perturb the engine...
str << rng1;
CHECK(rng1 == rng2);
// ...and reading it back must reproduce the state exactly.
str >> rng0;
CHECK(rng0 == rng1);
CHECK(rng0() == rng1());
}
};
// Tests std::seed_seq: result_type, size(), param() copy-out, generate()
// output against known golden values, and the initializer_list constructor.
static void tseed_seq() { // test class seed_seq
Uint32 arr1[5] = {'a', 'b', 'c', 'd', 'e'};
Uint32 arr2[5] = {0};
bool st = STD is_same<STD seed_seq::result_type, Uint32>::value;
CHECK(st);
// Default-constructed sequence holds no seed data; param() writes nothing.
STD seed_seq seq;
CHECK_INT(seq.size(), 0);
seq.param(&arr1[0]);
CHECK_INT(arr1[0], 'a');
// Iterator-range constructor copies the five seeds; param() copies them back.
STD seed_seq seq1(&arr1[0], &arr1[5]);
CHECK_INT(seq1.size(), 5);
seq1.param(&arr2[0]);
CHECK_INT(arr2[0], 'a');
CHECK_INT(arr2[4], 'e');
// generate() output is fully specified by the standard -- golden values.
seq.generate(&arr2[0], &arr2[4]);
CHECK_INT(arr2[3], 3895714911U);
seq1.generate(&arr2[0], &arr2[4]);
CHECK_INT(arr2[3], 3734116661U);
// initializer_list constructor behaves like the iterator-range one.
STD seed_seq seq2(STD initializer_list<Uint32>(&arr1[0], &arr1[5]));
CHECK_INT(seq2.size(), 5);
seq2.param(&arr2[0]);
CHECK_INT(arr2[0], 'a');
CHECK_INT(arr2[4], 'e');
}
// Tests std::generate_canonical: result lies in [0, 1), and the explicit
// engine-type form produces the same value as the deduced form for
// identically seeded engines.
static void tgenerate() { // test generate_canonical
typedef STD ranlux24 rng_t;
rng_t gen1, gen2;
double x = STD generate_canonical<double, 40, rng_t>(gen1);
CHECK(0.0 <= x && x < 1.0);
double y = STD generate_canonical<double, 40>(gen2);
CHECK_DOUBLE(x, y);
}
static void tlinear() { // test linear_congruential_engine
typedef STD linear_congruential_engine<Uint32, 16807, 0, 2147483647> rng_t;
CHECK_INT(rng_t::multiplier, 16807);
CHECK_INT(rng_t::increment, 0);
CHECK_INT(rng_t::modulus, 2147483647);
CHECK_INT(rng_t::default_seed, 1);
bool st = STD is_same<rng_t::result_type, Uint32>::value;
CHECK(st);
CHECK_INT(rng_t::min(), 1);
CHECK_INT(rng_t::max(), 2147483646);
rng_t rng;
CHECK_INT(rng.min(), 1);
CHECK_INT(rng.max(), 2147483646);
CHECK_INT(rng(), 16807);
rng_t rng1(2);
CHECK_INT(rng1.min(), 1);
CHECK_INT(rng1.max(), 2147483646);
CHECK_INT(rng1(), 33614);
STD seed_seq seq;
rng_t rng2(seq);
CHECK_INT(rng2(), 651595794U);
rng.seed(1);
CHECK_INT(rng.min(), 1);
CHECK_INT(rng.max(), 2147483646);
CHECK_INT(rng(), 16807);
rng.seed(2);
CHECK_INT(rng.min(), 1);
CHECK_INT(rng.max(), 2147483646);
CHECK_INT(rng(), 33614);
rng.seed(seq);
CHECK_INT(rng(), 651595794U);
rng.seed(1);
rng.discard(1);
CHECK_INT(rng(), 282475249);
test_globals<rng_t>::test();
// check large values
typedef unsigned long long int_type;
const int_type max_val = (int_type) -1;
typedef STD linear_congruential_engine<int_type, max_val - 1, 0, max_val> rng4_t;
rng4_t rng4(1);
CHECK(rng4() == max_val - 1);
CHECK(rng4() == 1);
}
static void tmersenne() {
typedef STD mersenne_twister_engine<Uint32, 32, 624, 397, 31, 0x9908b0df, 11, 0xffffffff, 7, 0x9d2c5680, 15,
0xefc60000, 18, 1812433253>
rng_t;
CHECK_INT(rng_t::word_size, 32);
CHECK_INT(rng_t::state_size, 624);
CHECK_INT(rng_t::shift_size, 397);
CHECK_INT(rng_t::mask_bits, 31);
CHECK_INT((int) rng_t::xor_mask, (int) 0x9908b0df);
CHECK_INT(rng_t::tempering_u, 11);
CHECK_INT(rng_t::tempering_d, (int) 0xffffffff);
CHECK_INT(rng_t::tempering_s, 7);
CHECK_INT((int) rng_t::tempering_b, (int) 0x9d2c5680);
CHECK_INT(rng_t::tempering_t, 15);
CHECK_INT((int) rng_t::tempering_c, (int) 0xefc60000);
CHECK_INT(rng_t::tempering_l, 18);
CHECK_INT(rng_t::initialization_multiplier, 1812433253);
CHECK_INT(rng_t::default_seed, 5489);
bool st = STD is_same<rng_t::result_type, Uint32>::value;
CHECK(st);
CHECK_INT(rng_t::min(), 0);
CHECK_INT(rng_t::max(), 0xffffffff);
rng_t rng;
CHECK_INT(rng.min(), 0);
CHECK_INT(rng.max(), 0xffffffff);
CHECK_INT(rng(), (int) 3499211612u);
rng_t rng1(1);
CHECK_INT(rng1.min(), 0);
CHECK_INT(rng1.max(), 0xffffffff);
CHECK_INT(rng1(), 1791095845);
STD seed_seq seq;
rng_t rng2(seq);
CHECK_INT(rng2(), 2872601305U);
rng.seed(1);
CHECK_INT(rng.min(), 0);
CHECK_INT(rng.max(), 0xffffffff);
CHECK_INT(rng(), 1791095845);
rng.seed(2);
CHECK_INT(rng.min(), 0);
CHECK_INT(rng.max(), 0xffffffff);
CHECK_INT(rng(), 1872583848);
rng.seed(seq);
CHECK_INT(rng(), 2872601305U);
rng.seed(1);
rng.discard(1);
CHECK_INT(rng(), 4282876139U);
test_globals<rng_t>::test();
}
static void tsubtract() {
typedef STD subtract_with_carry_engine<Uint32, 24, 10, 24> rng_t;
CHECK_INT(rng_t::word_size, 24);
CHECK_INT(rng_t::short_lag, 10);
CHECK_INT(rng_t::long_lag, 24);
CHECK_INT(rng_t::default_seed, 19780503);
bool st = STD is_same<rng_t::result_type, Uint32>::value;
CHECK(st);
CHECK_INT(rng_t::min(), 0);
CHECK_INT(rng_t::max(), (1 << 24) - 1);
rng_t rng;
CHECK_INT(rng.min(), 0);
CHECK_INT(rng.max(), (1 << 24) - 1);
CHECK_INT(rng(), 15039276);
rng_t rng1(1);
CHECK_INT(rng1.min(), 0);
CHECK_INT(rng1.max(), (1 << 24) - 1);
CHECK_INT(rng1(), 8871692);
STD seed_seq seq;
rng_t rng2(seq);
CHECK_INT(rng2(), 13077165U);
rng.seed(1);
CHECK_INT(rng.min(), 0);
CHECK_INT(rng.max(), (1 << 24) - 1);
CHECK_INT(rng(), 8871692);
rng.seed(2);
CHECK_INT(rng.min(), 0);
CHECK_INT(rng.max(), (1 << 24) - 1);
CHECK_INT(rng(), 966168);
rng.seed(1);
rng.discard(1);
CHECK_INT(rng(), 3740959);
test_globals<rng_t>::test();
}
// Checks std::discard_block_engine over a subtract_with_carry_engine:
// static traits, construction from a base engine, seeding, discard(), plus
// the block behavior (only used_block of every block_size base values pass).
static void tdiscard() {
    int i;
    typedef STD subtract_with_carry_engine<Uint32, 24, 10, 24> rng_base_t;
    typedef STD discard_block_engine<rng_base_t, 223, 24> rng_t;
    CHECK_INT(rng_t::block_size, 223);
    CHECK_INT(rng_t::used_block, 24);
    bool st = STD is_same<rng_t::result_type, rng_base_t::result_type>::value;
    CHECK(st);
    CHECK_INT(rng_t::min(), 0);
    CHECK_INT(rng_t::max(), (1 << 24) - 1);
    rng_t rng; // default-constructed adaptor
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 15039276); // first value equals the base engine's first value
    rng_base_t urng;
    rng_t rng0(urng); // constructed from a copy of the base engine
    CHECK_INT(rng0.min(), 0);
    CHECK_INT(rng0.max(), (1 << 24) - 1);
    CHECK_INT(rng0(), 15039276);
    rng_t rng1(1); // seeded with 1
    CHECK_INT(rng1.min(), 0);
    CHECK_INT(rng1.max(), (1 << 24) - 1);
    CHECK_INT(rng1(), 8871692);
    STD seed_seq seq;
    rng_t rng2(seq); // constructed from a seed_seq
    CHECK_INT(rng2(), 13077165U);
    rng.seed(); // re-seed back to the default state
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 15039276);
    rng.seed(1);
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 8871692);
    rng.seed(2);
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 966168);
    rng.seed(1);
    rng.discard(1); // discard(1) must behave like one unused call to rng()
    CHECK_INT(rng(), 3740959);
    // Block behavior: the adaptor tracks the base engine for used_block values...
    rng_base_t rng4;
    rng_t rng5;
    for (i = 0; i < rng_t::used_block; ++i)
        CHECK_INT(rng4(), rng5());
    CHECK(rng4() != rng5()); // ...then jumps to the start of the next block
    for (; i < rng_t::block_size; ++i) {
        (void) rng4(); // advance the raw engine past the discarded remainder
    }
    CHECK_INT(rng4(), rng5()); // both now aligned at the next block boundary
    test_globals<rng_t>::test();
}
// Checks std::independent_bits_engine producing 24-bit values from the 24-bit
// subtract_with_carry_engine; with matching widths the adaptor's outputs
// coincide with the base engine's outputs.
static void tindependent() {
    typedef STD subtract_with_carry_engine<Uint32, 24, 10, 24> rng_base_t;
    typedef STD independent_bits_engine<rng_base_t, 24, Uint32> rng_t;
    bool st = STD is_same<rng_t::result_type, rng_base_t::result_type>::value;
    CHECK(st);
    // 24 requested bits -> range [0, 2^24 - 1]
    CHECK_INT(rng_t::min(), 0);
    CHECK_INT(rng_t::max(), (1 << 24) - 1);
    rng_t rng; // default-constructed adaptor
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 15039276); // matches the base engine's first value
    rng_base_t urng;
    rng_t rng0(urng); // constructed from a copy of the base engine
    CHECK_INT(rng0.min(), 0);
    CHECK_INT(rng0.max(), (1 << 24) - 1);
    CHECK_INT(rng0(), 15039276);
    rng_t rng1(1); // seeded with 1
    CHECK_INT(rng1.min(), 0);
    CHECK_INT(rng1.max(), (1 << 24) - 1);
    CHECK_INT(rng1(), 8871692);
    STD seed_seq seq;
    rng_t rng2(seq); // constructed from a seed_seq
    CHECK_INT(rng2(), 13077165U);
    rng.seed(); // re-seed back to the default state
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 15039276);
    rng.seed(1);
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 8871692);
    rng.seed(2);
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 966168);
    rng.seed(1);
    rng.discard(1); // discard(1) must behave like one unused call to rng()
    CHECK_INT(rng(), 3740959);
    test_globals<rng_t>::test();
}
// Checks std::shuffle_order_engine (table size 5) over a
// subtract_with_carry_engine: the adaptor permutes the base sequence, so the
// expected first values differ from the base engine's.
static void tshuffle() {
    typedef STD subtract_with_carry_engine<Uint32, 24, 10, 24> rng_base_t;
    typedef STD shuffle_order_engine<rng_base_t, 5> rng_t;
    CHECK_INT(rng_t::table_size, 5);
    bool st = STD is_same<rng_t::result_type, rng_base_t::result_type>::value;
    CHECK(st);
    // range is inherited from the base engine: [0, 2^24 - 1]
    CHECK_INT(rng_t::min(), 0);
    CHECK_INT(rng_t::max(), (1 << 24) - 1);
    rng_t rng; // default-constructed adaptor
    const int defaultResult = 14283486; // first value with the default seed
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), defaultResult);
    rng_base_t urng;
    rng_t rng0(urng); // constructed from a copy of the base engine
    CHECK_INT(rng0.min(), 0);
    CHECK_INT(rng0.max(), (1 << 24) - 1);
    CHECK_INT(rng0(), defaultResult);
    rng_t rng1(1); // seeded with 1
    const int oneResult = 11575129; // first value with seed 1
    CHECK_INT(rng1.min(), 0);
    CHECK_INT(rng1.max(), (1 << 24) - 1);
    CHECK_INT(rng1(), oneResult);
    STD seed_seq seq;
    rng_t rng2(seq); // constructed from a seed_seq
    CHECK_INT(rng2(), 747473);
    rng.seed(); // re-seed back to the default state
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), defaultResult);
    rng.seed(1);
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), oneResult);
    rng.seed(2);
    CHECK_INT(rng.min(), 0);
    CHECK_INT(rng.max(), (1 << 24) - 1);
    CHECK_INT(rng(), 6373042);
    rng.seed(1);
    rng.discard(1); // discard(1) must behave like one unused call to rng()
    CHECK_INT(rng(), 1619564);
    test_globals<rng_t>::test();
}
static void tmt19937_64() {
typedef STD mt19937_64 rng_t;
rng_t rng;
rng_t::result_type res = 0;
for (int i = 0; i < 10000; ++i)
res = rng();
CHECK(res == 9981545732273789042ULL);
}
static void tranlux24_base() {
typedef STD ranlux24_base rng_t;
rng_t rng;
rng_t::result_type res = 0;
for (int i = 0; i < 10000; ++i)
res = rng();
CHECK_INT(res, 7937952);
}
static void tranlux24() {
typedef STD ranlux24 rng_t;
rng_t rng;
rng_t::result_type res = 0;
for (int i = 0; i < 10000; ++i)
res = rng();
CHECK_INT(res, 9901578);
}
static void tranlux48_base() {
typedef STD ranlux48_base rng_t;
rng_t rng;
rng_t::result_type res = 0;
for (int i = 0; i < 10000; ++i)
res = rng();
CHECK(res == 61839128582725ULL);
}
static void tranlux48() {
typedef STD ranlux48 rng_t;
rng_t rng;
rng_t::result_type res = 0;
for (int i = 0; i < 10000; ++i)
res = rng();
CHECK(res == 249142670248501ULL);
}
static void tknuth() {
typedef STD knuth_b rng_t;
rng_t rng;
rng_t::result_type res = 0;
for (int i = 0; i < 10000; ++i)
res = rng();
CHECK_INT(res, 1112339016U);
}
// Entry point: runs every engine/adaptor test case in sequence.
void test_main() { // test generators
    tseed_seq();
    tgenerate();
    tlinear();
    tmersenne();
    tsubtract();
    tdiscard();
    tindependent();
    tshuffle();
    // predefined engines: 10,000th-value conformance checks
    tmt19937_64();
    tranlux24_base();
    tranlux24();
    tranlux48_base();
    tranlux48();
    tknuth();
}
|
import * as React from 'react';
const PieChart = require('react-svg-piechart').default;
import {COLORS} from '../../../shared/components';
import * as models from '../../../shared/models';
// Color lookup tables for the pie charts; the map keys double as legend labels.
const healthColors = new Map<models.HealthStatusCode, string>();
healthColors.set('Healthy', COLORS.health.healthy);
healthColors.set('Progressing', COLORS.health.progressing);
healthColors.set('Degraded', COLORS.health.degraded);
healthColors.set('Missing', COLORS.health.missing);
healthColors.set('Unknown', COLORS.health.unknown);

const syncColors = new Map<models.SyncStatusCode, string>();
syncColors.set('Synced', COLORS.sync.synced);
syncColors.set('OutOfSync', COLORS.sync.out_of_sync);
syncColors.set('Unknown', COLORS.sync.unknown);

// Overview panel for a list of applications: headline counts plus one pie
// chart (with legend) for sync status and one for health status.
export const ApplicationsSummary = ({applications}: {applications: models.Application[]}) => {
    // Tally applications by sync status and by health status.
    const sync = new Map<string, number>();
    applications.forEach(app => sync.set(app.status.sync.status, (sync.get(app.status.sync.status) || 0) + 1));
    const health = new Map<string, number>();
    applications.forEach(app => health.set(app.status.health.status, (health.get(app.status.health.status) || 0) + 1));

    // Headline figures shown in the SUMMARY column.
    const attributes = [
        {
            title: 'APPLICATIONS:',
            value: applications.length
        },
        {
            title: 'SYNCED:',
            value: applications.filter(app => app.status.sync.status === 'Synced').length
        },
        {
            title: 'HEALTHY:',
            value: applications.filter(app => app.status.health.status === 'Healthy').length
        },
        {
            title: 'CLUSTERS:',
            value: new Set(applications.map(app => app.spec.destination.server)).size
        },
        {
            title: 'NAMESPACES:',
            value: new Set(applications.map(app => app.spec.destination.namespace)).size
        }
    ];

    // One chart descriptor per status dimension; data entries pick their slice
    // color from the corresponding legend map.
    const charts = [
        {
            title: 'Sync',
            data: Array.from(sync.keys()).map(key => ({title: key, value: sync.get(key), color: syncColors.get(key as models.SyncStatusCode)})),
            legend: syncColors as Map<string, string>
        },
        {
            title: 'Health',
            data: Array.from(health.keys()).map(key => ({title: key, value: health.get(key), color: healthColors.get(key as models.HealthStatusCode)})),
            legend: healthColors as Map<string, string>
        }
    ];
    return (
        <div className='white-box applications-list__summary'>
            <div className='row'>
                <div className='columns large-4 small-12'>
                    <div className='white-box__details'>
                        <p className='row'>SUMMARY</p>
                        {attributes.map(attr => (
                            <div className='row white-box__details-row' key={attr.title}>
                                <div className='columns small-8'>{attr.title}</div>
                                <div style={{textAlign: 'right'}} className='columns small-4'>
                                    {attr.value}
                                </div>
                            </div>
                        ))}
                    </div>
                </div>
                {charts.map(chart => (
                    <React.Fragment key={chart.title}>
                        <div className='columns large-3 small-4'>
                            <h4 style={{textAlign: 'center'}}>{chart.title}</h4>
                            <PieChart data={chart.data} />
                        </div>
                        <div className='columns large-1 small-2'>
                            <ul>
                                {Array.from(chart.legend.keys()).map(key => (
                                    <li style={{color: chart.legend.get(key)}} key={key}>
                                        {key}
                                    </li>
                                ))}
                            </ul>
                        </div>
                    </React.Fragment>
                ))}
            </div>
        </div>
    );
};
|
<reponame>ryoff/all_becomes_f<gh_stars>0
require "everything_becomes_f/version"
require "everything_becomes_f/core_ext/string"
require "everything_becomes_f/core_ext/time"
require "everything_becomes_f/core_ext/integer"
|
import * as db_app from '../../database/app.db'
import * as _ from 'lodash'
// Recomputes and persists the per-time-window rank of every approved app.
// For each app, points are gathered for every (metric, window) pair, summed
// per window, and the app's 1-based position in the descending ordering is
// written back to app.rank.
export const update_rank = async () => {
    // Evaluation windows and metrics used throughout the ranking.
    const times = ['last_day', 'before_last_day', 'last_week', 'before_last_week', 'last_month', 'before_last_month']
    const sort_types = ['dau', 'tx', 'volume_sbd', 'volume_steem', 'rewards_sbd', 'rewards_steem', 'steempower_effective']
    try {
        const apps = await db_app.find_approved(true)
        // Collect raw points for every app / metric / window combination.
        const rankings = []
        for (const app of apps) {
            const data = []
            for (const sort_type of sort_types) {
                for (const time of times) {
                    data.push({ time, sort_type, points: await get_ranking(app.name, sort_type, time) })
                }
            }
            rankings.push({ name: app.name, data })
        }
        // Collapse the per-metric points into one total per app and window.
        const new_rankings = []
        for (const app of rankings) {
            const rank = { name: app.name }
            for (const time of times) {
                // Explicit initial value (0) avoids the reduce() edge case where a
                // single-element array would yield the element object, not a number.
                rank[time] = app.data
                    .filter(x => x.time === time)
                    .reduce((sum, x) => sum + x.points, 0)
            }
            new_rankings.push(rank)
        }
        // Order app names per window, highest total first.
        const ranks = {}
        for (const time of times) {
            ranks[time] = _(new_rankings).orderBy([time], ['desc']).map(x => x.name).value()
        }
        // Persist each app's 1-based position per window.
        for (const app of apps) {
            for (const time of times) {
                app.rank[time] = ranks[time].indexOf(app.name) + 1
            }
            app.markModified('rank')
            await app.save()
        }
    } catch (error) {
        // Best-effort job: log and swallow so a scheduler can retry later.
        console.error('update_rank', error)
    }
}
// Computes the weighted point score for one app on one metric and time window.
const get_ranking = async (app_name, sort_type, time, order = 'desc') => {
    const apps = await db_app.find_approved_lean(true, sort_type, order, time)
    const app = apps.find(x => x.name === app_name)
    // Composite metric names ("volume_sbd") address nested fields split at the
    // first underscore; plain names ("dau") are keyed directly by time window.
    const sep = sort_type.indexOf('_')
    let sum
    if (sep === -1) {
        sum = app[sort_type][time]
    } else {
        const outer = sort_type.substring(0, sep)
        const inner = sort_type.substring(sep + 1, sort_type.length)
        // "steempower_*" fields carry a single value, not a per-window breakdown.
        sum = sort_type.includes('steempower') ? app[outer][inner] : app[outer][inner][time]
    }
    return calculate_weight_for_sort_type(sum, sort_type)
}
const calculate_weight_for_sort_type = (value, sort_type) => {
if(!value) return 0
let point = 1
if (sort_type === 'dau') {
point = 30
} else if (sort_type === 'tx') {
point = 0.04
} else if (sort_type.includes('volume') || sort_type.includes('rewards')) {
point = 0.01
} else if (sort_type.includes('steempower')) {
point = 0.0005
}
return point * value
} |
#!/bin/bash
# Reads MQTT connection settings from input.config, writes them into
# input_sub.config / input_pub.config (one value per line), then builds
# (if necessary) and runs the mb-listener binary.
PATH_TO_FILE='/root/shine/input.config'
SOURCE_FILE='mb-listener.c'
EXE_FILE='mb-listener.elf'

if [ -e "$PATH_TO_FILE" ]
then
	# Remote broker settings ($NF = last whitespace-separated field of the line)
	remoteHost=$(awk '/^mqtt.remote.host/{print $NF}' "$PATH_TO_FILE")
	clientId=$(awk '/^generic.node-id/{print $NF}' "$PATH_TO_FILE")
	ledSubTopic=$(awk '/^mqtt.remote.ledSubTopic/{print $NF}' "$PATH_TO_FILE")
	remotePort=$(awk '/^mqtt.remote.port/{print $NF}' "$PATH_TO_FILE")
	# Local broker settings
	localHost=$(awk '/^mqtt.local.host/{print $NF}' "$PATH_TO_FILE")
	ledPubTopic=$(awk '/^mqtt.local.pubTopic/{print $NF}' "$PATH_TO_FILE")
	localPort=$(awk '/^mqtt.local.port /{print $NF}' "$PATH_TO_FILE")
else
	# Without the config there is nothing valid to write or run, so stop here
	# (previously the script continued and produced empty config files).
	echo "[ERROR] Input.config File not found"
	exit 1
fi
#val=$( sed -ne '/mqtt.remote.ledSubTopic/ s/.*\t *//p' $PATH_TO_FILE )

# Saving data in input_sub file
echo "$remoteHost" > input_sub.config
echo "sub_$clientId" >> input_sub.config
echo "$ledSubTopic" >> input_sub.config
echo "$remotePort" >> input_sub.config
# Saving data in input_pub file
echo "$localHost" > input_pub.config
echo "pub_$clientId" >> input_pub.config
echo "${ledPubTopic}_2" >> input_pub.config
echo "$localPort" >> input_pub.config

# Run the listener, compiling it first if only the source exists.
if [ -e "$EXE_FILE" ]
then
	./"$EXE_FILE"
elif [ -e "$SOURCE_FILE" ]
then
	gcc "$SOURCE_FILE" -o "$EXE_FILE" -l wiringPi -l paho-mqtt3c
	./"$EXE_FILE"
else
	echo "[ERROR] Source File not found"
	exit 1
fi
|
<reponame>tripl-ai/arc-sas-pipeline-plugin
import sbt._
// Centralises third-party dependency versions and module definitions for the
// sbt build; etlDeps is the aggregate consumed by the project definition.
object Dependencies {
  // versions
  lazy val sparkVersion = "3.0.1"
  lazy val hadoopVersion = "3.2.0"
  // testing (available in the test and integration-test configurations)
  val scalaTest = "org.scalatest" %% "scalatest" % "3.0.7" % "test,it"
  val hadoopCommon = "org.apache.hadoop" % "hadoop-common" % hadoopVersion % "it"
  // arc ("provided": supplied by the runtime, not bundled)
  val arc = "ai.tripl" %% "arc" % "3.7.0" % "provided"
  // spark
  val sparkSql = "org.apache.spark" %% "spark-sql" % sparkVersion % "provided"
  // parso does the actual parsing
  val parso = "com.epam" % "parso" % "2.0.11"
  // Project
  val etlDeps = Seq(
    scalaTest,
    hadoopCommon,
    arc,
    sparkSql,
    parso
  )
}
/**
* @module react
*/
import React, { Component } from 'react'
/**
* @module PropTypes
*/
import PropTypes from 'prop-types'
/**
* @module classNames
*/
import classNames from 'utils/classnames'
/**
* @module TileHeader
*/
import TileHeader from 'components/tiles/TileHeader'
/**
* @module TileFooter
*/
import TileFooter from 'components/tiles/TileFooter'
/**
* @module TileContent
*/
import TileContent from 'components/tiles/TileContent'
/**
* @module baseTile
*/
import baseTile from 'components/tiles/BaseTile'
/**
* @module TileImageContent
*/
import TileImageContent from 'components/tiles/TileImageContent'
/**
* @module TileVideoContent
*/
import TileVideoContent from 'components/tiles/TileVideoContent'
/**
* @module TileMediaContent
*/
import TileMediaContent from 'components/tiles/TileMediaContent'
/**
 * createSlides
 * @param {Array} attachments
 * @param {String} rootPath
 * @return {Array}
 */
export const createSlides = (attachments = [], rootPath) => {
return attachments.map((attachment, index) => {
if (attachment.type === 'video') {
return (
<div key={`mvideo-${index}`}>
<TileVideoContent
modifier={['no-margin-bottom']}
rootPath={rootPath}
poster={attachment.thumbnail}
src={attachment.path}/>
</div>
)
} else {
return (
<div key={`mimg-${index}`}>
<TileImageContent
modifier={['no-margin-bottom']}
rootPath={rootPath}
src={attachment.path}
alt={'Horse racing'} />
</div>
)
}
})
}
/**
* @name MultipleTile
* @param {Object} props
* @return {React.Component}
*/
class MultipleTile extends Component {
/**
* @constructor
* @param {Object} props
*/
constructor (props) {
super(props)
}
render () {
const {
className,
modifier,
name,
date,
text,
attachments,
rootPath,
commentCount
} = this.props
const modifiedClassNames = classNames('multiple-tile', className, modifier)
return (
<div className={modifiedClassNames}>
<TileMediaContent>
{createSlides(attachments, rootPath)}
</TileMediaContent>
<TileHeader
name={name}
date={date} />
<TileContent
text={text}/>
<TileFooter
commentLength={commentCount}
shareText={text} />
</div>
)
}
}
/**
* propTypes
* @type {Object}
*/
// Declares the props accepted by MultipleTile. rootPath and commentCount were
// used in render but previously undeclared; they are added here.
MultipleTile.propTypes = {
  className: PropTypes.oneOfType([
    PropTypes.string,
    PropTypes.arrayOf(PropTypes.string)
  ]),
  modifier: PropTypes.oneOfType([
    PropTypes.string,
    PropTypes.arrayOf(PropTypes.string)
  ]),
  name: PropTypes.string,
  date: PropTypes.string,
  text: PropTypes.string,
  src: PropTypes.string,
  rootPath: PropTypes.string,
  commentCount: PropTypes.number,
  attachments: PropTypes.arrayOf(PropTypes.shape({
    type: PropTypes.string,
    path: PropTypes.string,
    thumbnail: PropTypes.string
  }))
}
/**
* defaultProps
* @type {Object}
*/
// Fallback values for optional props. attachments defaults to an empty list,
// matching the default parameter in createSlides.
MultipleTile.defaultProps = {
  className: '',
  modifier: '',
  name: '',
  date: '',
  text: '',
  src: '',
  attachments: []
}
/**
* @module MultipleTile
*/
export default baseTile(MultipleTile)
|
#!/bin/bash
# This script performs a complete Media Event Detection pipeline (MED) using video features:
# a) preprocessing of videos, b) feature representation,
# c) computation of MAP scores
# You can pass arguments to this bash script defining which one of the steps you want to perform.
# This helps you to avoid rewriting the bash script whenever there are
# intermediate steps that you don't want to repeat.
# execute: bash run.pipeline.sh -p true -f true -m true -y filepath

# Reading of all arguments:
while getopts p:f:m:y: option # p:f:m:y: is the optstring here
	do
	case "${option}"
	in
	p) PREPROCESSING=${OPTARG};; # boolean true or false
	f) FEATURE_REPRESENTATION=${OPTARG};; # boolean
	m) MAP=${OPTARG};; # boolean
	y) YAML=$OPTARG;; # path to yaml file containing parameters for feature extraction
	esac
	done

export PATH=~/anaconda3/bin:$PATH

if [ "$PREPROCESSING" = true ] ; then
	echo "#####################################"
	echo "#          PREPROCESSING            #"
	echo "#####################################"
	# steps only needed once
	video_path=~/video # path to the directory containing all the videos.
	mkdir -p list downsampled_videos surf cnn kmeans # create folders to save features
	awk '{print $1}' ../hw1_code/list/train > list/train.video # save only video names in one file (keeping first column)
	awk '{print $1}' ../hw1_code/list/val > list/val.video
	# Fix: list/test.video was never generated although the cat below reads it.
	awk '{print $1}' ../hw1_code/list/test > list/test.video
	cat list/train.video list/val.video list/test.video > list/all.video #save all video names in one file
	downsampling_frame_len=60
	downsampling_frame_rate=15

	# 1. Downsample videos into shorter clips with lower frame rates.
	# TODO: Make this more efficient through multi-threading f.ex.
	start=`date +%s`
	for line in $(cat "list/all.video"); do
		ffmpeg -y -ss 0 -i $video_path/${line}.mp4 -strict experimental -t $downsampling_frame_len -r $downsampling_frame_rate downsampled_videos/$line.ds.mp4
	done
	end=`date +%s`
	runtime=$((end-start))
	echo "Downsampling took: $runtime" #28417 sec around 8h without parallelization

	# 2. TODO: Extract SURF features over keyframes of downsampled videos (0th, 5th, 10th frame, ...)
	python surf_feat_extraction.py -i list/all.video config.yaml

	# 3. TODO: Extract CNN features from keyframes of downsampled videos
fi

if [ "$FEATURE_REPRESENTATION" = true ] ; then
	echo "#####################################"
	echo "#   SURF FEATURE REPRESENTATION     #"
	echo "#####################################"
	# 1. TODO: Train kmeans to obtain clusters for SURF features
	# 2. TODO: Create kmeans representation for SURF features

	echo "#####################################"
	echo "#   CNN FEATURE REPRESENTATION      #"
	echo "#####################################"
	# 1. TODO: Train kmeans to obtain clusters for CNN features
	# 2. TODO: Create kmeans representation for CNN features
fi

if [ "$MAP" = true ] ; then
	echo "#######################################"
	echo "# MED with SURF Features: MAP results #"
	echo "#######################################"
	# Paths to different tools;
	map_path=/home/ubuntu/tools/mAP
	export PATH=$map_path:$PATH

	# 1. TODO: Train SVM with OVR using only videos in training set.
	# 2. TODO: Test SVM with val set and calculate its MAP scores for own info.
	# 3. TODO: Train SVM with OVR using videos in training and validation set.
	# 4. TODO: Test SVM with test set saving scores for submission

	echo "#######################################"
	echo "# MED with CNN Features: MAP results  #"
	echo "#######################################"
	# 1. TODO: Train SVM with OVR using only videos in training set.
	# 2. TODO: Test SVM with val set and calculate its MAP scores for own info.
	# 3. TODO: Train SVM with OVR using videos in training and validation set.
	# 4. TODO: Test SVM with test set saving scores for submission
fi
|
<filename>lib/view/mxSwimlaneManager.d.ts<gh_stars>10-100
declare module 'mxgraph' {
/**
* @class mxSwimlaneManager
* @extends mxEventSource
*
* Manager for swimlanes and nested swimlanes that sets the size of newly added
* swimlanes to that of their siblings, and propagates changes to the size of a
* swimlane to its siblings, if {@link siblings} is true, and its ancestors, if
* {@link bubbling} is true.
*/
class mxSwimlaneManager extends mxEventSource {
    /**
     * Constructs a new swimlane manager for the given graph.
     *
     * @param {mxGraph} graph Reference to the enclosing graph.
     * @param {boolean} [horizontal]
     * @param {boolean} [addEnabled]
     * @param {boolean} [resizeEnabled]
     */
    constructor(graph: mxGraph, horizontal?: boolean, addEnabled?: boolean, resizeEnabled?: boolean);

    /**
     * Reference to the enclosing {@link mxGraph}.
     */
    graph: mxGraph;

    /**
     * Specifies if event handling is enabled.
     * @default true
     */
    enabled: boolean;

    /**
     * Specifies the orientation of the swimlanes.
     * @default true
     */
    horizontal: boolean;

    /**
     * Specifies if newly added cells should be resized to match the size of their
     * existing siblings.
     * @default true
     */
    addEnabled: boolean;

    /**
     * Specifies if resizing of swimlanes should be handled.
     * @default true
     */
    resizeEnabled: boolean;

    /**
     * Holds the function that handles the add event.
     */
    addHandler: Function;

    /**
     * Holds the function that handles the resize event.
     */
    resizeHandler: Function;

    /**
     * Returns true if events are handled. This implementation
     * returns {@link enabled}.
     */
    isEnabled(): boolean;

    /**
     * Enables or disables event handling. This implementation
     * updates {@link enabled}.
     *
     * @param value Boolean that specifies the new enabled state.
     */
    setEnabled(value: boolean): void;

    /**
     * Returns {@link horizontal}.
     */
    isHorizontal(): boolean;

    /**
     * Sets {@link horizontal}.
     */
    setHorizontal(value: boolean): void;

    /**
     * Returns {@link addEnabled}.
     */
    isAddEnabled(): boolean;

    /**
     * Sets {@link addEnabled}.
     */
    setAddEnabled(value: boolean): void;

    /**
     * Returns {@link resizeEnabled}.
     */
    isResizeEnabled(): boolean;

    /**
     * Sets {@link resizeEnabled}.
     */
    setResizeEnabled(value: boolean): void;

    /**
     * Returns the graph that this manager operates on.
     */
    getGraph(): mxGraph;

    /**
     * Sets the graph that the manager operates on.
     */
    setGraph(graph: mxGraph): void;

    /**
     * Returns true if the given swimlane should be ignored.
     */
    isSwimlaneIgnored(swimlane: mxCell): boolean;

    /**
     * Returns true if the given cell is horizontal. If the given cell is not a
     * swimlane, then the global orientation is returned.
     */
    isCellHorizontal(cell: mxCell): boolean;

    /**
     * Called if any cells have been added.
     *
     * @param cells Array of {@link mxCell} that have been added.
     */
    cellsAdded(cells: Array<mxCell>): void;

    /**
     * Updates the size of the given swimlane to match that of any existing
     * siblings swimlanes.
     *
     * @param swimlane {@link mxCell} that represents the new swimlane.
     */
    swimlaneAdded(swimlane: mxCell): void;

    /**
     * Called if any cells have been resizes. Calls {@link swimlaneResized} for all
     * swimlanes where {@link isSwimlaneIgnored} returns false.
     *
     * @param cells Array of {@link mxCell} whose size was changed.
     */
    cellsResized(cells: Array<mxCell>): void;

    /**
     * Called from {@link cellsResized} for all swimlanes that are not ignored to update
     * the size of the siblings and the size of the parent swimlanes, recursively,
     * if {@link bubbling} is true.
     *
     * @param swimlane {@link mxCell} whose size has changed.
     * @param w New width of the swimlane.
     * @param h New height of the swimlane.
     * @param parentHorizontal Whether the parent swimlane is horizontal.
     */
    resizeSwimlane(swimlane: mxCell, w: number, h: number, parentHorizontal: boolean): void;

    /**
     * Removes all handlers from the {@link graph} and deletes the reference to it.
     */
    destroy(): void;
  }
}
|
import cn from './PlaceHolderItem.scss';
import React from "react";
const PlaceHolderItem = ({title}) => (
<li className={cn.placeHolderItem}>{title}</li>
);
export default PlaceHolderItem; |
#!/bin/bash
################################################################################
# Pegasus' Linux Administration Tools # Pegasus' Bash Function Library #
# (C)2017-2018 Mattijs Snepvangers # pegasus.ict@gmail.com #
# License: MIT # Please keep my name in the credits #
################################################################################
################################################################################
# PROGRAM_SUITE="Pegasus' Linux Administration Tools"
# SCRIPT_TITLE="AutoLoader"
# MAINTAINER="Mattijs Snepvangers"
# MAINTAINER_EMAIL="pegasus.ict@gmail.com"
# VER_MAJOR=0
# VER_MINOR=0
# VER_PATCH=0
# VER_STATE="ALPHA"
# BUILD=20191104
# LICENSE="MIT"
################################################################################
# fun: autoload_register
# txt: registers function placeholders which will load the respective library when required
# api: pbfl::internal
# fun: autoload_register
# txt: registers function placeholders which will load the respective library when required
# api: pbfl::internal
autoload_register() {
	local -r LIB="dialog"
	# Each stub imports the real library on first call (which redefines the
	# function) and then re-invokes itself; ${FUNCNAME[0]} resolves to the
	# stub's own name at call time. "$@" is quoted so forwarded arguments
	# containing whitespace keep their word boundaries (the original used
	# an unquoted $@, which re-split them).
	# dialog_init() { import_lib "${LIB}" ; ${FUNCNAME[0]} "$@"; }
	dialog_msg_box() { import_lib "${LIB}" ; ${FUNCNAME[0]} "$@"; }
	dialog_yn() { import_lib "${LIB}" ; ${FUNCNAME[0]} "$@"; }
	dialog_menu() { import_lib "${LIB}" ; ${FUNCNAME[0]} "$@"; }
	dialog_radiolist() { import_lib "${LIB}" ; ${FUNCNAME[0]} "$@"; }
	dialog_checklist() { import_lib "${LIB}" ; ${FUNCNAME[0]} "$@"; }
}
|
# Build the ex01 image from the current directory, then run it interactively,
# removing the container on exit. Publishes TCP 10011 and 30033 plus UDP 9987.
# NOTE(review): these are the default TeamSpeak 3 ports (ServerQuery, file
# transfer, voice) — confirm against the Dockerfile.
docker build -t ex01 .
docker run -it --rm -p 10011:10011 -p 30033:30033 -p 9987:9987/udp ex01
|
<reponame>yunsean/yoga
package com.yoga.moment.message.mapper;
import com.yoga.core.mybatis.MyMapper;
import com.yoga.moment.message.model.MomentFollow;
import com.yoga.moment.message.model.MomentUpvote;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.time.LocalDateTime;
import java.util.List;
@Mapper
public interface MomentUpvoteMapper extends MyMapper<MomentUpvote> {
    // Returns all upvotes attached to the given message.
    // NOTE(review): "MementId" looks like a typo for "MomentId"; renaming would
    // break callers and the MyBatis statement binding, so it is left as-is.
    List<MomentUpvote> findUpvoteByMementId(@Param("messageId") long messageId);
    // Counts upvotes received by the given user after the given instant.
    long countUpvoteForUser(@Param("userId") long userId, @Param("after")LocalDateTime after);
}
|
/// Evaluates an AST node within `current_scope`.
///
/// `Ast::Seq` evaluates each child expression in order, collecting the
/// results and stopping at the first error. `Ast::ReferSeq` resolves the
/// identifier against the scope's frame stack.
///
/// NOTE(review): each child's result (itself a `Vec<ResultType>`) is pushed
/// into `evals`, yet the return type is `Result<Vec<ResultType>, _>` — confirm
/// that `ResultType` and the surrounding definitions make these types line up.
fn eval(
    list: &Ast,
    current_scope: &Scope,
    stack_key: &StackKey,
) -> Result<Vec<ResultType>, EvalError> {
    match *list {
        Ast::Seq(ref exprs, ..) => {
            let mut evals = vec![];
            for ex in exprs {
                // `?` propagates the first evaluation error to the caller.
                evals.push(eval(ex, current_scope, stack_key)?);
            }
            Ok(evals)
        }
        Ast::ReferSeq(ref id, ..) => {
            // `highest_frame_idx` locates the innermost frame declaring `id`.
            if let Some(idx) = current_scope.highest_frame_idx(id, stack_key) {
                // Handle the reference sequence based on the index and return the result
                Ok(vec![handle_refer_seq(idx)])
            } else {
                // Handle the situation when the identifier does not exist in the current scope
                Err(EvalError::IdentifierNotFound)
            }
        }
    }
}
<filename>postcss.config.js
// PostCSS build configuration.
module.exports = {
    // No source maps for the processed CSS.
    sourceMap: false,
    plugins: [
        // Inline @import rules before any other processing.
        require('postcss-import')(),
        // Minify with cssnano's "advanced" preset; its bundled autoprefixer
        // pass is configured to add vendor prefixes rather than remove them.
        // NOTE(review): autoprefixer also runs as a separate plugin below —
        // confirm the duplication is intentional.
        require('cssnano')({
            preset: 'advanced',
            autoprefixer: {
                add: true
            }
        }),
        require('autoprefixer')()
    ]
}
|
/*
* Copyright 2015-2021 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nexttypes.nodes;
import java.io.InputStream;
import java.math.BigDecimal;
import java.sql.Savepoint;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.TreeMap;
import javax.mail.internet.InternetAddress;
import com.nexttypes.datatypes.ActionResult;
import com.nexttypes.datatypes.AlterFieldResult;
import com.nexttypes.datatypes.AlterIndexResult;
import com.nexttypes.datatypes.AlterResult;
import com.nexttypes.datatypes.Color;
import com.nexttypes.datatypes.Document;
import com.nexttypes.datatypes.FieldInfo;
import com.nexttypes.datatypes.FieldRange;
import com.nexttypes.datatypes.Filter;
import com.nexttypes.datatypes.HTMLFragment;
import com.nexttypes.datatypes.Image;
import com.nexttypes.datatypes.ImportObjectsResult;
import com.nexttypes.datatypes.ImportTypesResult;
import com.nexttypes.datatypes.Matrix;
import com.nexttypes.datatypes.NXObject;
import com.nexttypes.datatypes.Names;
import com.nexttypes.datatypes.ObjectField;
import com.nexttypes.datatypes.ObjectInfo;
import com.nexttypes.datatypes.Objects;
import com.nexttypes.datatypes.Reference;
import com.nexttypes.datatypes.Tuple;
import com.nexttypes.datatypes.Tuples;
import com.nexttypes.datatypes.Type;
import com.nexttypes.datatypes.TypeField;
import com.nexttypes.datatypes.TypeIndex;
import com.nexttypes.datatypes.TypeInfo;
import com.nexttypes.datatypes.TypeReference;
import com.nexttypes.datatypes.URL;
import com.nexttypes.datatypes.UpdateIdResult;
import com.nexttypes.datatypes.XML;
import com.nexttypes.datatypes.XML.Element;
import com.nexttypes.enums.ImportAction;
import com.nexttypes.enums.Order;
import com.nexttypes.interfaces.ObjectsStream;
import com.nexttypes.interfaces.TypesStream;
import com.nexttypes.system.Context;
import com.nexttypes.system.Module;
public abstract class Node extends Module {
public static void init(Context context) {};
public abstract String getVersion();
public abstract String[] getGroups(String user);
public abstract ZonedDateTime create(Type type);
public abstract ZonedDateTime addField(String type, String field, TypeField typeField);
public abstract ZonedDateTime addIndex(String type, String index, TypeIndex typeIndex);
public abstract AlterResult alter(Type type);
public abstract AlterResult alter(Type type, ZonedDateTime adate);
public abstract ZonedDateTime rename(String type, String newName);
public abstract AlterFieldResult alterField(String type, String field, TypeField typeField);
public abstract AlterIndexResult alterIndex(String type, String index, TypeIndex typeIndex);
public abstract ZonedDateTime renameField(String type, String field, String newName);
public abstract ZonedDateTime renameIndex(String type, String index, String newName);
public abstract ZonedDateTime insert(NXObject object);
public abstract ZonedDateTime update(NXObject object);
public abstract ZonedDateTime update(NXObject object, ZonedDateTime udate);
public abstract ZonedDateTime update(String type, String id, byte[] data);
public abstract UpdateIdResult updateId(String type, String id, String newId);
public abstract ZonedDateTime updateField(String type, String id, String field, Object value);
public abstract ZonedDateTime updatePassword(String type, String id, String field, String currentPassword,
String newPassword, String newPasswordRepeat);
public abstract boolean checkPassword(String type, String id, String field, String password);
public abstract NXObject get(String type, String id, String[] fields, String lang, boolean fulltext,
boolean binary, boolean documentPreview, boolean password, boolean objectName,
boolean referencesName);
public abstract Objects select(String type, String[] fields, String lang, Filter filter, String search,
LinkedHashMap<String, Order> order, Long offset, Long limit);
public abstract Objects select(String type, String[] fields, String lang, Filter filter, String search,
LinkedHashMap<String, Order> order, boolean fulltext, boolean binary, boolean documentPreview,
boolean password, boolean objectsName, boolean referencesName, Long offset, Long limit);
public abstract Objects select(String type, String[] fields, String lang, Filter[] filters, String search,
LinkedHashMap<String, Order> order, Long offset, Long limit);
public abstract Objects select(String type, String[] fields, String lang, Filter[] filters, String search,
LinkedHashMap<String, Order> order, boolean fulltext, boolean binary, boolean documentPreview,
boolean password, boolean objectsName, boolean referencesName, Long offset, Long limit);
public abstract Tuples select(String type, StringBuilder sql, ArrayList<Object> parameters, String filters,
String search, String[] searchFields, String order, Long offset, Long limit);
public abstract Tuple[] select(String type, StringBuilder sql, ArrayList<Object> parameters, String filters,
String order);
public abstract ObjectsStream selectStream(String type, String[] fields, String lang, Filter filter,
String search, LinkedHashMap<String, Order> order, Long offset, Long limit);
public abstract ObjectsStream selectStream(String type, String[] fields, String lang, Filter filter,
String search, LinkedHashMap<String, Order> order, boolean fulltext, boolean binary,
boolean documentPreview, boolean password, boolean objectsName, boolean referencesName,
Long offset, Long limit);
public abstract ObjectsStream selectStream(String type, String[] fields, String lang, Filter[] filters,
String search, LinkedHashMap<String, Order> order, Long offset, Long limit);
public abstract ObjectsStream selectStream(String type, String[] fields, String lang, Filter[] filters,
String search, LinkedHashMap<String, Order> order, boolean fulltext, boolean binary,
boolean documentPreview, boolean password, boolean objectsName, boolean referencesName,
Long offset, Long limit);
public abstract Type getType(String type);
public abstract LinkedHashMap<String, Type> getTypes(String[] types);
public abstract String[] getTypesName();
public abstract TypeInfo[] getTypesInfo();
public abstract TreeMap<String, TypeInfo> getTypesInfoOrderByName();
public abstract Boolean existsType(String type);
public abstract Boolean existsObject(String type, String id);
public abstract String[] getBinaryFieldsName(String type);
public abstract String getName(String type, String id, String lang);
public abstract Names getNames(String type, String lang);
public abstract Names getNames(String type, String lang, String search, Long offset,
Long limit);
public abstract Names getNames(String type, String sql, Object[] parameters, String lang,
String search, Long offset, Long limit);
public abstract Names getNames(String type, StringBuilder sql,
ArrayList<Object> parameters, String lang, String search, Long offset, Long limit);
public abstract Names getNames(String referencedType, String referencingType,
String referencingAction, String referencingField, String lang);
public abstract Names getNames(String referencedType, String referencingType,
String referencingAction, String referencingField, String lang, String search,
Long offset, Long limit);
public abstract LinkedHashMap<String, ObjectInfo[]> getObjectsInfo(String[] types);
public abstract Reference[] getReferences();
public abstract TreeMap<String, TreeMap<String, TreeMap<String, Reference>>>
getReferencesOrderByNames();
public abstract TypeReference[] getDownReferences(String type);
public abstract TypeReference[] getUpReferences(String type);
public abstract Reference[] getUpReferences(String[] types);
public abstract TypeField getTypeField(String type, String field);
public abstract LinkedHashMap<String, TypeField> getTypeFields(String type);
public abstract LinkedHashMap<String, TypeField> getTypeFields(String type, String... fields);
public abstract TypeIndex getTypeIndex(String type, String index);
public abstract LinkedHashMap<String, TypeIndex> getTypeIndexes(String type);
public abstract LinkedHashMap<String, TypeIndex> getTypeIndexes(String type, String... indexes);
public abstract String getFieldType(String type, String field);
public abstract String getActionFieldType(String type, String action, String field);
public abstract TypeField getActionField(String type, String action, String field);
public abstract LinkedHashMap<String, TypeField> getActionFields(String type, String action);
public abstract LinkedHashMap<String, LinkedHashMap<String, TypeField>> getTypeActions(String type);
public abstract Tuple getFieldsSize(String type, String id);
public abstract String getFieldContentType(String type, String field);
public abstract String getFieldContentType(String type, String id, String field);
public abstract Object getFieldDefault(String type, String field);
public abstract FieldRange getFieldRange(String type, String field);
public abstract FieldRange getActionFieldRange(String type, String action, String field);
public abstract String getCompositeFieldContentType(String type, String id, String field);
public abstract LinkedHashMap<String, String> getFieldsContentType(String type);
public abstract LinkedHashMap<String, FieldInfo> getFieldsInfo(String type, String id);
public abstract void drop(String... types);
public abstract ZonedDateTime dropField(String type, String field);
public abstract ZonedDateTime dropIndex(String type, String index);
public abstract void delete(String type, String... objects);
public abstract Object getField(String type, String id, String field);
public abstract String getStringField(String type, String id, String field);
public abstract byte[] getBinaryField(String type, String id, String field);
public abstract Image getImageField(String type, String id, String field);
public abstract HTMLFragment getHTMLField(String type, String id, String field);
public abstract byte[] getImageContent(String type, String id, String field);
public abstract byte[] getImageThumbnail(String type, String id, String field);
public abstract String getImageContentType(String type, String id, String field);
public abstract String getDocumentContentType(String type, String id, String field);
public abstract XML getXMLField(String type, String id, String field);
public abstract Element getHTMLElement(String type, String id, String field, String element);
public abstract Element getXMLElement(String type, String id, String field, String element);
public abstract Document getDocumentField(String type, String id, String field);
public abstract String getPasswordField(String type, String id, String field);
public abstract ObjectField getObjectField(String type, String id, String field);
public abstract ZonedDateTime getADate(String type);
public abstract ZonedDateTime getUDate(String type, String id);
public abstract String getETag(String type, String id);
public abstract ActionResult executeAction(String type, String id, String action, Object... parameters);
public abstract ActionResult executeAction(String type, String[] objects, String action, Object... parameters);
public abstract Long count(String type);
public abstract Long count(String sql, Object... parameters);
public abstract Long count(StringBuilder sql, Object... parameters);
public abstract Long count(StringBuilder sql, ArrayList<Object> parameters);
public abstract boolean hasObjects(String type);
public abstract boolean hasNullValues(String type, String field);
public abstract int execute(String sql, Object... parameters);
public abstract int execute(StringBuilder sql, Object... parameters);
public abstract int execute(StringBuilder sql, ArrayList<Object> parameters);
public abstract int execute(String sql, Integer expectedRows, Object... parameters);
public abstract int execute(StringBuilder sql, Integer expectedRows, Object... parameters);
public abstract int execute(StringBuilder sql, Integer expectedRows,
ArrayList<Object> parameters);
public abstract int execute(String sql, boolean useSavepoint, Integer expectedRows,
Object... parameters);
public abstract int execute(StringBuilder sql, boolean useSavepoint, Integer expectedRows,
Object... parameters);
public abstract int execute(StringBuilder sql, boolean useSavepoint, Integer expectedRows,
ArrayList<Object> parameters);
public abstract TypesStream exportTypes(String[] types, boolean includeObjects);
public abstract TypesStream exportTypes(String[] types, Filter filter, boolean includeObjects);
public abstract TypesStream exportTypes(String[] types, Filter[] filters, boolean includeObjects);
public abstract TypesStream backup(boolean full);
public abstract ObjectsStream exportObjects(String type, String[] objects, LinkedHashMap<String, Order> order);
public abstract ImportTypesResult importTypes(InputStream types, ImportAction existingTypesAction,
ImportAction existingObjectsAction);
public abstract ImportTypesResult importTypes(TypesStream types, ImportAction existingTypesAction,
ImportAction existingObjectsAction);
public abstract ImportObjectsResult importObjects(InputStream objects, ImportAction existingObjectsAction);
public abstract ImportObjectsResult importObjects(ObjectsStream objects, ImportAction existingObjectsAction);
public abstract void scanVirus(String type, String[] objects);
public abstract Short getInt16(String sql, Object... parameters);
public abstract Integer getInt32(String sql, Object... parameters);
public abstract Long getInt64(String sql, Object... parameters);
public abstract Float getFloat32(String sql, Object... parameters);
public abstract Double getFloat64(String sql, Object... parameters);
public abstract BigDecimal getNumeric(String sql, Object... parameters);
public abstract String getString(String sql, Object... parameters);
public abstract String getText(String sql, Object... parameters);
public abstract LocalDate getDate(String sql, Object... parameters);
public abstract LocalTime getTime(String sql, Object... parameters);
public abstract LocalDateTime getDateTime(String sql, Object... parameters);
public abstract byte[] getBinary(String sql, Object... parameters);
public abstract HTMLFragment getHTML(String sql, String allowedTags, Object... parameters);
public abstract URL getURL(String sql, Object... parameters);
public abstract InternetAddress getEmail(String sql, Object... parameters);
public abstract String getTel(String sql, Object... parameters);
public abstract Boolean getBoolean(String sql, Object... parameters);
public abstract ZoneId getTimeZone(String sql, Object... parameters);
public abstract Color getColor(String sql, Object... parameters);
public abstract Image getImage(String sql, Object... parameters);
public abstract Document getDocument(String sql, Object... parameters);
public abstract ZonedDateTime getUTCDateTime(String sql, Object... parameters);
public abstract Object getObject(String sql, Object... parameters);
public abstract Short[] getInt16Array(String sql, Object... parameters);
public abstract Integer[] getInt32Array(String sql, Object... parameters);
public abstract Long[] getInt64Array(String sql, Object... parameters);
public abstract Float[] getFloat32Array(String sql, Object... parameters);
public abstract Double[] getFloat64Array(String sql, Object... parameters);
public abstract BigDecimal[] getNumericArray(String sql, Object... parameters);
public abstract Boolean[] getBooleanArray(String sql, Object... parameters);
public abstract String[] getStringArray(String sql, Object... parameters);
public abstract String[] getTextArray(String sql, Object... parameters);
public abstract LocalDate[] getDateArray(String sql, Object... parameters);
public abstract LocalTime[] getTimeArray(String sql, Object... parameters);
public abstract LocalDateTime[] getDateTimeArray(String sql, Object... parameters);
public abstract ZonedDateTime[] getUTCDateTimeArray(String sql, Object... parameters);
public abstract byte[][] getBinaryArray(String sql, Object... parameters);
public abstract HTMLFragment[] getHTMLArray(String sql, String allowedTags, Object... parameters);
public abstract URL[] getURLArray(String sql, Object... parameters);
public abstract InternetAddress[] getEmailArray(String sql, Object... parameters);
public abstract String[] getTelArray(String sql, Object... parameters);
public abstract ZoneId[] getTimeZoneArray(String sql, Object... parameters);
public abstract Color[] getColorArray(String sql, Object... parameters);
public abstract Image[] getImageArray(String sql, Object... parameters);
public abstract Document[] getDocumentArray(String sql, Object... parameters);
public abstract <T> T[] getArray(String sql, Class<T> type, Object... parameters);
public abstract Tuple getTuple(String sql, Object... parameters);
public abstract Tuple getTuple(StringBuilder sql, Object... parameters);
public abstract Tuple getTuple(StringBuilder sql, ArrayList<Object> parameters);
public abstract Matrix getMatrix(String sql, String[] axes, Object... parameters);
public abstract Tuple[] query(String sql, Object... parameters);
public abstract Tuple[] query(StringBuilder sql, Object... parameters);
public abstract Tuple[] query(StringBuilder sql, ArrayList<Object> parameters);
public abstract <T> T[] query(String sql, Class<T> type, Object... parameters);
public abstract void commit();
public abstract Savepoint setSavepoint();
public abstract void rollback();
public abstract void rollback(Savepoint savepoint);
public abstract void setDeferredConstraints(boolean status);
public abstract void close();
} |
import { defineConfig } from 'vite'
import vue from '@vitejs/plugin-vue'

// https://vitejs.dev/config/
// Serve from the repo sub-path in production builds (GitHub Pages),
// and from the root during local development.
export default defineConfig(({ mode }) => {
  const isProduction = mode === 'production'
  return {
    plugins: [vue()],
    base: isProduction ? '/tiptap-search-n-replace-demo/' : '/',
  }
})
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase script: copies, strips and
# re-signs the Pod frameworks into the app bundle.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Location of the Swift runtime dylibs for the current platform/toolchain.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework given as $1 into the app's Frameworks folder,
# strips architectures the target cannot use, re-signs it, and (on
# Xcode < 7) embeds any Swift runtime dylibs it links against.
install_framework()
{
  # Resolve the framework's source path: try the full relative path under
  # BUILT_PRODUCTS_DIR, then just its basename there, then the path as given.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns don't throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's executable; fall back to a bare binary copy.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # otool lists linked @rpath/libswift* dylibs; sed extracts the dylib names.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# Skips signing when no identity is expanded or when the build settings
# explicitly disable/forbid code signing.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# Removes from the binary $1 every architecture slice not listed in
# VALID_ARCHS, rewriting the file in place with lipo.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the Pod frameworks for the active build configuration.
# (Both branches currently install the same set of frameworks.)
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "Pods-Instagram Deus/Bolts.framework"
  install_framework "Pods-Instagram Deus/Parse.framework"
  install_framework "Pods-Instagram Deus/ParseUI.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "Pods-Instagram Deus/Bolts.framework"
  install_framework "Pods-Instagram Deus/Parse.framework"
  install_framework "Pods-Instagram Deus/ParseUI.framework"
fi
|
import React from 'react';
import { Modal, Form, Button } from 'semantic';
import modal from 'helpers/modal';
import AutoFocus from 'components/AutoFocus';
import UrlField from 'components/form-fields/UrlField';
import ErrorMessage from 'components/ErrorMessage';
@modal
export default class AddLink extends React.Component {
constructor(props) {
super(props);
this.state = {
error: null,
url: '',
};
}
onLinkChange = (evt, { value }) => {
this.setState({
url: value,
});
};
onSubmit = async (evt) => {
evt.stopPropagation();
try {
const { url } = this.state;
this.props.onSubmit(evt, { value: url });
this.props.close();
} catch (error) {
this.setState({
error,
});
}
};
render() {
const { url, error } = this.state;
return (
<React.Fragment>
<Modal.Header>Add Link</Modal.Header>
<Modal.Content>
<AutoFocus>
<Form id="add-link" error={!!error} onSubmit={this.onSubmit}>
<ErrorMessage error={error} />
<UrlField label="URL" value={url} onChange={this.onLinkChange} />
</Form>
</AutoFocus>
</Modal.Content>
<Modal.Actions>
<Button primary form="add-link" content="Submit" />
</Modal.Actions>
</React.Fragment>
);
}
}
|
<reponame>hillcrestpaul0719/FantasyGame<filename>FantasyGame/src/MiningAction.java
import java.util.List;
import java.util.ArrayList;
import java.util.Random;
/**
* The action of mining
*
* @version 06-18-2021
* @author <NAME> & <NAME>
*/
public class MiningAction extends Action {
    /**
     * Constructor for mining
     */
    public MiningAction() {
        super("mine", "Mine for gold");
    }
    /**
     * Allows the player to mine.
     * Surveys the player's current room for "Gold" / "Gold Chunk" items,
     * reports what is present, then mines either a gold chunk or up to 10
     * pieces of gold, moving them from the room's inventory to the player's.
     *
     * @param player   the player performing the action
     * @param location the location whose room inventory is mined
     */
    @Override
    public void perform(Player player, Location location) {
        //Variables
        Random rng = new Random();
        List<Item> items = new ArrayList<Item>();
        int quantity = 0;       // weighted draw pool size (quantity/10 + 1 per item)
        int chunkQuantity = 0;  // raw quantity of "Gold Chunk" in the room
        boolean empty = true;   // true until some gold item with nonzero quantity is seen
        //Adds gold and gold chunks in the room to the items ArrayList
        System.out.print("You survey the area for valuables, and there is ");
        for (Item item:player.getRoom(location).inventory.getItems()) {
            if (item.getName().equals("Gold") ||
                item.getName().equals("Gold Chunk")) items.add(item);
        }
        //Checks whether there's gold or gold chunks in the room and if so
        //prints the gold/gold chunks and their quantity
        if (items.size() > 0) {
            for (int i=0; i<items.size(); i++) {
                if (i > 0) System.out.print(", ");
                System.out.print(items.get(i).quantity + " " + items.get(i).getName());
                // NOTE(review): the draw pool is scaled (quantity/10 + 1) but is
                // later compared against the UNscaled chunkQuantity; confirm the
                // intended chunk-vs-gold odds.
                quantity += items.get(i).quantity / 10 + 1;
                if (items.get(i).getName().equals("Gold Chunk"))
                    chunkQuantity = items.get(i).quantity;
                if (items.get(i).quantity != 0) empty = false;
            }
        } else System.out.print("nothing");
        System.out.println(" in the room.");
        //Checks if the player has mined before, and displays a custom message if
        //they haven't
        if (!player.mined) {
            System.out.println("You wield a pickaxe for the first time, and are a bit nervous,");
            System.out.println("but you strike down on the gold.");
            System.out.println();
            player.mined = true;
        }
        //The actual action of mining, which takes time
        if (items.size() != 0) {
            System.out.print("You start mining...");
            int ore = rng.nextInt(quantity);
            if (ore < chunkQuantity) {
                try {
                    // Chunks take longer to mine (5s vs 1s).
                    Thread.sleep(5000);
                    player.getRoom(location).inventory.give(player.inventory, "Gold Chunk", 1);
                    System.out.println("And find a gold chunk!");
                }
                catch (Exception e) {}
            }
            else {
                try {
                    Thread.sleep(1000);
                    // NOTE(review): if only "Gold Chunk" remains (no "Gold" item),
                    // getItem("Gold") may return null here and NPE — the empty catch
                    // below would swallow it silently. Confirm against Inventory.
                    int goldQuantity = player.getRoom(location).inventory.getItem("Gold").quantity;
                    if (goldQuantity > 9) {
                        player.getRoom(location).inventory.give(player.inventory, "Gold", 10);
                        System.out.println("and get 10 pieces of gold.");
                    }
                    else {
                        player.getRoom(location).inventory.give(player.inventory, "Gold", goldQuantity);
                        if (goldQuantity != 1)
                            System.out.println("and get " + goldQuantity + " pieces of gold.");
                        else System.out.println("and get 1 piece of gold.");
                    }
                }
                catch (Exception e) {}
            }
        }
        //Clears the room if there are no more gold and gold chunks in the room
        // NOTE(review): clear() appears to wipe the ENTIRE room inventory, and
        // `empty` is also true when no gold was found at all — confirm intended.
        if (empty) player.getRoom(location).inventory.clear();
    }
}
|
import { Message } from 'element-ui'
import { Message as MESSAGE } from '../../utils/ui/message'

/**
 * Element UI backed implementation of the MESSAGE interface.
 * Each method shows a toast of the matching severity and resolves with the
 * Element UI message handle, or resolves with null when no text is given.
 */
export class MessageHelper implements MESSAGE {
  public async info(message?: string): Promise<any> {
    return message ? Message.info(message) : null
  }

  public async warning(message?: string): Promise<any> {
    return message ? Message.warning(message) : null
  }

  public async error(message?: string): Promise<any> {
    return message ? Message.error(message) : null
  }

  public async success(message?: string): Promise<any> {
    return message ? Message.success(message) : null
  }
}
|
<gh_stars>0
package vectorwing.farmersdelight.common.item;
import net.minecraft.world.InteractionHand;
import net.minecraft.world.InteractionResultHolder;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.item.ItemUtils;
import net.minecraft.world.item.UseAnim;
import net.minecraft.world.level.Level;
/**
 * A consumable item that is drunk rather than eaten: uses the DRINK
 * animation and a fixed 32-tick use duration, and can be used instantly
 * without a target block.
 */
public class DrinkableItem extends ConsumableItem
{
	/** Creates a drinkable item with default tooltip behaviour. */
	public DrinkableItem(Properties properties) {
		super(properties);
	}

	/**
	 * Creates a drinkable item, optionally showing potion-effect and/or
	 * custom tooltips (flags forwarded to ConsumableItem).
	 */
	public DrinkableItem(Properties properties, boolean hasPotionEffectTooltip, boolean hasCustomTooltip) {
		super(properties, hasPotionEffectTooltip, hasCustomTooltip);
	}

	@Override
	public int getUseDuration(ItemStack stack) {
		// Fixed drink duration of 32 ticks for all stacks.
		return 32;
	}

	@Override
	public UseAnim getUseAnimation(ItemStack stack) {
		return UseAnim.DRINK;
	}

	@Override
	public InteractionResultHolder<ItemStack> use(Level worldIn, Player playerIn, InteractionHand handIn) {
		// Begin drinking immediately on right-click; no block/entity target needed.
		return ItemUtils.startUsingInstantly(worldIn, playerIn, handIn);
	}
}
|
/**
 * Mapper for water-station order detail rows.
 * NOTE(review): insert/selectById/update/delete redeclare operations that a
 * typical BaseMapper already provides (with different return types here,
 * e.g. void vs int) — confirm these overrides are intentional.
 */
public interface SsWaterstationOrderDetailMapper extends BaseMapper<SsWaterstationOrderDetail> {
    // Method signature for inserting a new order detail
    void insert(SsWaterstationOrderDetail orderDetail);
    // Method signature for retrieving an order detail by its ID
    SsWaterstationOrderDetail selectById(Long orderId);
    // Method signature for updating an existing order detail
    void update(SsWaterstationOrderDetail orderDetail);
    // Method signature for deleting an order detail by its ID
    void delete(Long orderId);
    // Custom query method to retrieve order details by order ID
    List<SsWaterstationOrderDetail> selectByOrderId(Long orderId);
    // Custom query method to retrieve order details by customer ID
    List<SsWaterstationOrderDetail> selectByCustomerId(Long customerId);
}
# Calculating the total cost of items in a grocery list
list_items = {"banana": 2.5, "apple": 3.7, "orange": 4.2, "tomato": 1.2}
# sum() over the dict's prices replaces the manual accumulation loop.
# (The original also had a SyntaxError: the print() call was missing its
# closing parenthesis.)
list_cost = sum(list_items.values())
print('The total cost of the items in the list is {}'.format(list_cost))
import numpy as np
import os.path
import time
import matplotlib._pylab_helpers
from matplotlib.backends.backend_pdf import PdfPages
# import plotly.plotly as py
# import plotly.tools as tls
def return_length_of_nonzero_array(X):
    """
    Takes in a numpy.ndarray X of shape (m,n) and returns the length of the array that removes any trailing zeros.
    """
    assert type(X) is np.ndarray, "X should be a numpy array"
    assert np.shape(X)[1]!=1, "X should be a wide rectangular array. (m,1) is a column, therefore a nonzero X of this shape will return 1 (trivial solution). Transpose X to properly identify nonzero array length."
    assert np.shape(X)!=(1,1), "Check input. Should not be of shape (1,1) (trivial solution)."
    # Everything after the first column; the first column is always kept.
    tail = X[:, 1:]
    if (tail != 0).all():
        # No zero entries at all past column 0: keep the full width.
        return np.shape(X)[1]
    # Boolean per column of `tail`: True where the entire column is zero.
    # argmax finds the first such column; +1 converts back to an index in X.
    zero_columns = (tail == 0).all(axis=0)
    return np.argmax(zero_columns) + 1
def save_figures(Destination,BaseFileName,**kwargs):
    """
    Save open matplotlib figures as sequentially numbered .jpg files under
    Destination + SubFolder, optionally bundling them into one PDF as well.

    Keyword arguments:
        SubFolder  -- subdirectory name ending in "/" (default: timestamp/)
        figs       -- list of figures to save (default: all open figures)
        SaveAsPDF  -- if True, also write every figure into a single PDF
    """
    SubFolder = kwargs.get("SubFolder",time.strftime("%Y_%m_%d_%H%M%S")+"/")
    FilePath = Destination + SubFolder
    assert type(Destination) == str and Destination[-1] == "/", \
        "Destination must be a string ending is '/'. Currently Destination = " + str(Destination)
    assert type(SubFolder) == str and SubFolder[-1] == "/", \
        "SubFolder must be a string ending is '/'. Currently SubFolder = " + str(SubFolder)
    if not os.path.exists(FilePath):
        os.makedirs(FilePath)
    # Default to every currently open figure, via pylab's figure manager.
    figs = kwargs.get("figs",
        [manager.canvas.figure for manager in matplotlib._pylab_helpers.Gcf.get_all_fig_managers()]
        )
    SaveAsPDF = kwargs.get("SaveAsPDF",False)
    assert type(SaveAsPDF)==bool, "SaveAsPDF must be either True or False."
    # Find the first unused run number "<Base>_NN-01.jpg" in the folder.
    i = 1
    FileName = BaseFileName + "_" + "{:0>2d}".format(i) + "-01.jpg"
    if os.path.exists(FilePath + FileName) == True:
        while os.path.exists(FilePath + FileName) == True:
            i += 1
            FileName = BaseFileName + "_" + "{:0>2d}".format(i) + "-01.jpg"
    # Write one .jpg per figure, numbered -01, -02, ... within this run.
    # NOTE(review): this loop reuses (shadows) the run counter `i`; harmless
    # today since FileName is only sliced below, but fragile — consider renaming.
    for i in range(len(figs)):
        figs[i].savefig(FilePath + FileName[:-6] + "{:0>2d}".format(i+1) + ".jpg")
    if SaveAsPDF == True:
        # PDF shares the run prefix: "<Base>_NN.pdf".
        PDFFileName = FileName[:-7] + ".pdf"
        assert not os.path.exists(FilePath + PDFFileName), \
            ("Error with naming file. "
            + PDFFileName
            + " should not already exist as "
            + FileName
            + " does not exist. Try renaming or deleting "
            + PDFFileName
            )
        PDFFile = PdfPages(FilePath + PDFFileName)
        if len(figs)==1:
            PDFFile.savefig(figs[0])
        else:
            [PDFFile.savefig(fig) for fig in figs]
        PDFFile.close()
#
# def save_figures_to_plotly(FileName,**kwargs):
# """
#
# """
# figs = kwargs.get("figs",
# [manager.canvas.figure for manager in matplotlib._pylab_helpers.Gcf.get_all_fig_managers()]
# )
#
# FileTime = time.strftime("%Y_%m_%d_%H%M%S")
# for i in range(len(figs)):
# plotly_fig = tls.mpl_to_plotly(figs[i])
# py.plot(plotly_fig,filename=(FileName + "-" + FileTime + "-" + "{:0>2d}".format(i+1)))
|
/*
* Minio Cloud Storage, (C) 2015, 2016 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package main
import (
"errors"
"net"
"net/http"
"os"
"strconv"
"strings"
"syscall"
"time"
router "github.com/gorilla/mux"
"github.com/journeymidnight/yig/api"
"github.com/journeymidnight/yig/helper"
"github.com/journeymidnight/yig/log"
"github.com/journeymidnight/yig/storage"
)
// ServerConfig bundles everything needed to build and start the API server.
type ServerConfig struct {
	Address      string       // listen address in "host:port" form
	KeyFilePath  string       // path for SSL key file
	CertFilePath string       // path for SSL certificate file
	Logger       *log.Logger  // global logger
	ObjectLayer  *storage.YigStorage
}
// configureServer handler returns final handler for the http server.
// It builds the API router and wraps it in the generic middleware chain.
func configureServerHandler(c *ServerConfig) http.Handler {
	// Initialize API.
	apiHandlers := api.ObjectAPIHandlers{
		ObjectAPI: c.ObjectLayer,
	}

	// Initialize router.
	mux := router.NewRouter()

	// Register all routers.
	api.RegisterAPIRouter(mux, apiHandlers)
	// Add new routers here.

	// List of some generic handlers which are applied for all
	// incoming requests.
	var handlerFns = []api.HandlerFunc{
		// Limits the number of concurrent http requests.
		api.SetCommonHeaderHandler,
		// CORS setting for all browser API requests.
		api.SetCorsHandler,
		// Validates all incoming URL resources, for invalid/unsupported
		// resources client receives a HTTP error.
		api.SetIgnoreResourcesHandler,
		// Auth handler verifies incoming authorization headers and
		// routes them accordingly. Client receives a HTTP error for
		// invalid/unsupported signatures.
		api.SetAuthHandler,
		// Add new handlers here.
		api.SetLogHandler,
		api.NewAccessLogHandler,
		// This handler must be last one.
		api.SetGenerateContextHandler,
	}

	// Register rest of the handlers.
	return api.RegisterHandlers(mux, c.ObjectLayer.MetaStorage, handlerFns...)
}
// configureServer configure a new server instance
// with the request handler chain, generous read/write timeouts and the
// keep-alive setting taken from the global configuration.
func configureServer(c *ServerConfig) *api.Server {
	apiServer := &api.Server{
		Server: &http.Server{
			Addr: c.Address,
			// Adding timeout of 10 minutes for unresponsive client connections.
			ReadTimeout:    10 * time.Minute,
			WriteTimeout:   10 * time.Minute,
			Handler:        configureServerHandler(c),
			MaxHeaderBytes: 1 << 20, // 1 MiB header cap
		},
	}
	apiServer.Server.SetKeepAlivesEnabled(helper.CONFIG.KeepAlive)

	// Returns configured HTTP server.
	return apiServer
}
// getListenIPs - gets all the ips to listen on.
// If the configured address pins a host, only that host is returned;
// otherwise every IPv4 address of the machine's interfaces is collected.
func getListenIPs(httpServerConf *http.Server) (hosts []string, port string) {
	host, port, err := net.SplitHostPort(httpServerConf.Addr)
	helper.FatalIf(err, "Unable to parse host port.")

	switch {
	case host != "":
		hosts = append(hosts, host)
	default:
		addrs, err := net.InterfaceAddrs()
		helper.FatalIf(err, "Unable to determine network interface address.")
		for _, addr := range addrs {
			// Only consider plain IP network addresses.
			if addr.Network() == "ip+net" {
				host := strings.Split(addr.String(), "/")[0]
				// To4() != nil keeps IPv4 addresses only.
				if ip := net.ParseIP(host); ip.To4() != nil {
					hosts = append(hosts, host)
				}
			}
		}
	}
	return hosts, port
}
// Print listen ips.
// Logs one URL per host, using https when TLS is enabled.
func printListenIPs(tls bool, hosts []string, port string) {
	for _, host := range hosts {
		if tls {
			logger.Printf(5, "    https://%s:%s\n", host, port)
		} else {
			logger.Printf(5, "    http://%s:%s\n", host, port)
		}
	}
}
// Extract port number from address address should be of the form host:port.
// Exits the process (via FatalIf) on a malformed address or port.
func getPort(address string) int {
	_, portStr, err := net.SplitHostPort(address)
	helper.FatalIf(err, "Unable to parse host port.")
	portInt, err := strconv.Atoi(portStr)
	helper.FatalIf(err, "Invalid port number.")
	return portInt
}
// Make sure that none of the other processes are listening on the
// specified port on any of the interfaces.
//
// On linux if a process is listening on 127.0.0.1:9000 then Listen()
// on ":9000" fails with the error "port already in use".
// However on macOS Listen() on ":9000" falls back to the IPv6 address.
// This causes confusion on macOS that minio server is not reachable
// on 127.0.0.1 even though minio server is running. So before we start
// the minio server we make sure that the port is free on all the IPs.
func checkPortAvailability(port int) {
	// isAddrInUse reports whether err unwraps to the EADDRINUSE errno,
	// i.e. another process is already listening at the specified port.
	isAddrInUse := func(err error) bool {
		neterr, ok := err.(*net.OpError)
		if !ok {
			return false
		}
		osErr, ok := neterr.Err.(*os.SyscallError)
		if !ok {
			return false
		}
		sysErr, ok := osErr.Err.(syscall.Errno)
		if !ok {
			return false
		}
		return sysErr == syscall.EADDRINUSE
	}

	ifcs, err := net.Interfaces()
	if err != nil {
		helper.FatalIf(err, "Unable to list interfaces.")
	}
	for _, ifc := range ifcs {
		addrs, err := ifc.Addrs()
		if err != nil {
			helper.FatalIf(err, "Unable to list addresses on interface %s.", ifc.Name)
		}
		for _, addr := range addrs {
			ipnet, ok := addr.(*net.IPNet)
			if !ok {
				helper.ErrorIf(errors.New(""), "Failed to assert type on (*net.IPNet) interface.")
				continue
			}
			ip := ipnet.IP
			network := "tcp4"
			if ip.To4() == nil {
				network = "tcp6"
			}
			// Probe the port by binding to it; close immediately on success.
			tcpAddr := net.TCPAddr{IP: ip, Port: port, Zone: ifc.Name}
			l, err := net.ListenTCP(network, &tcpAddr)
			if err != nil {
				if isAddrInUse(err) {
					// Fail if port is already in use.
					// (Format verb fixed from the malformed "%.d" to "%d".)
					helper.FatalIf(err, "Unable to listen on %s:%d.", tcpAddr.IP, tcpAddr.Port)
				} else {
					// Ignore other errors.
					continue
				}
			}
			if err = l.Close(); err != nil {
				helper.FatalIf(err, "Unable to close listener on %s:%d.", tcpAddr.IP, tcpAddr.Port)
			}
		}
	}
}
// isSSL reports whether both the TLS key and certificate files exist,
// i.e. whether the server can be started with TLS enabled.
func isSSL(c *ServerConfig) bool {
	return helper.FileExists(c.KeyFilePath) && helper.FileExists(c.CertFilePath)
}
// ApiServer is the globally visible API server handle used by stopApiServer.
var ApiServer *api.Server

// blocks after server started
// startApiServer resolves the listen address, verifies the port is free,
// builds the server, and serves HTTP(S) on a background goroutine.
func startApiServer(c *ServerConfig) {
	serverAddress := c.Address
	host, port, _ := net.SplitHostPort(serverAddress)
	// If port empty, default to port '80'
	if port == "" {
		port = "80"
		// if SSL is enabled, choose port as "443" instead.
		if isSSL(c) {
			port = "443"
		}
	}

	// Check if requested port is available.
	checkPortAvailability(getPort(net.JoinHostPort(host, port)))

	// Configure server.
	apiServer := configureServer(c)
	// Publish the configured server so stopApiServer() can reach it.
	// Previously only the local variable was set, leaving ApiServer nil and
	// making stopApiServer() panic with a nil pointer dereference.
	ApiServer = apiServer

	hosts, port := getListenIPs(apiServer.Server) // get listen ips and port.
	// NOTE(review): configureServer never sets TLSConfig, so `tls` is always
	// false here and the printed URLs show http even when serving TLS — confirm.
	tls := apiServer.Server.TLSConfig != nil // 'true' if TLS is enabled.

	logger.Println(5, "\nS3 Object Storage:")
	// Print api listen ips.
	printListenIPs(tls, hosts, port)

	go func() {
		var err error
		// Configure TLS if certs are available.
		if isSSL(c) {
			err = apiServer.Server.ListenAndServeTLS(c.CertFilePath, c.KeyFilePath)
		} else {
			// Fallback to http.
			err = apiServer.Server.ListenAndServe()
		}
		helper.FatalIf(err, "API server error.")
	}()
}
// stopApiServer stops the globally registered API server.
// NOTE(review): ApiServer must be assigned before this is called — it is nil
// otherwise and this call would panic; confirm the assignment site.
func stopApiServer() {
	ApiServer.Stop()
}
|
<gh_stars>0
# implementation of card game - Memory
# I originally coded this with timers for hiding the cards instead of hiding them
# on a mouse click. And I removed cards that matched instead of leaving them face up.
# I thought it worked pretty well but it didn't meet the requirements of the grading
# rubric, so I had to make changes. If you want to see that code, you can see it at
# http://www.codeskulptor.org/#user39_efDZwo8MIu_0.py
import simplegui
import random
from math import sqrt
cards = list() # list to hold the cards
card_size = 75 # x dimension of card (y dimension is calculated based on this)
margins = ( 20, 20 ) # spacing around edges
pad = ( 10, 10 ) # intercard spacing
##showtime = 700 # number of milliseconds to show revealed, unmatched cards
##matchtime = 350 # number of milliseconds to show revealed, matched cards
fontsize = 35 # size of the font for card faces
# global game state: turn counters plus the (up to) two currently-revealed cards
game = {
    'over' : False, # True once every pair has been matched
    'best' : 0, # fewest draws taken to finish a game (0 = none finished yet)
    'draws' : 0, # draws taken so far in the current game
    'drawn' : None, # first card of the pair currently revealed (or None/False)
    'match' : None, # second card of the pair currently revealed (or None/False)
    }
game_over_text = "Game Over!"
animated = False
animation_tick = 0 # grows each timer tick; doubles as the end-text font size
w = card_size # width of a card is the card_size
h = ((1 + sqrt(5)) / 2 ) *card_size # height of a card is phi times width
canvaswidth = margins[0] + 4 * (w + pad[0]) + margins[0]/2
canvasheight = margins[1] + 4 * (h + pad[1]) + margins[1]/2
# build the 4x4 grid of card records; real values are dealt by initialize_cards()
for x in range(4):
    for y in range(4):
        xpos = margins[0] + x * ( w + pad[0] ) - 0.5
        ypos = margins[1] + y * ( h + pad[1] ) - 0.5
        # remember: x is horizontal offset, y is vertical offset
        cards.append( { 'location' : { 'x' : xpos, 'y' : ypos },
                        'value' : 'A',
                        'size' : { 'x' : w, 'y' : h },
                        'face' : False,
                        'color' : '#990033',
                        'fill' : '#009933',
                        'fontcolor' : 'yellow',
                        'fontsize' : fontsize,
                        'linewidth' : 2,
                        'drawn' : True,
                        })
def initialize_cards():
    """Deal a fresh board: shuffle eight pairs of values onto the 16 cards,
    turning every card face down and putting it back into play."""
    global cards
    # Two copies of 0..7 give the eight matching pairs (CodeSkulptor/Py2
    # range returns a list, so '+' concatenates).
    card_values = range(8) + range(8)
    random.shuffle(card_values)
    # Pair each card record with its shuffled value instead of indexing.
    for card, value in zip(cards, card_values):
        card['value'] = value
        card['face'] = False
        card['drawn'] = True
def draw_card( card, canvas ):
    """Render one card onto the simplegui canvas.

    Cards that are out of play ('drawn' is False) are skipped entirely.
    Face-up cards show their value; face-down cards show a diamond and '?'.
    """
    if not card['drawn']: return
    x = card['location']['x']
    y = card['location']['y']
    w = card['size']['x']
    h = card['size']['y']
    # location of this card, set of points describing a rectangle
    loc = [
        ( x, y ),
        ( x, y+h ),
        ( x+w, y+h ),
        ( x+w, y),
        ]
    # decoration on this card, set of points describing a diamond in the rectangle
    dec = [
        ( x + w/2, y ),
        ( x + w, y + h/2 ),
        ( x + w/2, y + h ),
        ( x, y + h/2 ),
        ]
    # text anchor: roughly centered (offset by a quarter of the font size)
    tx = x + w/2 - card['fontsize']/4
    ty = y + h/2 + card['fontsize']/4
    # card body (border + fill)
    canvas.draw_polygon(loc, card['linewidth'], card['color'], card['fill'])
    if card['face']:
        canvas.draw_text(str(card['value']), (tx,ty), card['fontsize'], card['fontcolor'])
    else:
        canvas.draw_polygon(dec, card['linewidth'], card['color'])
        canvas.draw_text("?", (tx, ty), card['fontsize'], card['color'])
def hide_all():
    """Debug helper: turn every card face down.

    Fix: the original body also called ``showtimer.stop()``, but the
    showtimer creation line is commented out further down the file, so
    running this helper raised a NameError.
    """
    for card in cards:
        card['face'] = False
    ## if showtimer.is_running(): showtimer.stop()  # timer flow disabled; see header
def show_all():
    """Debug helper: turn every card face up.

    Fix: the original body also called ``showtimer.stop()``, but the
    showtimer creation line is commented out further down the file, so
    running this helper raised a NameError.
    """
    for card in cards:
        card['face'] = True
    ## if showtimer.is_running(): showtimer.stop()  # timer flow disabled; see header
def hide_matches():
    """Take the currently-matched pair out of play and detect game over.

    Fix: the original body called ``matchtimer.stop()``, but the matchtimer
    creation line is commented out further down the file, so running this
    (timer-era) helper raised a NameError. The loop accumulator is also
    renamed so it no longer shadows the builtin ``any``.
    """
    # Remove both revealed cards from play and clear the selection slots.
    game['drawn']['drawn'] = False
    game['drawn'] = False
    game['match']['drawn'] = False
    game['match'] = False
    ## if matchtimer.is_running(): matchtimer.stop()  # timer flow disabled; see header
    # Game is over once no card remains in play.
    remaining = False
    for card in cards:
        remaining = remaining or card['drawn']
    if not remaining:
        if game['draws'] < game['best'] or game['best'] == 0: game['best'] = game['draws']
        game['over'] = True
        animationtimer.start()
# helper function to initialize globals
def new_game():
    """Reset all game state and redeal the cards (also the Reset handler)."""
    global animation_tick
    initialize_cards()
    game['draws'] = 0
    game['drawn'] = False
    game['match'] = False
    game['over'] = False
    ## if showtimer.is_running(): showtimer.stop()
    ## if matchtimer.is_running(): matchtimer.stop()
    # stop any in-progress "Game Over!" animation and rewind it
    if animationtimer.is_running(): animationtimer.stop()
    animation_tick = 0
def clicked(card, pos):
    """Return True when pos (an (x, y) pair) hits a selectable card.

    A card is selectable only while it is still in play ('drawn') and
    currently face down; edge coordinates count as inside.
    """
    if not card['drawn'] or card['face']:
        return False
    left = card['location']['x']
    top = card['location']['y']
    width = card['size']['x']
    height = card['size']['y']
    inside_x = left <= pos[0] <= left + width
    inside_y = top <= pos[1] <= top + height
    return inside_x and inside_y
# define event handlers
def mouseclick(pos):
    """Mouse handler: reveal the clicked card and advance the turn state."""
    # add game state logic here
    global cards, hidetimer, showtimer
    ## if showtimer.is_running() or matchtimer.is_running() or animated: return
    if animated: return
    all = True  # NOTE(review): shadows builtin all(); tracks "every card face up"
    for card in cards:
        if clicked(card,pos):
            card['face'] = True
            # A full pair was already up from the previous turn: hide the
            # two cards if they differ, then clear the selection either way
            # (matched cards stay face up).
            if game['drawn'] and game['match']:
                if game['drawn']['value'] != game['match']['value']:
                    game['drawn']['face'] = False
                    game['match']['face'] = False
                game['drawn'] = None
                game['match'] = None
            # Record this card as the first or second pick of the turn.
            if not game['drawn']:
                game['drawn'] = card
            elif not game['match']:
                game['match'] = card
                game['draws'] += 1  # a turn counts when the second card flips
        all = all and card['face']
    # Every card face up -> game won: update best score, retire the cards,
    # and start the game-over animation.
    if all:
        if game['draws'] < game['best'] or game['best'] == 0: game['best'] = game['draws']
        for card in cards:
            card['drawn'] = False
        game['over'] = True
        animationtimer.start()
# cards are logically 50x100 pixels in size (or not, I set mine differently, above)
def draw(canvas):
    """Draw handler: render all cards, refresh labels, animate end-of-game text."""
    global game_over  # NOTE(review): unused -- nothing here assigns game_over
    for card in cards:
        draw_card(card,canvas)
    label.set_text("Turns = " + str(game['draws']))
    if game['best'] > 0:
        best.set_text("Best = " + str(game['best']))
    if game['over']:
        # animation_tick doubles as the font size, so the text grows each tick
        # until it reaches twice the card font size.
        game_over_width = frame.get_canvas_textwidth(game_over_text, animation_tick)
        canvas.draw_text(game_over_text, ( canvaswidth/2 - game_over_width/2,
                                           canvasheight/2 ), animation_tick, "red" )
        if animation_tick >= fontsize*2:
            animationtimer.stop()
def animation():
    # Timer callback: each tick enlarges the "Game Over!" text drawn in draw().
    global animation_tick
    animation_tick += 1
def game_over():
    """Prematurely end the game for debugging"""
    # Retire every card and kick off the game-over animation immediately.
    for card in cards:
        card['drawn'] = False
    animationtimer.start()
    game['over'] = True
# create frame and add a button and labels
frame = simplegui.create_frame("Concentration", canvaswidth, canvasheight)
line = frame.add_label("----------------------------")
label = frame.add_label("Turns = 0")
best = frame.add_label("Best = 0")
line = frame.add_label("----------------------------")
frame.add_button("Reset", new_game)
line = frame.add_label("----------------------------")
#line = frame.add_label("----------DEBUGGING---------")
#frame.add_button("Show All", show_all)
#frame.add_button("Hide All", hide_all)
#frame.add_button("Animate", animation)
#frame.add_button("Game Over", game_over)
# register event handlers
frame.set_mouseclick_handler(mouseclick)
frame.set_draw_handler(draw)
# timers for the original timer-based flow (disabled; see header comment)
##showtimer = simplegui.create_timer(showtime,hide_all)
##matchtimer = simplegui.create_timer(matchtime,hide_matches)
animationtimer = simplegui.create_timer(10,animation) # 10 ms tick drives the end animation
# get things rolling
new_game()
frame.start()
# Always remember to review the grading rubric
|
def updateSliderValue(playSlider, sender):
    # UI callback: mirror the sender's current play position onto the slider.
    # NOTE(review): semantics/range of 'sender.playValue' are not visible
    # here -- confirm it matches the slider's 0..1 value convention.
    playSlider.value = sender.playValue
#include "z3D/z3D.h"
#include "settings.h"
#include "grog.h"
#define EnHs_ActionAfterTradeCojiro ((EnHs_ActionFunc)0x3B02C0)
// Kills the EnHs actor when its trade step is already complete, so it does
// not reappear after the player has traded with it.
void EnHs_CheckForShouldDespawn(EnHs* self) {
    // itemGetInf[2] & 0x2000 is the flag set by EnHs_SetTradedCojiroFlag
    // below; checked when the adult trade quest is shuffled.
    if ((gSettingsContext.shuffleAdultTradeQuest == SHUFFLEADULTTRADEQUEST_ON) && (gSaveContext.itemGetInf[2] & 0x2000)) {
        Actor_Kill(&self->actor);
    } else if ((gSettingsContext.shuffleAdultTradeQuest == SHUFFLEADULTTRADEQUEST_OFF) && (gSaveContext.itemGetInf[3] & 0x1)) {
        // Different save flag is consulted when the trade quest is vanilla.
        // NOTE(review): meaning of itemGetInf[3] & 0x1 is not visible here.
        Actor_Kill(&self->actor);
    }
}
// Records the Cojiro trade in the save file and switches the actor into its
// post-trade behaviour.
void EnHs_SetTradedCojiroFlag(EnHs* self, GlobalContext* globalCtx) {
    gSaveContext.itemGetInf[2] |= 0x2000; // mark "traded Cojiro" in save data
    self->actionFunc = EnHs_ActionAfterTradeCojiro; // vanilla post-trade action (raw address macro above)
    self->unk_704 |= 1; // NOTE(review): purpose of this bit is not visible here -- confirm
}
|
#ifndef _IINPUT_H_
#define _IINPUT_H_
#if defined(_MSC_VER)
#pragma once
#endif
/*
* LEGAL NOTICE
* This computer software was prepared by Battelle Memorial Institute,
* hereinafter the Contractor, under Contract No. DE-AC05-76RL0 1830
* with the Department of Energy (DOE). NEITHER THE GOVERNMENT NOR THE
* CONTRACTOR MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY
* LIABILITY FOR THE USE OF THIS SOFTWARE. This notice including this
* sentence must appear on any copies of this computer software.
*
* EXPORT CONTROL
* User agrees that the Software will not be shipped, transferred or
* exported into any country or used in any manner prohibited by the
* United States Export Administration Act or any other applicable
* export laws, restrictions or regulations (collectively the "Export Laws").
* Export of the Software may require some form of license or other
* authority from the U.S. Government, and failure to obtain such
* export control license may result in criminal liability under
* U.S. laws. In addition, if the Software is identified as export controlled
* items under the Export Laws, User represents and warrants that User
* is not a citizen, or otherwise located within, an embargoed nation
* (including without limitation Iran, Syria, Sudan, Cuba, and North Korea)
* and that User is not otherwise prohibited
* under the Export Laws from receiving the Software.
*
* Copyright 2011 Battelle Memorial Institute. All Rights Reserved.
* Distributed as open-source under the terms of the Educational Community
* License version 2.0 (ECL 2.0). http://www.opensource.org/licenses/ecl2.php
*
* For further details, see: http://www.globalchange.umd.edu/models/gcam/
*
*/
/*!
* \file iinput.h
* \ingroup Objects
* \brief IInput interface header file.
* \author <NAME>
*/
#include <string>
#include <xercesc/dom/DOMNode.hpp>
#include <iosfwd> // remove when csv output is removed.
class Tabs;
class DependencyFinder;
class ICaptureComponent;
class IInfo;
class MoreSectorInfo;
class AGHG;
class ICaptureComponent;
class NationalAccount;
class Expenditure;
// Until copyParam is fixed.
class DemandInput;
class ProductionInput;
class NodeInput;
class TradeInput;
class BuildingDemandInput;
class EnergyInput;
class NonEnergyInput;
class RenewableInput;
class InputSubsidy;
class InputTax;
class InputOMVar;
class InputOMFixed;
class InputCapital;
#include "util/base/include/ivisitable.h"
/*!
* \ingroup Objects
* \brief Represents a single generic input to a production function.
* \details
* \author <NAME>
*/
class IInput: public IVisitable {
public:
/*!
* \brief Define different type attributes of inputs. These are not mutually
* exclusive.
* \details The types are represented as bits of an integer to allow testing
* of multiple flags at once. For instance, to test if an input has
* both the ENERGY and FACTOR flags, the function hasTypeFlag is
* called as:
*
* hasTypeFlag( IInput::ENERGY | IInput::FACTOR )
*
* To add additional flags simply increment the bit-shift by one.
*/
enum Type {
//! Energy.
ENERGY = 1 << 0,
//! Material.
MATERIAL = 1 << 1,
//! Factor supply.
FACTOR = 1 << 2,
//! Land.
LAND = 1 << 3,
//! Labor.
LABOR = 1 << 4,
//! Capital.
CAPITAL = 1 << 5,
//! Primary energy.
PRIMARY = 1 << 6,
//! Secondary energy.
SECONDARY = 1 << 7,
//! Numeraire.
NUMERAIRE = 1 << 8,
//! Initialized
INITIALIZED = 1 << 9,
//! Subsidy.
SUBSIDY = 1 << 10,
//! Tax.
TAX = 1 << 11,
//! Traded Good.
TRADED = 1 << 12,
//! O&M Input
OM_VAR = 1 << 13,
//! O&M Input
OM_FIXED = 1 << 14
};
/*!
* \brief Constructor.
* \details Inlined constructor to avoid compiler problems with abstract
* base classes.
*/
IInput();
/*!
* \brief Destructor.
* \details Inlined destructor to avoid compiler problems with abstract base
* classes.
*/
virtual ~IInput();
/*!
* \brief Creates an exact copy of the input.
* \return An exact copy of the capture input.
*/
virtual IInput* clone() const = 0;
/*!
* \brief Copy parameters from another input.
* \param aInput An input from which to copy.
* \param aPeriod Period in which the input is being copied.
*/
virtual void copyParam( const IInput* aInput,
const int aPeriod ) = 0;
/*!
* \brief Returns whether the type of the object is the same as the passed
* in type.
* \param aType Type to check the object's type against.
* \return Whether the type of the object is the same as the passed in type.
*/
virtual bool isSameType( const std::string& aType ) const = 0;
/*!
* \brief Return the name of the input.
* \return The name of the input.
*/
virtual const std::string& getName() const = 0;
/*!
* \brief Return the name of the input for reporting.
* \return The name of the input for reporting.
*/
virtual const std::string& getXMLReportingName() const = 0;
/*!
* \brief Parse the data for this object starting at a given node.
* \param aNode Root node from which to parse data.
*/
virtual void XMLParse( const xercesc::DOMNode* aNode ) = 0;
/*!
* \brief Write data from this object in an XML format so that it can be
* read back in later as input.
* \param aOut Filestream to which to write.
* \param aTabs Object responsible for writing the correct number of tabs.
*/
virtual void toInputXML( std::ostream& aOut,
Tabs* aTabs ) const = 0;
/*!
* \brief Write data from this object in an XML format for debugging.
* \param aPeriod Period for which to write data.
* \param aOut Filestream to which to write.
* \param aTabs Object responsible for writing the correct number of tabs.
*/
virtual void toDebugXML( const int aPeriod,
std::ostream& aOut,
Tabs* aTabs ) const = 0;
/*!
* \brief Returns whether the input has the specified type flag set.
* \details
* \param aTypeFlag A bit mask of flags containing all flags to check for.
* \return Whether the specified type flag is set.
*/
virtual bool hasTypeFlag( const int aTypeFlag ) const = 0;
/*!
* \brief Complete the initialization of the input.
* \param aRegionName Name of the region containing the input.
* \param aSectorName Name of the sector containing the input.
* \param aSubsectorName Name of the subsector containing the input.
* \param aTechName Name of the Technology containing the input.
* \param aDependencyFinder The input dependency finder, which may be null.
* \param aTechInfo Technology's info object.
*/
virtual void completeInit( const std::string& aRegionName,
const std::string& aSectorName,
const std::string& aSubsectorName,
const std::string& aTechName,
DependencyFinder* aDependencyFinder,
const IInfo* aTechInfo ) = 0;
/*!
* \brief Initialize an input for a given period.
* \param aRegionName Name of the containing region.
* \param aSectorName Name of the containing sector.
     * \param aIsNewInvestmentPeriod Whether this is the initial investment period
* of the Technology.
* \param aIsTrade Whether this is a trade technology.
* \param aPeriod Model period.
*/
virtual void initCalc( const std::string& aRegionName,
const std::string& aSectorName,
const bool aIsNewInvestmentPeriod,
const bool aIsTrade,
const int aPeriod ) = 0;
/*!
* \brief Get the currency demand for input used.
* \param aPeriod Model period.
* \return The currency demand for each input used.
*/
virtual double getCurrencyDemand( const int aPeriod ) const = 0;
/*!
* \brief Set the currency demand for input used.
* \param aCurrencyDemand Currency demand.
* \param aRegionName Region name.
* \param aPeriod Model period.
*/
virtual void setCurrencyDemand( const double aCurrencyDemand,
const std::string& aRegionName,
const int aPeriod ) = 0;
/*!
* \brief Get the physical demand for input used.
* \param aPeriod Model period.
* \return The physical demand for each input used.
*/
virtual double getPhysicalDemand( const int aPeriod ) const = 0;
/*!
* \brief Get the carbon content of the input used.
* \param aPeriod Model period.
* \return The carbon content of each input used.
*/
virtual double getCarbonContent( const int aPeriod ) const = 0;
/*!
* \brief Set the physical demand for input used.
     * \param aPhysicalDemand Physical demand.
* \param aRegionName Region name.
* \param aPeriod Model period.
*/
virtual void setPhysicalDemand( const double aPhysicalDemand,
const std::string& aRegionName,
const int aPeriod ) = 0;
/*!
* \brief Get the price of the input in a given period.
* \param aRegionName Name of the region containing the input.
* \param aPeriod Period for which to return price.
* \return The price in the given period.
*/
virtual double getPrice( const std::string& aRegionName,
const int aPeriod ) const = 0;
/*!
* \brief Set the price of an input in a given period.
* \param aRegionName Name of the region containing the input.
* \param aPrice The new price of the input.
* \param aPeriod Model period.
*/
virtual void setPrice( const std::string& aRegionName,
const double aPrice,
const int aPeriod ) = 0;
/*!
* \brief Get the price adjustment factor.
* \details
* \return The price adjustment factor.
* \todo Remove this if possible.
*/
virtual double getPriceAdjustment() const = 0;
/*!
* \brief Get the price paid of the input in a given period.
* \param aRegionName Name of the region containing the input.
* \param aPeriod Period for which to return price paid.
* \return The price paid in the given period.
*/
virtual double getPricePaid( const std::string& aRegionName,
const int aPeriod ) const = 0;
/*!
* \brief Set the price paid of the input in a given period.
* \param aPricePaid The price paid to set.
* \param aPeriod Period for which to set price paid.
*/
virtual void setPricePaid( const double aPricePaid,
const int aPeriod ) = 0;
/*!
* \brief Calculate the price paid of the input.
* \param aRegionName Name of the region containing the input.
* \param aSectorName Name of the containing sector.
* \param aMoreSectorInfo The sector info which may contain additional costs.
* \param aGhgs GHGs which may add to the cost of the input.
     * \param aSequestrationDevice A capture component which may capture some emissions
* and thus reduce emissions tax costs.
* \param aLifetimeYears The number of years the technology will operate for.
* Used to calculate depreciation of capital.
* \param aPeriod Period for which to calculate price paid.
*/
virtual void calcPricePaid( const std::string& aRegionName,
const std::string& aSectorName,
const MoreSectorInfo* aMoreSectorInfo,
const std::vector<AGHG*>& aGhgs,
const ICaptureComponent* aSequestrationDevice,
const int aLifetimeYears,
const int aPeriod ) = 0;
/*!
* \brief Get the coefficient of the input.
* \param aPeriod Model period.
* \return Coefficient
*/
virtual double getCoefficient( const int aPeriod ) const = 0;
/*!
* \brief Set the coefficient of the input.
* \param aPeriod Model period.
* \param aCoefficient The new coefficient.
*/
virtual void setCoefficient( const double aCoefficient,
const int aPeriod ) = 0;
/*! \brief Get the conversion factor.
* \param aPeriod Model period
* \details TODO
* \return The conversion factor.
*/
virtual double getConversionFactor( const int aPeriod ) const = 0;
/*! \brief Get the emissions coefficient of the input for a given gas.
* \param aGHGName The name of the gas.
* \param aPeriod Model period
* \return The emissions coefficient for the gas.
*/
virtual double getCO2EmissionsCoefficient( const std::string& aGHGName,
const int aPeriod ) const = 0;
/*!
* \brief Calculate taxes from the input.
* \details Calculates the taxes and places them into the appropriate
* accounting structure.
* \param aRegionName Name of the region containing the input.
     * \param aNationalAccount The national account to add taxes into if available.
     * \param aExpenditure The current period expenditure to track technology expenses if available.
* \param aPeriod The period in which to calculate taxes.
* \return The amount of non-emission taxes collected.
*/
virtual double calcTaxes( const std::string& aRegionName,
NationalAccount* aNationalAccount,
Expenditure* aExpenditure,
const int aPeriod ) const = 0;
/*!
* \brief Get the current calibration quantity.
* \param aPeriod The period for which to get the calibration quantity.
* \details
* \return The current calibration quantity.
*/
virtual double getCalibrationQuantity( const int aPeriod ) const = 0;
/*!
* \brief Get the price elasticity of the input.
* \return The price elasticity.
*/
virtual double getPriceElasticity() const = 0;
/*!
* \brief Get the income elasticity of the input.
* \return The income elasticity.
*/
virtual double getIncomeElasticity() const = 0;
/*!
* \brief Get the input specific technical change.
* \param aPeriod Model period.
* \return The input specific technical change.
* \author <NAME>
*/
virtual double getTechChange( const int aPeriod ) const = 0;
/*! \brief Write out the SGM csv output file.
* \todo Remove this function.
* \param aFile Output file.
* \param aPeriod Model period.
*/
virtual void csvSGMOutputFile( std::ostream& aFile,
const int aPeriod ) const = 0;
/*!
* \brief Hook for an input to do interpolations to fill in any data that
* should be interpolated to a newly created input for the missing
* technology.
* \param aYear the year to be filled in.
* \param aPreviousYear The year of the last parsed input.
* \param aNextYear The year of the next closest parsed input.
* \param aPreviousInput The previous parsed input.
* \param aNextInput The next parsed input.
*/
virtual void doInterpolations( const int aYear, const int aPreviousYear,
const int aNextYear, const IInput* aPreviousInput,
const IInput* aNextInput ) = 0;
virtual void copyParamsInto( ProductionInput& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( DemandInput& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( NodeInput& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( TradeInput& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( EnergyInput& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( NonEnergyInput& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( InputCapital& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( InputOMFixed& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( InputOMVar& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( BuildingDemandInput& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( RenewableInput& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( InputSubsidy& aInput,
const int aPeriod ) const = 0;
virtual void copyParamsInto( InputTax& aInput,
const int aPeriod ) const = 0;
// IVisitable interface.
virtual void accept( IVisitor* aVisitor,
const int aPeriod ) const = 0;
};
// Inline function definitions.
// The constructor and destructor are defined inline here (rather than in a
// .cpp file) to avoid compiler/linker problems with this abstract base class.
inline IInput::IInput(){
}
inline IInput::~IInput(){
}
#endif // _IINPUT_H_
|
/******************************************************************************
*
* Copyright(c) 2007 - 2011 Realtek Corporation. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110, USA
*
*
******************************************************************************/
#ifndef __INC_HAL8192EPHYCFG_H__
#define __INC_HAL8192EPHYCFG_H__
/*--------------------------Define Parameters-------------------------------*/
#define LOOP_LIMIT 5
#define MAX_STALL_TIME 50 //us
#define AntennaDiversityValue 0x80 //(Adapter->bSoftwareAntennaDiversity ? 0x00:0x80)
#define MAX_TXPWR_IDX_NMODE_92S 63
#define Reset_Cnt_Limit 3
#ifdef CONFIG_PCI_HCI
#define MAX_AGGR_NUM 0x0B
#else
#define MAX_AGGR_NUM 0x07
#endif // CONFIG_PCI_HCI
/*--------------------------Define Parameters-------------------------------*/
/*------------------------------Define structure----------------------------*/
/* BB/RF related */
/*------------------------------Define structure----------------------------*/
/*------------------------Export global variable----------------------------*/
/*------------------------Export global variable----------------------------*/
/*------------------------Export Macro Definition---------------------------*/
/*------------------------Export Macro Definition---------------------------*/
/*--------------------------Exported Function prototype---------------------*/
//
// BB and RF register read/write
//
u32 PHY_QueryBBReg8192E( IN PADAPTER Adapter,
IN u32 RegAddr,
IN u32 BitMask );
void PHY_SetBBReg8192E( IN PADAPTER Adapter,
IN u32 RegAddr,
IN u32 BitMask,
IN u32 Data );
u32 PHY_QueryRFReg8192E( IN PADAPTER Adapter,
IN u8 eRFPath,
IN u32 RegAddr,
IN u32 BitMask );
void PHY_SetRFReg8192E( IN PADAPTER Adapter,
IN u8 eRFPath,
IN u32 RegAddr,
IN u32 BitMask,
IN u32 Data );
//
// Initialization related function
//
/* MAC/BB/RF HAL config */
int PHY_MACConfig8192E(IN PADAPTER Adapter );
int PHY_BBConfig8192E(IN PADAPTER Adapter );
int PHY_RFConfig8192E(IN PADAPTER Adapter );
/* RF config */
//
// BB TX Power R/W
//
void PHY_GetTxPowerLevel8192E( IN PADAPTER Adapter, OUT s32* powerlevel );
void PHY_SetTxPowerLevel8192E( IN PADAPTER Adapter, IN u8 channel );
BOOLEAN PHY_UpdateTxPowerDbm8192E( IN PADAPTER Adapter, IN int powerInDbm );
VOID
PHY_SetTxPowerIndex_8192E(
IN PADAPTER Adapter,
IN u32 PowerIndex,
IN u8 RFPath,
IN u8 Rate
);
u8
PHY_GetTxPowerIndex_8192E(
IN PADAPTER pAdapter,
IN u8 RFPath,
IN u8 Rate,
IN CHANNEL_WIDTH BandWidth,
IN u8 Channel
);
//
// Switch bandwidth for 8192S
//
VOID
PHY_SetBWMode8192E(
IN PADAPTER pAdapter,
IN CHANNEL_WIDTH Bandwidth,
IN u8 Offset
);
//
// channel switch related function
//
VOID
PHY_SwChnl8192E(
IN PADAPTER Adapter,
IN u8 channel
);
VOID
PHY_SetSwChnlBWMode8192E(
IN PADAPTER Adapter,
IN u8 channel,
IN CHANNEL_WIDTH Bandwidth,
IN u8 Offset40,
IN u8 Offset80
);
void
phy_SpurCalibration_8192E(
IN PADAPTER Adapter
);
//
// BB/MAC/RF other monitor API
//
VOID
PHY_SetRFPathSwitch_8192E(
IN PADAPTER pAdapter,
IN BOOLEAN bMain
);
VOID
storePwrIndexDiffRateOffset(
IN PADAPTER Adapter,
IN u32 RegAddr,
IN u32 BitMask,
IN u32 Data
);
/*--------------------------Exported Function prototype---------------------*/
#endif // __INC_HAL8192EPHYCFG_H__
|
#!/bin/bash
# Deregister every AMI owned by the target workspace's AWS account and delete
# the EBS snapshots backing each image.
set -eu -o pipefail
# First positional argument selects the workspace; abort with usage if absent.
workspace=${1?Usage: $0 <workspace>}
# Refresh MFA-backed AWS credentials for this workspace.
../../../bin/cicdctl creds aws-mfa "$workspace"
AWS_PROFILE=admin-${workspace}
# Intentionally expanded unquoted below so the two options word-split.
AWS_OPTS="--profile=${AWS_PROFILE} --region=us-west-2"
account_id=$(aws $AWS_OPTS sts get-caller-identity | jq -r '.Account')
# All AMI ids owned by this account, whitespace separated.
image_ids=$(aws ${AWS_OPTS} \
    ec2 describe-images \
    --owners "${account_id}" \
    --query 'Images[*].ImageId' \
    --output text
)
for image_id in $image_ids; do
    # Collect backing snapshot ids before deregistering -- they are only
    # listed while the image still exists.
    snapshot_ids=$(aws ${AWS_OPTS} \
        ec2 describe-images \
        --image-ids "$image_id" \
        --query 'Images[*].BlockDeviceMappings[*].Ebs.SnapshotId' \
        --output text
    )
    # An AMI must be deregistered before its snapshots can be deleted.
    aws ${AWS_OPTS} ec2 deregister-image --image-id "$image_id"
    for snapshot_id in $snapshot_ids; do
        aws ${AWS_OPTS} ec2 delete-snapshot --snapshot-id "$snapshot_id"
    done
done
|
#!/bin/bash
#
# Install R: dependencies, runtime, IDE, tools
#
# Intended to run unattended as root; every apt / add-apt-repository call
# therefore carries an explicit "assume yes" flag (fix: several calls were
# interactive and stalled unattended provisioning).
#
# Set params ----
RSTUDIO_SERVER_VERSION="1.4.1717"; readonly RSTUDIO_SERVER_VERSION # note: check number of latest version [1]
# R-packages dependencies ----
apt install -y gfortran libxml2-dev libssl-dev libcurl4-openssl-dev
# Install R CRAN ----
# update indices
apt update -qq
# install two helper packages we need
# (fix: '-y' was missing, which blocked on a confirmation prompt)
apt install -y --no-install-recommends software-properties-common dirmngr
# add the signing key (by Michael Rutter) for these repos
# NOTE(review): 'sudo' is redundant if this script already runs as root -- confirm
wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | sudo tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc
# add the R 4.0 repo from CRAN ('-y' skips the interactive confirmation)
add-apt-repository -y "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/"
apt install --no-install-recommends -y r-base
# validate R installation
R --version
# Install RStudio Server ----
apt install -y gdebi-core
wget https://download2.rstudio.org/server/bionic/amd64/rstudio-server-${RSTUDIO_SERVER_VERSION}-amd64.deb
# '-n' makes gdebi non-interactive (it otherwise waits for confirmation)
gdebi --quiet -n rstudio-server-${RSTUDIO_SERVER_VERSION}-amd64.deb
# validate RStudio installation
rstudio-server status
# add user for connection with RStudio via SSH tunnel (if not yet)
# NOTE(review): '<user_name>' is a placeholder and adduser prompts for a
# password -- this step is inherently interactive; replace before automation.
adduser "<user_name>"
# Install dependencies for R packages ----
# Reticulate
apt install -y libpng-dev
# For SQL Server connection support see [2-4]
# apt install -y unixodbc-dev
# apt install -y r-cran-odbc
# Set R package binaries source https://launchpad.net/~c2d4u.team/+archive/ubuntu/c2d4u4.0+
add-apt-repository -y ppa:c2d4u.team/c2d4u4.0+
apt update
# References ----
# 1. https://cran.r-project.org/
# 2. https://rstudio.com/products/rstudio/download-server/
# 3. https://db.rstudio.com/databases/microsoft-sql-server/
# 4. https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/installing-the-microsoft-odbc-driver-for-sql-server?view=sql-server-ver15#ubuntu17
# 5. https://db.rstudio.com/best-practices/drivers/#linux-debian-ubuntu
# 6. https://rtask.thinkr.fr/installation-of-r-4-0-on-ubuntu-20-04-lts-and-tips-for-spatial-packages/
|
#!/bin/bash
# Launch three reward-predictor training runs (baseline, --random, --finetune)
# for one experiment configuration.
#
# Args: 1=num_obj 2=name 3=encoder 4=batch-size 5=cmap 6=run 7=loss 8=embedding
echo "Running on $HOSTNAME"
num_obj=$1
name=$2
encoder=$3
bs=$4   # note: captured but not used below -- kept for call-site compatibility
cmap=$5
run=$6
loss=$7
emb=$8
# Fix: expansions are now quoted/braced so names containing spaces or other
# shell-special characters no longer word-split or glob.
save="models_${emb}/FixedUnobserved/${name}_${run}/"
name="${name}_${loss}_${encoder}_${num_obj}_${cmap}"
echo "$name"
python ./train_reward_predictor.py --save-folder "${save}${name}"
python ./train_reward_predictor.py --save-folder "${save}${name}" --random
python ./train_reward_predictor.py --save-folder "${save}${name}" --finetune
|
<filename>totalizer/common/src/main/java/com/meterware/totalizer/DataItem.java
package com.meterware.totalizer;
/********************************************************************************************************************
* $Id$
*
* Copyright (c) 2005, <NAME>
*
*******************************************************************************************************************/
/**
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class DataItem {
int _row;
int _col;
String _value;
public DataItem( int row, int col, Object value ) {
_row = row;
_col = col;
_value = (String) value;
}
public boolean equals( Object obj ) {
if (!getClass().equals( obj.getClass())) return false;
DataItem other = (DataItem) obj;
return _row == other._row && _col == other._col && _value.equals( other._value );
}
public int hashCode() {
return _row * 1000 + _col * 50 + _value.hashCode();
}
public String toString() {
return "Item [" + _row + "," + _col + "," + _value + "]";
}
}
|
<filename>recommend-web/public/webpack.config.js<gh_stars>0
// Webpack build for the recommend-web frontend: bundles the script-loader
// driven 'main' entry and the React 'app' entry, and copies static
// PatternFly / react-bootstrap-table assets into dist/.
var CopyWebpackPlugin = require('copy-webpack-plugin');
var path = require('path');
var webpack = require('webpack');
module.exports = {
    devtool: 'source-map',
    entry: {
        'main': __dirname + '/app/main',
        'app': __dirname + '/app/index.jsx'
    },
    output: {
        filename: '[name].js',
        path: __dirname + '/dist'
    },
    // Jquery imported into global context from main (using script-loader)
    externals: {
        // require("jquery") is external and available on the global var jQuery
        "jquery": "jQuery",
    },
    plugins: [
        // drop moment.js locale bundles to keep the build small
        new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/),
        //global jquery is provided to any webpack modules
        new webpack.ProvidePlugin({
            $: 'jquery',
            jQuery: 'jquery',
            'window.jquery': 'jquery',
            'window.jQuery': 'jquery',
        }),
        //copy patternfly assets
        new CopyWebpackPlugin([
            {
                from: { glob: './node_modules/patternfly/dist/img/*.*'},
                to: './img',
                flatten: true
            },
            {
                from: { glob: './node_modules/patternfly/dist/fonts/*.*'},
                to: './fonts',
                flatten: true
            },
            {
                from: { glob: './node_modules/patternfly/dist/css/*.*'},
                to: './css',
                flatten: true
            },
            {
                from: { glob: './node_modules/react-bootstrap-table/css/*.*'},
                to: './css',
                flatten: true
            },
            {
                from: { glob: './app/main.css'},
                to: './css',
                flatten: true
            }
        ])
    ],
    module: {
        // webpack 1.x style 'loaders' (renamed 'rules' in webpack >= 2)
        loaders: [
            {
                loader: 'babel-loader',
                test: /\.(jsx|js)$/,
                exclude: /node_modules/,
                query: {
                    presets: ["react", "es2015", "stage-2"]
                },
            },
            {
                // lint with the same parser presets used for transpilation
                loader: 'eslint-loader',
                test: /\.(jsx|js)$/,
                exclude: /node_modules/,
                query: {
                    presets: ["react", "es2015", "stage-2"]
                }
            },
            {
                // files named *.exec.js are executed in global scope on import
                test: /\.exec\.js$/,
                use: [ 'script-loader' ]
            }
        ],
    },
};
|
const path = require('path');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const StandardCssLoader = [
{ loader: MiniCssExtractPlugin.loader },
{
loader: 'css-loader',
options: {
modules: { auto: true, localIdentName: '[local]_MODULE_[contenthash:6]' }
}
}
];
module.exports = {
mode: 'development',
entry: './src/index.tsx',
target: 'web',
output: {
filename: '[name].bundle.[contenthash:6].js',
path: path.resolve(__dirname, 'dist')
},
resolve: {
extensions: ['.ts', '.tsx', '.js'],
alias: {
'@': path.join(__dirname, 'src')
},
},
module: {
rules: [
{
test: /\.tsx?$/,
use: 'ts-loader',
exclude: ['/node-modules/']
},
{
test: /\.css$/,
use: StandardCssLoader,
},
{
test: /\.less$/,
use: StandardCssLoader.concat(
{ loader: 'less-loader' }
)
},
{
test: /\.md$/,
use: [
{
loader: "html-loader"
},
{
loader: "markdown-loader",
options: {
/* your options here , see marked get more*/
}
}
]
}
]
},
plugins: [
new HtmlWebpackPlugin({
template: path.join(__dirname, 'src', 'index.html'),
}),
new MiniCssExtractPlugin({
filename: "[name].[contenthash:6].css",
chunkFilename: "chunk.[id].[contenthash:6].css",
}),
],
devtool: 'inline-source-map',
devServer: {
contentBase: path.join(__dirname, 'dist'),
compress: false,
port: 9090
}
} |
#!/bin/bash
# Kaldi 'chain' recipe (Switchboard) training an interleaved TDNN+LSTM model.
# Unlike 1a this setup interleaves the TDNN and LSTM layers.
#System tdnn_lstm_1a_ld5tdnn_lstm_1b_ld5
#WER on train_dev(tg) 13.42 13.00
#WER on train_dev(fg) 12.42 12.03
#WER on eval2000(tg) 15.7 15.3
#WER on eval2000(fg) 14.2 13.9
#Final train prob -0.0538088 -0.056294
#Final valid prob -0.0800484-0.0813322
#Final train prob (xent) -0.7603 -0.777787
#Final valid prob (xent) -0.949909 -0.939146
set -e
# configs for 'chain'
# NOTE(review): stage defaults to 12 (xconfig creation), so the data-prep /
# alignment / tree stages below only run when --stage is passed explicitly —
# confirm this is intentional and not a leftover debug setting.
stage=12
train_stage=-10
get_egs_stage=-10
speed_perturb=true
dir=exp/chain/tdnn_lstm_1b # Note: _sp will get added to this if $speed_perturb == true.
decode_iter=
decode_dir_affix=
# training options
leftmost_questions_truncate=-1
chunk_width=150
chunk_left_context=40
chunk_right_context=0
xent_regularize=0.025
label_delay=5
# decode options
extra_left_context=50
extra_right_context=0
frames_per_chunk=
remove_egs=false
common_egs_dir=
affix=
# End configuration section.
echo "$0 $@" # Print the command line for logging
. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh
if ! cuda-compiled; then
cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi
# The iVector-extraction and feature-dumping parts are the same as the standard
# nnet3 setup, and you can skip them by setting "--stage 8" if you have already
# run those things.
suffix=
if [ "$speed_perturb" == "true" ]; then
suffix=_sp
fi
# Compose the experiment directory name from affix, label delay and sp suffix.
dir=$dir${affix:+_$affix}
if [ $label_delay -gt 0 ]; then dir=${dir}_ld$label_delay; fi
dir=${dir}$suffix
train_set=train_nodup$suffix
ali_dir=exp/tri4_ali_nodup$suffix
treedir=exp/chain/tri5_7d_tree$suffix
lang=data/lang_chain_2y
# if we are using the speed-perturbed data we need to generate
# alignments for it.
local/nnet3/run_ivector_common.sh --stage $stage \
--speed-perturb $speed_perturb \
--generate-alignments $speed_perturb || exit 1;
if [ $stage -le 9 ]; then
# Get the alignments as lattices (gives the CTC training more freedom).
# use the same num-jobs as the alignments
nj=$(cat exp/tri4_ali_nodup$suffix/num_jobs) || exit 1;
steps/align_fmllr_lats.sh --nj $nj --cmd "$train_cmd" data/$train_set \
data/lang exp/tri4 exp/tri4_lats_nodup$suffix
rm exp/tri4_lats_nodup$suffix/fsts.*.gz # save space
fi
if [ $stage -le 10 ]; then
# Create a version of the lang/ directory that has one state per phone in the
# topo file. [note, it really has two states.. the first one is only repeated
# once, the second one has zero or more repeats.]
rm -rf $lang
cp -r data/lang $lang
silphonelist=$(cat $lang/phones/silence.csl) || exit 1;
nonsilphonelist=$(cat $lang/phones/nonsilence.csl) || exit 1;
# Use our special topology... note that later on may have to tune this
# topology.
steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >$lang/topo
fi
if [ $stage -le 11 ]; then
# Build a tree using our new topology.
steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \
--leftmost-questions-truncate $leftmost_questions_truncate \
--context-opts "--context-width=2 --central-position=1" \
--cmd "$train_cmd" 7000 data/$train_set $lang $ali_dir $treedir
fi
if [ $stage -le 12 ]; then
echo "$0: creating neural net configs using the xconfig parser";
num_targets=$(tree-info $treedir/tree |grep num-pdfs|awk '{print $2}')
learning_rate_factor=$(echo "print 0.5/$xent_regularize" | python)
mkdir -p $dir/configs
cat <<EOF > $dir/configs/network.xconfig
input dim=100 name=ivector
input dim=40 name=input
# please note that it is important to have input layer with the name=input
# as the layer immediately preceding the fixed-affine-layer to enable
# the use of short notation for the descriptor
fixed-affine-layer name=lda input=Append(-2,-1,0,1,2,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat
# the first splicing is moved before the lda layer, so no splicing here
relu-renorm-layer name=tdnn1 dim=1024
relu-renorm-layer name=tdnn2 input=Append(-1,0,1) dim=1024
relu-renorm-layer name=tdnn3 input=Append(-1,0,1) dim=1024
# check steps/libs/nnet3/xconfig/lstm.py for the other options and defaults
lstmp-layer name=lstm1 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3
relu-renorm-layer name=tdnn4 input=Append(-3,0,3) dim=1024
relu-renorm-layer name=tdnn5 input=Append(-3,0,3) dim=1024
lstmp-layer name=lstm2 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3
relu-renorm-layer name=tdnn6 input=Append(-3,0,3) dim=1024
relu-renorm-layer name=tdnn7 input=Append(-3,0,3) dim=1024
lstmp-layer name=lstm3 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3
## adding the layers for chain branch
output-layer name=output input=lstm3 output-delay=$label_delay include-log-softmax=false dim=$num_targets max-change=1.5
# adding the layers for xent branch
# This block prints the configs for a separate output that will be
# trained with a cross-entropy objective in the 'chain' models... this
# has the effect of regularizing the hidden parts of the model. we use
# 0.5 / args.xent_regularize as the learning rate factor- the factor of
# 0.5 / args.xent_regularize is suitable as it means the xent
# final-layer learns at a rate independent of the regularization
# constant; and the 0.5 was tuned so as to make the relative progress
# similar in the xent and regular final layers.
output-layer name=output-xent input=lstm3 output-delay=$label_delay dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5
EOF
steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/
fi
if [ $stage -le 13 ]; then
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
utils/create_split_dir.pl \
/export/b0{5,6,7,8}/$USER/kaldi-data/egs/swbd-$(date +'%m_%d_%H_%M')/s5c/$dir/egs/storage $dir/egs/storage
fi
steps/nnet3/chain/train.py --stage $train_stage \
--cmd "$decode_cmd" \
--feat.online-ivector-dir exp/nnet3/ivectors_${train_set} \
--feat.cmvn-opts "--norm-means=false --norm-vars=false" \
--chain.xent-regularize $xent_regularize \
--chain.leaky-hmm-coefficient 0.1 \
--chain.l2-regularize 0.00005 \
--chain.apply-deriv-weights false \
--chain.lm-opts="--num-extra-lm-states=2000" \
--trainer.num-chunk-per-minibatch 64 \
--trainer.frames-per-iter 1200000 \
--trainer.max-param-change 2.0 \
--trainer.num-epochs 4 \
--trainer.optimization.shrink-value 0.99 \
--trainer.optimization.num-jobs-initial 3 \
--trainer.optimization.num-jobs-final 16 \
--trainer.optimization.initial-effective-lrate 0.001 \
--trainer.optimization.final-effective-lrate 0.0001 \
--trainer.optimization.momentum 0.0 \
--trainer.deriv-truncate-margin 8 \
--egs.stage $get_egs_stage \
--egs.opts "--frames-overlap-per-eg 0" \
--egs.chunk-width $chunk_width \
--egs.chunk-left-context $chunk_left_context \
--egs.chunk-right-context $chunk_right_context \
--egs.dir "$common_egs_dir" \
--cleanup.remove-egs $remove_egs \
--feat-dir data/${train_set}_hires \
--tree-dir $treedir \
--lat-dir exp/tri4_lats_nodup$suffix \
--dir $dir || exit 1;
fi
if [ $stage -le 14 ]; then
# Note: it might appear that this $lang directory is mismatched, and it is as
# far as the 'topo' is concerned, but this script doesn't read the 'topo' from
# the lang directory.
utils/mkgraph.sh --self-loop-scale 1.0 data/lang_sw1_tg $dir $dir/graph_sw1_tg
fi
decode_suff=sw1_tg
graph_dir=$dir/graph_sw1_tg
if [ $stage -le 15 ]; then
# Decode options default to the training chunk settings when unset.
[ -z $extra_left_context ] && extra_left_context=$chunk_left_context;
[ -z $extra_right_context ] && extra_right_context=$chunk_right_context;
[ -z $frames_per_chunk ] && frames_per_chunk=$chunk_width;
iter_opts=
if [ ! -z $decode_iter ]; then
iter_opts=" --iter $decode_iter "
fi
for decode_set in train_dev eval2000; do
(
steps/nnet3/decode.sh --acwt 1.0 --post-decode-acwt 10.0 \
--nj 50 --cmd "$decode_cmd" $iter_opts \
--extra-left-context $extra_left_context \
--extra-right-context $extra_right_context \
--frames-per-chunk "$frames_per_chunk" \
--online-ivector-dir exp/nnet3/ivectors_${decode_set} \
$graph_dir data/${decode_set}_hires \
$dir/decode_${decode_set}${decode_dir_affix:+_$decode_dir_affix}_${decode_suff} || exit 1;
# NOTE(review): $has_fisher is never assigned in this script — presumably
# exported by cmd.sh/path.sh or the calling environment; verify before
# relying on the fsh_fg rescoring branch.
if $has_fisher; then
steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" \
data/lang_sw1_{tg,fsh_fg} data/${decode_set}_hires \
$dir/decode_${decode_set}${decode_dir_affix:+_$decode_dir_affix}_sw1_{tg,fsh_fg} || exit 1;
fi
) &
done
fi
wait;
exit 0;
|
#!/bin/bash
# Run both test suites inside the sparrow_cloud unittest image; the script's
# exit status is non-zero if either suite fails (&& short-circuits).
docker run sparrow_cloud:unittest /bin/bash -c \
'py.test tests && py.test access_control'
def last(collection):
    """Return the final element of ``collection``, or ``None`` when empty.

    For dicts the final *key* (in insertion order) is returned; for any
    other iterable, the final element.
    """
    if isinstance(collection, dict):
        if not collection:
            return None
        # Iterating a dict yields its keys in insertion order.
        return list(collection)[-1]
    materialized = list(collection)
    try:
        return materialized[-1]
    except IndexError:
        # Empty iterable — mirror the dict branch and return None.
        return None
def rest(collection):
    """Return everything but the first element of ``collection`` as a tuple.

    Dicts yield ``(key, value)`` pairs for all but the first key (insertion
    order); other iterables yield their elements past the first.
    """
    if isinstance(collection, dict):
        keys = list(collection)
        return tuple((key, collection[key]) for key in keys[1:])
    return tuple(list(collection)[1:])
<filename>algorithms/021-merge-two-sorted-lists.js
/**
* 21. Merge Two Sorted Lists
* https://leetcode.com/problems/merge-two-sorted-lists
*
* Merge two sorted linked lists and return it as a new list. The new list should be
* made by splicing together the nodes of the first two lists.
*
* Author: <NAME>
* Date: 7/5/2017
*/
/**
* Definition for singly-linked list.
* function ListNode(val) {
* this.val = val;
* this.next = null;
* }
*/
/**
* @param {ListNode} l1
* @param {ListNode} l2
* @return {ListNode}
*/
var mergeTwoLists = function(l1, l2) {
  // Iterative merge: a dummy node anchors the result, `tail` is the
  // insertion point. Ties keep l1's node first (stable).
  const dummy = new ListNode();
  let tail = dummy;
  while (l1 && l2) {
    if (l1.val <= l2.val) {
      tail.next = l1;
      l1 = l1.next;
    } else {
      tail.next = l2;
      l2 = l2.next;
    }
    tail = tail.next;
  }
  // Whichever list remains is already sorted; splice it on wholesale.
  tail.next = l1 || l2;
  return dummy.next;
};
// Use recursive call
var mergeTwoListsV2 = function(a, b) {
  // Recursive merge: the smaller head owns the recursion. On ties (and when
  // a.val >= b.val) b leads, recursing with the arguments swapped.
  if (!a || !b) return a || b;
  if (a.val < b.val) {
    a.next = mergeTwoListsV2(a.next, b);
    return a;
  }
  b.next = mergeTwoListsV2(b.next, a);
  return b;
};
|
#!/bin/bash -ex
# Copyright 2021 The XLS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Checks that the given input file(s) contains the string 'clk'.
# grep exits 1 when no match is found, which (with -e) fails the script.
grep clk "$@"
|
<filename>src/screens/Dataviz/index.js
import React, { useState, useEffect } from 'react';
import { useDispatch } from 'react-redux';
import LayoutWrapper from 'sharedUI/LayoutWrapper';
import Dataviz from './components/Dataviz';
import TabBar from './components/TabBar';
import ArrowButton from './components/ArrowButton';
import InfoText from './components/InfoText';
import { resetTabIndex } from 'states/actions/datavizAction';
import { SIZES, STRINGS } from 'configs';
const DatavizScreen = ({ route }) => {
const { W, H, H_SHRINK } = SIZES.DATAVIZ;
const { UP, DOWN } = STRINGS.ARROW;
const [datavizShrunk, setDataVizShrunk] = useState(false);
const onPressButton = () => setDataVizShrunk(!datavizShrunk);
const dispatch = useDispatch();
useEffect(() => {
return () => {
resetTabIndex(dispatch);
};
}, []);
return (
<LayoutWrapper screenName={route.name}>
<Dataviz width={W} height={datavizShrunk ? H_SHRINK : H} />
<TabBar />
<ArrowButton
pressHandler={onPressButton}
iconType={datavizShrunk ? UP : DOWN}
/>
{datavizShrunk && <InfoText />}
</LayoutWrapper>
);
};
export default DatavizScreen;
|
<reponame>Zovube/Tasks-solutions
#include<bits/stdc++.h>
using namespace std;
int main() {
    // Lookup table: the n-th entry is the exponent p of the n-th known
    // Mersenne prime 2^p - 1 (values taken verbatim from the original).
    static const long long kAnswers[] = { 2, 3, 5, 7, 13, 17, 19, 31, 61, 89, 107, 127, 521, 607, 1279, 2203, 2281, 3217, 4253, 4423, 9689, 9941, 11213, 19937, 21701, 23209, 44497, 86243, 110503, 132049, 216091, 756839, 859433, 1257787, 1398269, 2976221, 3021377, 6972593 };
    int testCount;
    std::cin >> testCount;
    // Each query is a 1-based index into the table.
    while (testCount-- > 0) {
        int index;
        std::cin >> index;
        std::cout << kAnswers[index - 1] << std::endl;
    }
    return 0;
}
|
#!/bin/sh
# Fix: the shebang was missing its leading '#' ("!/bin/sh"), which made the
# first line execute as a (nonexistent) command instead of selecting a shell.
# Rebuild the distributable and refresh the installed copy one directory up.
make clean
make -j8 dist
rm -rf ../Southpole
cp -r dist/Southpole ..
make clean
#!/usr/bin/env bash
# compile proto file to PersonProtos in target/generated-sources
# (clean first so stale generated sources are removed)
mvn clean generate-sources
#!/usr/bin/env sh
# acme.sh DNS API hooks for PointHQ (pointhq.com).
# Credentials, set in the environment on first use (examples below); they are
# persisted to the acme.sh account conf after the first successful call:
#PointHQ_Key="sdfsdfsdfljlbjkljlkjsdfoiwje"
#
#PointHQ_Email="xxxx@sss.com"
PointHQ_Api="https://api.pointhq.com"
######## Public functions #####################
#Usage: add _acme-challenge.www.domain.com "XKrxpRBosdIKFzxW_CT3KLZNf6q0HG9i01zxXp5CPBs"
# Create a TXT record for the ACME dns-01 challenge.
# $1: full record name, $2: TXT value. Returns 0 on success.
dns_pointhq_add() {
fulldomain=$1
txtvalue=$2
# Load credentials from env or from the saved account conf.
PointHQ_Key="${PointHQ_Key:-$(_readaccountconf_mutable PointHQ_Key)}"
PointHQ_Email="${PointHQ_Email:-$(_readaccountconf_mutable PointHQ_Email)}"
if [ -z "$PointHQ_Key" ] || [ -z "$PointHQ_Email" ]; then
PointHQ_Key=""
PointHQ_Email=""
_err "You didn't specify a PointHQ API key and email yet."
_err "Please create the key and try again."
return 1
fi
if ! _contains "$PointHQ_Email" "@"; then
_err "It seems that the PointHQ_Email=$PointHQ_Email is not a valid email address."
_err "Please check and retry."
return 1
fi
#save the api key and email to the account conf file.
_saveaccountconf_mutable PointHQ_Key "$PointHQ_Key"
_saveaccountconf_mutable PointHQ_Email "$PointHQ_Email"
_debug "First detect the root zone"
# _get_root sets $_domain (zone) and $_sub_domain (record prefix).
if ! _get_root "$fulldomain"; then
_err "invalid domain"
return 1
fi
_debug _sub_domain "$_sub_domain"
_debug _domain "$_domain"
_info "Adding record"
# Success is confirmed by the API echoing the record back in $response.
if _pointhq_rest POST "zones/$_domain/records" "{\"zone_record\": {\"name\":\"$_sub_domain\",\"record_type\":\"TXT\",\"data\":\"$txtvalue\",\"ttl\":3600}}"; then
if printf -- "%s" "$response" | grep "$fulldomain" >/dev/null; then
_info "Added, OK"
return 0
else
_err "Add txt record error."
return 1
fi
fi
_err "Add txt record error."
return 1
}
#fulldomain txtvalue
# Remove the TXT record created for the challenge.
# $1: full record name, $2: TXT value (unused for lookup here).
#fulldomain txtvalue
dns_pointhq_rm() {
fulldomain=$1
txtvalue=$2
PointHQ_Key="${PointHQ_Key:-$(_readaccountconf_mutable PointHQ_Key)}"
PointHQ_Email="${PointHQ_Email:-$(_readaccountconf_mutable PointHQ_Email)}"
if [ -z "$PointHQ_Key" ] || [ -z "$PointHQ_Email" ]; then
PointHQ_Key=""
PointHQ_Email=""
_err "You didn't specify a PointHQ API key and email yet."
_err "Please create the key and try again."
return 1
fi
_debug "First detect the root zone"
if ! _get_root "$fulldomain"; then
_err "invalid domain"
return 1
fi
_debug _sub_domain "$_sub_domain"
_debug _domain "$_domain"
_debug "Getting txt records"
_pointhq_rest GET "zones/${_domain}/records?record_type=TXT&name=$_sub_domain"
# A valid listing is a JSON array; anything else is an API error.
if ! printf "%s" "$response" | grep "^\[" >/dev/null; then
_err "Error"
return 1
fi
if [ "$response" = "[]" ]; then
_info "No records to remove."
else
# Take the first matching record id from the JSON listing.
record_id=$(printf "%s\n" "$response" | _egrep_o "\"id\":[^,]*" | cut -d : -f 2 | tr -d \" | head -n 1)
_debug "record_id" "$record_id"
if [ -z "$record_id" ]; then
_err "Can not get record id to remove."
return 1
fi
if ! _pointhq_rest DELETE "zones/$_domain/records/$record_id"; then
_err "Delete record error."
return 1
fi
# Function's return status is whether the API reported OK.
_contains "$response" '"status":"OK"'
fi
}
#################### Private functions below ##################################
#_acme-challenge.www.domain.com
#returns
# _sub_domain=_acme-challenge.www
# _domain=domain.com
# Find the registered zone for a record name by probing successively shorter
# suffixes against the account's zone list.
# Sets: _sub_domain (record prefix), _domain (zone). Returns 1 if none match.
_get_root() {
domain=$1
i=2
p=1
while true; do
# Candidate zone: drop the first i-1 labels.
h=$(printf "%s" "$domain" | cut -d . -f $i-100)
_debug h "$h"
if [ -z "$h" ]; then
#not valid
return 1
fi
if ! _pointhq_rest GET "zones"; then
return 1
fi
if _contains "$response" "\"name\":\"$h\"" >/dev/null; then
_sub_domain=$(printf "%s" "$domain" | cut -d . -f 1-$p)
_domain=$h
return 0
fi
p=$i
i=$(_math "$i" + 1)
done
# NOTE(review): unreachable — the while-true loop only exits via return.
return 1
}
# Perform an authenticated PointHQ API call.
# $1: HTTP method, $2: endpoint path, $3: JSON body (non-GET only).
# Sets $response; returns non-zero on transport failure.
_pointhq_rest() {
m=$1
ep="$2"
data="$3"
_debug "$ep"
# HTTP Basic auth from email:key, as the PointHQ API expects.
_pointhq_auth=$(printf "%s:%s" "$PointHQ_Email" "$PointHQ_Key" | _base64)
export _H1="Authorization: Basic $_pointhq_auth"
export _H2="Content-Type: application/json"
export _H3="Accept: application/json"
if [ "$m" != "GET" ]; then
_debug data "$data"
response="$(_post "$data" "$PointHQ_Api/$ep" "" "$m")"
else
response="$(_get "$PointHQ_Api/$ep")"
fi
if [ "$?" != "0" ]; then
_err "error $ep"
return 1
fi
_debug2 response "$response"
return 0
}
|
<reponame>israelvallejos2/bus-mall<filename>JS/app.js<gh_stars>0
'use strict'
// DOM handles for the voting UI: image container, results button, the three
// candidate images, and the results list.
const myContainer = document.querySelector('section');
const myButton = document.querySelector('section + div');
let imageOne = document.querySelector('section img:first-child');
let imageTwo = document.querySelector('section img:nth-child(2)');
let imageThree = document.querySelector('section img:nth-child(3)');
const results = document.querySelector('ul');
// Voting state: catalogue of products, click tally, and the session cap.
let allProducts = [];
let clicks = 0;
const clicksAllowed = 25;
// A voteable product image; registers itself in the global catalogue.
function Product(name, fileExtension = 'jpg') {
  this.name = name;
  this.src = 'IMG/' + name + '.' + fileExtension;
  this.likes = this.views = 0;
  allProducts.push(this);
}
// Populate the catalogue; each constructor call self-registers in allProducts.
new Product('bag', 'jpg');
new Product('banana');
new Product('bathroom');
new Product('boots')
new Product('breakfast');
new Product('bubblegum');
new Product('chair');
new Product('cthulhu');
new Product('dog-duck');
new Product('dragon');
new Product('pen');
new Product('pet-sweep');
new Product('scissors');
new Product('shark');
new Product('sweep');
new Product('tauntaun');
new Product('unicorn');
new Product('water-can');
new Product('wine-glass');
// Return a uniformly random index into the product catalogue.
function selectRandomProduct() {
  const catalogueSize = allProducts.length;
  return Math.floor(Math.random() * catalogueSize);
}
// Development smoke check: log one random index at load time.
console.log(selectRandomProduct());
// Show three mutually distinct random products and count a view for each.
function renderProduct() {
  let first = selectRandomProduct();
  let second = selectRandomProduct();
  while (second === first) {
    second = selectRandomProduct();
  }
  // Fix: the third pick previously had no dedup loop, so the same product
  // could appear twice (as image three duplicating image one or two).
  let third = selectRandomProduct();
  while (third === first || third === second) {
    third = selectRandomProduct();
  }
  imageOne.src = allProducts[first].src;
  imageOne.alt = allProducts[first].name;
  allProducts[first].views++;
  imageTwo.src = allProducts[second].src;
  imageTwo.alt = allProducts[second].name;
  allProducts[second].views++;
  imageThree.src = allProducts[third].src;
  imageThree.alt = allProducts[third].name;
  allProducts[third].views++;
}
// Record a vote for the clicked product, re-render, and after the allowed
// number of clicks swap the click handler for the results button.
function handleProductClick(event) {
  if (event.target === myContainer) {
    alert('Please click on an image');
    // Fix: previously fell through, so a stray click on the container was
    // counted toward the 25-click limit and triggered a re-render.
    return;
  }
  clicks++;
  let clickedProduct = event.target.alt;
  // Credit the like to the product whose name matches the image's alt text.
  for (let i = 0; i < allProducts.length; i++) {
    if (clickedProduct === allProducts[i].name) {
      allProducts[i].likes++;
      break;
    }
  }
  renderProduct();
  if (clicks === clicksAllowed) {
    myContainer.removeEventListener('click', handleProductClick);
    myButton.addEventListener('click', handleButtonClick);
    myButton.className = 'clicks-allowed';
  }
}
// Render the per-product view/click tallies as list items.
function handleButtonClick() {
for (let i = 0; i < allProducts.length; i++) {
let li = document.createElement('li')
li.textContent = `${allProducts[i].name} had ${allProducts[i].views} view and was clicked ${allProducts[i].likes} times.`;
results.appendChild(li);
};
}
// Initial render plus the voting click handler.
renderProduct();
myContainer.addEventListener('click', handleProductClick);
#!/usr/bin/env bash
# VVV provisioner: generate a local root CA plus per-site TLS certificates.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Prefer config.yml (VVV 3+) over the legacy vvv-custom.yml.
VVV_CONFIG=/vagrant/vvv-custom.yml
if [[ -f /vagrant/config.yml ]]; then
VVV_CONFIG=/vagrant/config.yml
fi
codename=$(lsb_release --codename | cut -f2)
CERTIFICATES_DIR="/srv/certificates"
if [[ $codename == "trusty" ]]; then # VVV 2 uses Ubuntu 14 LTS trusty
echo " ! WARNING: Unsupported Ubuntu 14 detected! Switching certificate folder, please upgrade to VVV 3+"
CERTIFICATES_DIR="/vagrant/certificates"
fi
DEFAULT_CERT_DIR="${CERTIFICATES_DIR}/default"
CA_DIR="${CERTIFICATES_DIR}/ca"
ROOT_CA_DAYS=397 # MacOS/Apple won't accept Root CA's that last longer than this
SITE_CERTIFICATE_DAYS=200
# Fix a bug that happens if you run the provisioner sometimes
# (openssl needs a seeded ~/.rnd file on some systems)
if [[ ! -e ~/.rnd ]]; then
echo " * Generating Random Number for cert generation..."
openssl rand -out ~/.rnd -hex 256 2>&1
fi
# YAML accessors over the VVV config; each falls back to its arguments when
# the key is absent (shyaml errors are suppressed).
get_sites() {
local value=$(shyaml keys sites 2> /dev/null < ${VVV_CONFIG})
echo "${value:-$@}"
}
# First host entry for a site.
get_host() {
local value=$(shyaml get-value "sites.${1}.hosts.0" 2> /dev/null < ${VVV_CONFIG})
echo "${value:-$@}"
}
# All host entries for a site.
get_hosts() {
local value=$(shyaml get-values "sites.${1}.hosts" 2> /dev/null < ${VVV_CONFIG})
echo "${value:-$@}"
}
# Install the VVV root CA into the VM's trusted certificate store (idempotent).
install_root_certificate() {
mkdir -p /usr/share/ca-certificates/vvv
if [[ ! -f /usr/share/ca-certificates/vvv/ca.crt ]]; then
echo " * Adding root certificate to the VM"
cp -f "${CA_DIR}/ca.crt" /usr/share/ca-certificates/vvv/ca.crt
echo " * Updating loaded VM certificates"
update-ca-certificates --fresh
fi
}
# Create the VVV certificate authority used to sign all site certificates.
create_root_certificate() {
  if [ ! -d "${CA_DIR}" ]; then
    echo " * Setting up VVV Certificate Authority"
    mkdir -p "${CA_DIR}"
  fi
  # Fix: this guard previously tested "${DEFAULT_CERT_DIR}/dev.key", so the
  # CA key's existence was judged by an unrelated file — a present default
  # site key with a deleted CA key made the `openssl req` below fail. Guard
  # on the CA key itself.
  if [[ ! -e "${CA_DIR}/ca.key" ]]; then
    echo " * Generating key root certificate"
    openssl genrsa \
      -out "${CA_DIR}/ca.key" \
      2048 &>/dev/null
  fi
  # Re-issue the self-signed root cert on every provision so it never ages
  # past ROOT_CA_DAYS (Apple rejects longer-lived roots).
  openssl req \
    -x509 -new \
    -nodes \
    -key "${CA_DIR}/ca.key" \
    -sha256 \
    -days $ROOT_CA_DAYS \
    -config "${DIR}/openssl-ca.conf" \
    -out "${CA_DIR}/ca.crt"
}
# Generate (once) the private key and CSR for the default vvv.test cert.
setup_default_certificate_key_csr() {
echo " * Generating key and CSR for vvv.test"
if [[ ! -e "${DEFAULT_CERT_DIR}/dev.key" ]]; then
echo " * Generating key for: 'vvv.test'"
openssl genrsa \
-out "${DEFAULT_CERT_DIR}/dev.key" \
2048 &>/dev/null
fi
if [[ ! -e "${DEFAULT_CERT_DIR}/dev.csr" ]]; then
echo " * Generating CSR for: 'vvv.test'"
openssl req \
-new \
-key "${DEFAULT_CERT_DIR}/dev.key" \
-out "${DEFAULT_CERT_DIR}/dev.csr" \
-subj "/CN=vvv.test/C=GB/ST=Test Province/L=Test Locality/O=VVV/OU=VVV" &>/dev/null
fi
}
# Sign (and re-sign on every provision) the default certificate for
# vvv.test/vvv.local with the VVV CA.
create_default_certificate() {
  echo " * Setting up default Certificate for vvv.test and vvv.local"
  mkdir -p "${DEFAULT_CERT_DIR}"
  setup_default_certificate_key_csr
  echo " * Removing and renewing the default certificate"
  # Fix: use -f so the very first provision (no dev.crt yet) does not print
  # a "No such file or directory" error from rm.
  rm -f "${DEFAULT_CERT_DIR}/dev.crt"
  openssl x509 \
    -req \
    -in "${DEFAULT_CERT_DIR}/dev.csr" \
    -CA "${CA_DIR}/ca.crt" \
    -CAkey "${CA_DIR}/ca.key" \
    -CAcreateserial \
    -out "${DEFAULT_CERT_DIR}/dev.crt" \
    -days $SITE_CERTIFICATE_DAYS \
    -sha256 \
    -extfile "${DIR}/openssl-default-cert.conf" &>/dev/null
}
# Point nginx's expected cert/key paths at the default certificate.
install_default_certificate() {
echo " * Symlinking default server certificate and key"
rm -rf /etc/nginx/server-2.1.0.crt
rm -rf /etc/nginx/server-2.1.0.key
echo " * Symlinking ${DEFAULT_CERT_DIR}/dev.crt to /etc/nginx/server-2.1.0.crt"
ln -s "${DEFAULT_CERT_DIR}/dev.crt" /etc/nginx/server-2.1.0.crt
echo " * Symlinking ${DEFAULT_CERT_DIR}/dev.key to /etc/nginx/server-2.1.0.key"
ln -s "${DEFAULT_CERT_DIR}/dev.key" /etc/nginx/server-2.1.0.key
}
# Generate (once) the private key and CSR for one site.
# $1: site name from the VVV config.
setup_site_key_csr() {
SITE=${1}
# Escape dots so the site name can be used as a shyaml key path.
SITE_ESCAPED="${SITE//./\\.}"
COMMON_NAME=$(get_host "${SITE_ESCAPED}")
SITE_CERT_DIR="${CERTIFICATES_DIR}/${SITE}"
mkdir -p "${SITE_CERT_DIR}"
if [[ ! -e "${SITE_CERT_DIR}/dev.key" ]]; then
echo " * Generating key for: '${SITE}'"
openssl genrsa \
-out "${SITE_CERT_DIR}/dev.key" \
2048 &>/dev/null
fi
if [[ ! -e "${SITE_CERT_DIR}/dev.csr" ]]; then
echo " * Generating CSR for: '${SITE}'"
openssl req \
-new \
-key "${SITE_CERT_DIR}/dev.key" \
-out "${SITE_CERT_DIR}/dev.csr" \
-subj "/CN=${COMMON_NAME//\\/}/C=GB/ST=Test Province/L=Test Locality/O=VVV/OU=VVV" &>/dev/null
fi
}
# Re-sign the certificate for one site, covering every configured host and a
# wildcard for each. $1: site name from the VVV config.
regenerate_site_certificate() {
  SITE=${1}
  SITE_CERT_DIR="${CERTIFICATES_DIR}/${SITE}"
  SITE_ESCAPED="${SITE//./\\.}"
  COMMON_NAME=$(get_host "${SITE_ESCAPED}")
  setup_site_key_csr "${SITE}"
  echo " * Generating new certificate for: '${SITE}'"
  rm -f "${SITE_CERT_DIR}/dev.crt"
  # Copy over the site conf stub then append the domains
  cp -f "${DIR}/openssl-site-stub.conf" "${SITE_CERT_DIR}/openssl.conf"
  HOSTS=$(get_hosts "${SITE_ESCAPED}")
  I=0
  for DOMAIN in ${HOSTS}; do
    ((I++))
    echo "DNS.${I} = ${DOMAIN//\\/}" >> "${SITE_CERT_DIR}/openssl.conf"
    ((I++))
    echo "DNS.${I} = *.${DOMAIN//\\/}" >> "${SITE_CERT_DIR}/openssl.conf"
  done
  openssl x509 \
    -req \
    -in "${SITE_CERT_DIR}/dev.csr" \
    -CA "${CA_DIR}/ca.crt" \
    -CAkey "${CA_DIR}/ca.key" \
    -CAcreateserial \
    -out "${SITE_CERT_DIR}/dev.crt" \
    -days $SITE_CERTIFICATE_DAYS \
    -sha256 \
    -extfile "${SITE_CERT_DIR}/openssl.conf" &>/dev/null
  # Fix (consistency): was a hard-coded `-days 200`; use the
  # SITE_CERTIFICATE_DAYS setting declared at the top of the script, matching
  # create_default_certificate.
}
# Regenerate a certificate for every site listed in the VVV config.
process_site_certificates() {
echo " * Generating site certificates"
for SITE in $(get_sites); do
regenerate_site_certificate "${SITE}"
done
echo " * Finished generating site certificates"
}
# Main sequence: CA first, then the default cert, then per-site certs.
create_root_certificate
install_root_certificate
create_default_certificate
install_default_certificate
process_site_certificates
echo " * Finished generating TLS certificates"
|
# WARNING: writes the raw disk image directly over /dev/sdc, destroying its
# contents — double-check the target device (lsblk) before running.
sudo dd if=dist/image.hdd of=/dev/sdc
|
from typing import Mapping, Optional
from aws_cdk import aws_lambda, aws_lambda_python
from aws_cdk.core import Construct
from .backend import BACKEND_DIRECTORY
from .bundled_code import bundled_code
from .common import LOG_LEVEL
from .lambda_config import LAMBDA_TIMEOUT, PYTHON_RUNTIME
class BundledLambdaFunction(aws_lambda.Function):
    """Lambda function whose code is bundled from a backend sub-directory.

    The handler is derived from ``directory`` as
    ``<BACKEND_DIRECTORY>.<directory>.task.lambda_handler``, and the shared
    botocore layer is attached to every instance.
    """

    def __init__(
        self,
        scope: Construct,
        construct_id: str,
        *,
        directory: str,
        extra_environment: Optional[Mapping[str, str]],
        botocore_lambda_layer: aws_lambda_python.PythonLayerVersion,
    ):
        # Base environment plus any caller-supplied overrides/additions.
        merged_environment = {"LOGLEVEL": LOG_LEVEL, **(extra_environment or {})}
        super().__init__(
            scope,
            construct_id,
            code=bundled_code(directory),
            handler=f"{BACKEND_DIRECTORY}.{directory}.task.lambda_handler",
            runtime=PYTHON_RUNTIME,
            environment=merged_environment,
            layers=[botocore_lambda_layer],  # type: ignore[list-item]
            timeout=LAMBDA_TIMEOUT,
        )
|
# Validate each list element against the inclusive numeric range declared by
# the property spec, reporting an ERROR with the element's source line.
# NOTE(review): assumes `v` is a sequence of numbers and `prop_spec.range` is
# a two-element (min, max) pair — confirm against the spec loader; the "%d"
# formatting also implies integer values.
for i, vv in enumerate(v):
    if not prop_spec.range[0] <= vv <= prop_spec.range[1]:
        report.add(
            'ERROR',
            el.sourceline,
            "%s : List value [%d] out of range (%d not in %s)" % (prop_str, i, vv, prop_spec.range)
        )
<gh_stars>0
"use strict";
// Load plugins
const autoprefixer = require("autoprefixer");
const browserSync = require("browser-sync");
// const cp = require("child_process");
const cssnano = require("cssnano");
// const del = require("del");
// const eslint = require("gulp-eslint");
const gulp = require("gulp");
// const imagemin = require("gulp-imagemin");
// const newer = require("gulp-newer");
const plumber = require("gulp-plumber");
const postcss = require("gulp-postcss");
const rename = require("gulp-rename");
const sass = require("gulp-sass");
const pump = require('pump');
const minify = require('gulp-minify');
const paths = {
proxy:'http://patka.local.com',
scripts: {
src: '../dev-js/**/*.js',
dest: '../js/'
},
scss: {
src: '../scss/*.scss',
watch:'../scss/**/*.scss',
dest: '../css/'
},
views: {
src: '../../app/views/**/*.php'
}
};
const server = browserSync.create();
// BrowserSync
// Start BrowserSync proxying the local site on port 3000.
function serve(done) {
server.init({
proxy : paths.proxy,
port: 3000
});
done();
}
// BrowserSync Reload
function reload(done) {
server.reload();
done();
}
// Plumber error handler for the CSS pipeline.
// NOTE(review): re-registering gulp.watch from inside an error handler adds a
// new watcher on every error — presumably intended to keep watching after a
// Sass failure; verify this doesn't accumulate duplicate watchers.
function errorHandler(error){
console.log(error.message);
gulp.watch(paths.scss.watch, gulp.series(css));
}
// Plumber error handler for the JS pipeline: log and continue.
function errorHandlerJS(error){
console.log(error.message);
}
// CSS task
// Compile Sass, autoprefix + minify via PostCSS, then stream to BrowserSync.
function css() {
return gulp
.src(paths.scss.src)
.pipe(plumber(errorHandler))
.pipe(sass({ outputStyle: "expanded" }))
// .pipe(gulp.dest(paths.scss.dest))
// .pipe(rename({ suffix: ".min" }))
.pipe(postcss([autoprefixer(), cssnano()]))
.pipe(gulp.dest(paths.scss.dest))
// ], errorHandler);
.pipe(server.stream());
}
// Minify dev JS into the public js/ folder; "-debug.js" sources are skipped
// from output (noSource) and the minified file keeps the plain ".js" name.
function js() {
return gulp
.src(paths.scripts.src)
.pipe(plumber(errorHandlerJS))
.pipe(minify({
ext:{
src:'-debug.js',
min:'.js'
},
noSource:true
}))
.pipe(gulp.dest(paths.scripts.dest))
// ], errorHandler);
// .pipe(browsersync.stream());
}
// define complex tasks
//const build = gulp.series(clean, gulp.parallel(css, js));
// Watchers: JS rebuild + reload; PHP views reload only; Sass recompiles
// (css streams its own injection, so no explicit reload).
function watchFiles(){
gulp.watch(paths.scripts.src, gulp.series(js,reload));
gulp.watch(paths.views.src, gulp.series(reload));
gulp.watch(paths.scss.watch, gulp.series(css));
}
// Default task: start BrowserSync, then watch.
const dev = gulp.series(serve,watchFiles);
// export tasks;
exports.default = dev;
// export tasks
// // exports.images = images;
// exports.css = css;
// exports.js = js;
// exports.jekyll = jekyll;
// exports.clean = clean;
// exports.build = build;
// exports.watch = watch;
// exports.default = build;
|
use std::collections::HashMap;
use std::io;
/// A single bank account: number, owner, and current balance.
struct BankAccount {
    account_number: u32,
    owner_name: String,
    balance: f64,
}

impl BankAccount {
    /// Build an account with the given number, owner and starting balance.
    fn new(account_number: u32, owner_name: String, initial_balance: f64) -> BankAccount {
        BankAccount {
            account_number,
            owner_name,
            balance: initial_balance,
        }
    }

    /// Credit `amount` to the balance.
    fn deposit(&mut self, amount: f64) {
        self.balance += amount;
    }

    /// Debit `amount`; fails without mutating when funds are insufficient.
    fn withdraw(&mut self, amount: f64) -> Result<(), &'static str> {
        if amount <= self.balance {
            self.balance -= amount;
            return Ok(());
        }
        Err("Insufficient funds")
    }

    /// Current balance.
    fn get_balance(&self) -> f64 {
        self.balance
    }
}
fn main() {
let mut accounts: HashMap<u32, BankAccount> = HashMap::new();
loop {
let mut input = String::new();
io::stdin().read_line(&mut input).expect("Failed to read input");
let tokens: Vec<&str> = input.trim().split_whitespace().collect();
match tokens[0] {
"create" => {
let account_number: u32 = tokens[1].parse().expect("Invalid account number");
let owner_name = tokens[2].to_string();
let initial_balance: f64 = tokens[3].parse().expect("Invalid initial balance");
let account = BankAccount::new(account_number, owner_name, initial_balance);
accounts.insert(account_number, account);
}
"deposit" => {
let account_number: u32 = tokens[1].parse().expect("Invalid account number");
let amount: f64 = tokens[2].parse().expect("Invalid amount");
if let Some(account) = accounts.get_mut(&account_number) {
account.deposit(amount);
} else {
println!("Account not found");
}
}
"withdraw" => {
let account_number: u32 = tokens[1].parse().expect("Invalid account number");
let amount: f64 = tokens[2].parse().expect("Invalid amount");
if let Some(account) = accounts.get_mut(&account_number) {
if let Err(err) = account.withdraw(amount) {
println!("{}", err);
}
} else {
println!("Account not found");
}
}
"balance" => {
let account_number: u32 = tokens[1].parse().expect("Invalid account number");
if let Some(account) = accounts.get(&account_number) {
println!("Balance: {}", account.get_balance());
} else {
println!("Account not found");
}
}
"exit" => break,
_ => println!("Invalid command"),
}
}
} |
#!/bin/sh
#
# Copyright (C) 2018 rosysong@rosinson.com
#
. /lib/nft-qos/core.sh
# Look up the nftables rule handle for an IP in a monitor chain.
# $1: family, $2: chain (download/upload), $3: IP address.
# NOTE(review): assumes the handle is always field 11 of `nft -a` output —
# fragile across nft versions; verify against the installed nft.
qosdef_monitor_get_ip_handle() { # <family> <chain> <ip>
	echo $(nft -a list chain $1 nft-qos-monitor $2 2>/dev/null | grep $3 | awk '{print $11}')
}
# Add per-IP traffic counters (idempotent: skipped when a rule already exists).
qosdef_monitor_add() { # <mac> <ip> <hostname>
	handle_dl=$(qosdef_monitor_get_ip_handle $NFT_QOS_INET_FAMILY download $2)
	[ -z "$handle_dl" ] && nft add rule $NFT_QOS_INET_FAMILY nft-qos-monitor download ip daddr $2 counter
	handle_ul=$(qosdef_monitor_get_ip_handle $NFT_QOS_INET_FAMILY upload $2)
	[ -z "$handle_ul" ] && nft add rule $NFT_QOS_INET_FAMILY nft-qos-monitor upload ip saddr $2 counter
}
# Remove the per-IP counter rules, if present.
# NOTE(review): `nft delete handle <h>` omits the usual
# `rule <family> <table> <chain>` qualifiers — confirm this form is accepted
# by the targeted nft version.
qosdef_monitor_del() { # <mac> <ip> <hostname>
	local handle_dl handle_ul
	handle_dl=$(qosdef_monitor_get_ip_handle $NFT_QOS_INET_FAMILY download $2)
	handle_ul=$(qosdef_monitor_get_ip_handle $NFT_QOS_INET_FAMILY upload $2)
	[ -n "$handle_dl" ] && nft delete handle $handle_dl
	[ -n "$handle_ul" ] && nft delete handle $handle_ul
}
# init qos monitor
# Create the monitor table and its upload/download filter chains. Hook
# placement flips when no bridge is present (routed vs bridged topologies).
qosdef_init_monitor() {
	local hook_ul="input" hook_dl="postrouting"
	[ -z "$NFT_QOS_HAS_BRIDGE" ] && {
		hook_ul="postrouting"
		hook_dl="input"
	}
	nft add table $NFT_QOS_INET_FAMILY nft-qos-monitor
	nft add chain $NFT_QOS_INET_FAMILY nft-qos-monitor upload { type filter hook $hook_ul priority 0\; }
	nft add chain $NFT_QOS_INET_FAMILY nft-qos-monitor download { type filter hook $hook_dl priority 0\; }
}
|
# Copyright: 2020 Masatake YAMATO
# License: GPL-2
# Test that ctags emits the TAG_PROC_CWD pseudo-tag; runs from / so the cwd
# recorded in the pseudo-tag is predictable.
CTAGS=$1
. ../utils.sh
exit_if_win32 $CTAGS
O="--quiet --options=NONE "
(
cd /
${CTAGS} $O \
--extras=+p --pseudo-tags=TAG_PROC_CWD \
-o - \
input.c
)
|
#!/bin/bash
#
# To be run from one directory above this script.
## The input is some directory containing the switchboard-1 release 2
## corpus (LDC97S62). Note: we don't make many assumptions about how
## you unpacked this. We are just doing a "find" command to locate
## the .sph files.
# for example /mnt/matylda2/data/SWITCHBOARD_1R2
. path.sh
# The parts of the output of this that will be needed are
# [in data/local/dict/ ]
# lexicon.txt
# extra_questions.txt
# nonsilence_phones.txt
# optional_silence.txt
# silence_phones.txt
#check existing directories
[ $# != 0 ] && echo "Usage: local/fisher_prepare_dict.sh" && exit 1;
dir=data/local/dict_nosp
mkdir -p $dir
echo "Getting CMU dictionary"
# NOTE(review): network + svn dependency; the checkout is pinned only to
# trunk, so upstream cmudict changes can alter the produced lexicon.
svn co https://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict $dir/cmudict
# silence phones, one per line.
for w in sil laughter noise oov; do echo $w; done > $dir/silence_phones.txt
echo sil > $dir/optional_silence.txt
# For this setup we're discarding stress.
# (sed strips the 0/1/2 stress digits; perl removes CR line endings.)
cat $dir/cmudict/cmudict.0.7a.symbols | sed s/[0-9]//g | \
tr '[A-Z]' '[a-z]' | perl -ane 's:\r::; print;' | sort | uniq > $dir/nonsilence_phones.txt
# An extra question will be added by including the silence phones in one class.
cat $dir/silence_phones.txt| awk '{printf("%s ", $1);} END{printf "\n";}' > $dir/extra_questions.txt || exit 1;
# Lowercase the dictionary, collapse alternate-pronunciation markers like
# "word(2)" to plain "word", and drop stress digits.
grep -v ';;;' $dir/cmudict/cmudict.0.7a | tr '[A-Z]' '[a-z]' | \
perl -ane 'if(!m:^;;;:){ s:(\S+)\(\d+\) :$1 :; s: : :; print; }' | \
sed s/[0-9]//g | sort | uniq > $dir/lexicon1_raw_nosil.txt || exit 1;
# Add prons for laughter, noise, oov
for w in `grep -v sil $dir/silence_phones.txt`; do
echo "[$w] $w"
done | cat - $dir/lexicon1_raw_nosil.txt > $dir/lexicon2_raw.txt || exit 1;
# This is just for diagnostics:
cat data/train_fisher/text | \
awk '{for (n=2;n<=NF;n++){ count[$n]++; } } END { for(n in count) { print count[n], n; }}' | \
sort -nr > $dir/word_counts
# between lexicon2_raw and lexicon3_expand we limit it to the words seen in
# the Fisher data, and also expand the vocab for acronyms like c._n._n. and other
# underscore-containing things.
# The embedded perl: build word->pron map from lexicon2_raw on stdin, then
# for each training word either emit its known pron, or, for underscore-
# joined compounds, concatenate the prons of the parts when all are known.
cat $dir/lexicon2_raw.txt | \
perl -e 'while(<STDIN>) { @A=split; $w = shift @A; $pron{$w} = join(" ", @A); }
($w) = @ARGV; open(W, "<$w") || die "Error opening word-counts from $w";
while(<W>) { # reading in words we saw in training data..
($c, $w) = split;
if (defined $pron{$w}) {
print "$w $pron{$w}\n";
} else {
@A = split("_", $w);
if (@A > 1) {
$this_pron = "";
$pron_ok = 1;
foreach $a (@A) {
if (defined($pron{$a})) { $this_pron = $this_pron . "$pron{$a} "; }
else { $pron_ok = 0; print STDERR "Not handling word $w, count is $c\n"; last; }
}
if ($pron_ok) { $this_pron =~ s/\s+$//; $new_pron{$w} = $this_pron; } }}}
foreach $w (keys %new_pron) { print "$w $new_pron{$w}\n"; } ' \
$dir/word_counts > $dir/lexicon3_expand_v1.txt || exit 1;
cat $dir/word_counts | awk '{print $2}' > $dir/fisher_word_list
# Keep only lexicon entries for words actually seen in the Fisher data.
filter_scp.pl $dir/fisher_word_list $dir/lexicon2_raw.txt > $dir/lexicon3_expand_v2.txt
cat $dir/lexicon3_expand_v1.txt $dir/lexicon3_expand_v2.txt | sort -u > $dir/lexicon3_expand.txt
# Hand-added extras: "mm" and the unknown-word token.
cat $dir/lexicon3_expand.txt \
<( echo "mm m"
echo "<unk> oov" ) > $dir/lexicon4_extra.txt
cp $dir/lexicon4_extra.txt $dir/lexicon_fisher.txt
# Diagnostic: list training words that never made it into the lexicon.
awk '{print $1}' $dir/lexicon_fisher.txt | \
perl -e '($word_counts)=@ARGV;
open(W, "<$word_counts")||die "opening word-counts $word_counts";
while(<STDIN>) { chop; $seen{$_}=1; }
while(<W>) {
($c,$w) = split;
if (!defined $seen{$w}) { print; }
} ' $dir/word_counts > $dir/oov_counts.txt
echo "*Highest-count OOVs are:"
head -n 20 $dir/oov_counts.txt
# Preparing SWBD acronymns from its dictionary
srcdir=data/local/train_swbd # This is where we downloaded some stuff..
dir=data/local/dict_nosp
mkdir -p $dir
srcdict=$srcdir/swb_ms98_transcriptions/sw-ms98-dict.text
# assume swbd_p1_data_prep.sh was done already.
[ ! -f "$srcdict" ] && echo "No such file $srcdict" && exit 1;
cp $srcdict $dir/lexicon0.txt || exit 1;
patch <local/dict.patch $dir/lexicon0.txt || exit 1;
#(2a) Dictionary preparation:
# Pre-processing (remove comments)
grep -v '^#' $dir/lexicon0.txt | awk 'NF>0' | sort > $dir/lexicon1_swbd.txt || exit 1;
cat $dir/lexicon1_swbd.txt | awk '{ for(n=2;n<=NF;n++){ phones[$n] = 1; }} END{for (p in phones) print p;}' | \
grep -v SIL > $dir/nonsilence_phones_msu.txt || exit 1;
local/swbd1_map_words.pl -f 1 $dir/lexicon1_swbd.txt | sort | uniq \
> $dir/lexicon2_swbd.txt || exit 1;
cp conf/MSU_single_letter.txt $dir/MSU_single_letter.txt
# NOTE(review): bare "python" — confirm which interpreter version
# format_acronyms_dict.py expects on the target system.
python local/format_acronyms_dict.py -i $dir/lexicon2_swbd.txt \
-o1 $dir/acronyms_lex_swbd.txt -o2 $dir/acronyms_lex_swbd_ori.txt \
-L $dir/MSU_single_letter.txt -M $dir/acronyms_raw.map
cat $dir/acronyms_raw.map | sort -u > $dir/acronyms_swbd.map
# Map the MSU phone set used by the SWBD acronym prons onto CMU phones.
cat $dir/acronyms_lex_swbd.txt |\
sed 's/ ax/ ah/g' |\
sed 's/ en/ ah n/g' |\
sed 's/ el/ ah l/g' \
> $dir/acronyms_lex_swbd_cmuphones.txt
cat $dir/acronyms_lex_swbd_cmuphones.txt $dir/lexicon_fisher.txt | sort -u > $dir/lexicon.txt
echo Prepared input dictionary and phone-sets for Switchboard phase 1.
utils/validate_dict_dir.pl $dir
|
import { Users } from '../../modules/users/users.model';
import { NotFound } from '../../common/exceptions';
import { postsService } from '../../modules/posts/posts.service';
import { Answers } from './answers.model';
/** Payload required to create an answer. */
interface IAnswersData {
body: string; // answer text
postId: number; // id of the post being answered
userId: number; // id of the authoring user
}
/** Service encapsulating persistence operations for post answers. */
class AnswersService {
  /**
   * Loads the post with the given id, throwing NotFound when it does not
   * exist. Shared guard for every operation that requires an existing post
   * (previously duplicated in createOne and findAnswersByPostId).
   */
  private async requirePost(postId: number) {
    const post = await postsService.findOneById(postId);
    if (!post) throw new NotFound(`Can't find post with id ${postId}`);
    return post;
  }

  /**
   * Creates and persists an answer attached to an existing post.
   * @throws NotFound when the referenced post does not exist.
   */
  public async createOne(answersData: IAnswersData): Promise<Answers> {
    const { body, postId, userId } = answersData;
    await this.requirePost(postId);
    const answer: Answers = new Answers({
      body,
      postId,
      userId,
    });
    return answer.save();
  }

  /**
   * Returns all answers for a post, oldest first, each including the
   * author's public profile fields.
   * @throws NotFound when the referenced post does not exist.
   */
  public async findAnswersByPostId(postId: number): Promise<Answers[] | null> {
    await this.requirePost(postId);
    const answers = await Answers.findAll({
      where: { postId },
      order: [['id', 'ASC']],
      include: [
        {
          model: Users,
          as: 'user',
          attributes: ['id', 'name', 'email', 'avatar'],
        },
      ],
    });
    return answers;
  }
}
/** Shared singleton instance. */
export const answersService = new AnswersService();
|
#!/bin/sh
# Init-style control script for a Unicorn application server
# (start/stop/restart/upgrade/force-stop/reopen-logs).
set -e
##################################
##### SET THESE VARIABLES ########
##################################
AS_USER=xrates-staging # name of user on server
APP_ROOT=/home/xrates-staging/Xrates-Staging/current # path to application current folder
# update the name of the environment at '-E _____' to production, staging, etc
CMD="cd $APP_ROOT; bundle exec unicorn -D -c $APP_ROOT/config/deploy/staging/unicorn.rb -E staging"
##################################
PID=$APP_ROOT/tmp/pids/unicorn.pid
# Seconds to wait for the old master during `upgrade` (overridable; default 60).
TIMEOUT=${TIMEOUT-60}
set -u
# Pidfile left behind by the old master during a USR2 binary upgrade.
OLD_PIN="$PID.oldbin"
# Send signal $1 to the Unicorn master recorded in the pidfile; succeeds
# only when the pidfile exists and is non-empty.
sig () {
	test -s "$PID" && kill -$1 $(cat "$PID")
}
# Send signal $1 to the old master (during a USR2 upgrade) via the .oldbin
# pidfile. Quote the path for consistency with sig() above, so the test
# behaves correctly even if the path ever contains spaces.
oldsig () {
	test -s "$OLD_PIN" && kill -$1 $(cat "$OLD_PIN")
}
# Execute the given command string, switching to $AS_USER first unless the
# script is already running as that user.
run () {
	if [ "$(id -un)" != "$AS_USER" ]; then
		su -c "$1" - $AS_USER
	else
		eval $1
	fi
}
# Dispatch on the requested action.
case "$1" in
start)
# Signal 0 probes whether a master is already alive before starting.
sig 0 && echo >&2 "Already running" && exit 0
run "$CMD"
;;
stop)
# QUIT asks Unicorn for a graceful shutdown (workers finish in-flight requests).
sig QUIT && exit 0
echo >&2 "Not running"
;;
force-stop)
# TERM is the quick, non-graceful shutdown.
sig TERM && exit 0
echo >&2 "Not running"
;;
restart|reload)
# HUP makes the master re-read config and gracefully restart workers.
sig HUP && echo reloaded OK && exit 0
echo >&2 "Couldn't reload, starting '$CMD' instead"
run "$CMD"
;;
upgrade)
# USR2 re-execs a new master binary; once the new master answers signal 0,
# ask the old one (via the .oldbin pidfile) to QUIT, then poll until the
# old pidfile disappears or TIMEOUT seconds elapse.
if sig USR2 && sleep 2 && sig 0 && oldsig QUIT
then
n=$TIMEOUT
while test -s $OLD_PIN && test $n -ge 0
do
printf '.' && sleep 1 && n=$(( $n - 1 ))
done
echo
if test $n -lt 0 && test -s $OLD_PIN
then
echo >&2 "$OLD_PIN still exists after $TIMEOUT seconds"
exit 1
fi
exit 0
fi
echo >&2 "Couldn't upgrade, starting '$CMD' instead"
run "$CMD"
;;
reopen-logs)
# USR1 makes Unicorn reopen log files (logrotate hook).
sig USR1
;;
*)
echo >&2 "Usage: $0 <start|stop|restart|upgrade|force-stop|reopen-logs>"
exit 1
;;
esac
|
#! /bin/bash
# zookeeper distributed control script: fans the given zkServer.sh action
# ("start|stop|restart|status" etc., passed as $1) out to every host via ssh.
hosts=(box0 box1 box2)
# Quote the array expansion so host entries never word-split or glob.
for i in "${hosts[@]}"
do
	ssh jinzhongxu@$i "source /etc/profile;/home/jinzhongxu/zookeeper/bin/zkServer.sh $1" &
done
# Wait for all background ssh sessions so the script only exits after every
# remote command has completed, instead of returning immediately.
wait
|
# rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: blocks
#
# id :integer not null, primary key
# created_at :datetime not null
# updated_at :datetime not null
# blocked_id :integer not null, indexed
# user_id :integer not null, indexed
#
# Indexes
#
# index_blocks_on_blocked_id (blocked_id)
# index_blocks_on_user_id (user_id)
#
# Foreign Keys
#
# fk_rails_42f8051bae (user_id => users.id)
# fk_rails_c7fbc30382 (blocked_id => users.id)
#
# rubocop:enable Metrics/LineLength
require 'rails_helper'
# Model spec for Block — a join record marking that a user has blocked
# another user (see the schema annotation above).
RSpec.describe Block, type: :model do
  it { is_expected.to belong_to(:user) }
  it { is_expected.to belong_to(:blocked).class_name('User') }
end
|
<gh_stars>0
package composition
import (
"context"
"fmt"
"path/filepath"
"strconv"
"strings"
"testing"
"time"
"github.com/ash2k/stager"
"github.com/atlassian/ctrl"
cond_v1 "github.com/atlassian/ctrl/apis/condition/v1"
"github.com/atlassian/smith/pkg/resources"
"github.com/atlassian/smith/pkg/specchecker"
"github.com/atlassian/smith/pkg/store"
"github.com/atlassian/voyager"
"github.com/atlassian/voyager/pkg/apis/composition/v1"
comp_v1 "github.com/atlassian/voyager/pkg/apis/composition/v1"
form_v1 "github.com/atlassian/voyager/pkg/apis/formation/v1"
compclient_fake "github.com/atlassian/voyager/pkg/composition/client/fake"
compUpdater "github.com/atlassian/voyager/pkg/composition/updater"
formclient_fake "github.com/atlassian/voyager/pkg/formation/client/fake"
formInf "github.com/atlassian/voyager/pkg/formation/informer"
"github.com/atlassian/voyager/pkg/k8s"
k8s_testing "github.com/atlassian/voyager/pkg/k8s/testing"
"github.com/atlassian/voyager/pkg/k8s/updater"
"github.com/atlassian/voyager/pkg/options"
"github.com/atlassian/voyager/pkg/synchronization/api"
"github.com/atlassian/voyager/pkg/util/testutil"
"github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
"go.uber.org/zap/zaptest"
core_v1 "k8s.io/api/core/v1"
rbac_v1 "k8s.io/api/rbac/v1"
meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/clock"
core_v1inf "k8s.io/client-go/informers/core/v1"
k8s_fake "k8s.io/client-go/kubernetes/fake"
kube_testing "k8s.io/client-go/testing"
"k8s.io/client-go/tools/cache"
)
// Naming scheme of the on-disk fixtures driving the data-driven tests:
// each <prefix>.sd.input.yaml is paired with <prefix>.ld.*.yaml location
// descriptors and a <prefix>.sd.output.yaml expected result.
const (
fixtureServiceDescriptorInputSuffix = ".sd.input.yaml"
fixtureLocationDescriptorsGlobSuffix = ".ld.*.yaml"
fixtureServiceDescriptorOutputSuffix = ".sd.output.yaml"
fixtureGlob = "*" + fixtureServiceDescriptorInputSuffix
)
// testHandleProcessResult loads the ServiceDescriptor input fixture plus its
// matching LocationDescriptor fixtures, invokes Controller.handleProcessResult,
// and compares the ServiceDescriptor update recorded by the fake client
// against the expected output fixture.
func testHandleProcessResult(t *testing.T, filePrefix string) {
sd := &comp_v1.ServiceDescriptor{}
err := testutil.LoadIntoStructFromTestData(filePrefix+fixtureServiceDescriptorInputSuffix, sd)
require.NoError(t, err)
ldFiles, err := filepath.Glob(filepath.Join(testutil.FixturesDir, filePrefix+fixtureLocationDescriptorsGlobSuffix))
require.NoError(t, err)
results := make([]*FormationObjectResult, 0, len(ldFiles))
for _, ldFile := range ldFiles {
// Bunch of string splitting
_, filename := filepath.Split(ldFile)
// Load a list of location descriptors actually..
ld := &form_v1.LocationDescriptor{}
err := testutil.LoadIntoStructFromTestData(filename, ld)
require.NoError(t, err)
// Recover the service name/label encoded in the LD's namespace name.
serviceName, serviceLabel := deconstructNamespaceName(ld.Namespace)
results = append(results, &FormationObjectResult{
ld: ld,
namespace: &core_v1.Namespace{
TypeMeta: meta_v1.TypeMeta{
Kind: k8s.NamespaceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Labels: map[string]string{
voyager.ServiceNameLabel: serviceName,
voyager.ServiceLabelLabel: string(serviceLabel),
},
},
},
})
}
// Fixed fake clock so condition timestamps in the output are deterministic
// and comparable against the stored fixture.
testClock := clock.NewFakeClock(time.Date(2015, 10, 15, 9, 30, 0, 0, time.FixedZone("New_York", int(-4*time.Hour/time.Second))))
tc := testCase{
sd: sd,
clock: testClock,
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, _, err := cntrlr.handleProcessResult(ctx.Logger, sd.Name, sd, results, false, false, nil)
assert.NoError(t, err)
// Compare the outputs
fileName := filePrefix + fixtureServiceDescriptorOutputSuffix
sdExpected := &comp_v1.ServiceDescriptor{}
err = testutil.LoadIntoStructFromTestData(fileName, sdExpected)
require.NoError(t, err)
outputSd, ok := findUpdatedServiceDescriptor(tc.compFake.Actions())
require.True(t, ok)
testutil.ObjectCompareContext(t, testutil.FileName(fileName), sdExpected, outputSd)
},
}
tc.run(t)
}
// TestCompositionWithTestData discovers every *.sd.input.yaml fixture and
// runs testHandleProcessResult for it as a subtest named after the prefix.
func TestCompositionWithTestData(t *testing.T) {
	t.Parallel()
	files, errRead := filepath.Glob(filepath.Join(testutil.FixturesDir, fixtureGlob))
	require.NoError(t, errRead)
	// Sanity check that we actually loaded something otherwise bazel might eat
	// our tests
	if len(files) == 0 {
		// Fixed typo in the failure message ("fine" -> "find").
		require.FailNow(t, "Expected some test fixtures, but didn't find any")
	}
	for _, file := range files {
		_, filename := filepath.Split(file)
		sdFileName := strings.Split(filename, ".")
		// Strip the three-part ".sd.input.yaml" suffix to get the prefix.
		resultFilePrefix := strings.Join(sdFileName[:len(sdFileName)-3], ".")
		t.Run(resultFilePrefix, func(t *testing.T) {
			testHandleProcessResult(t, resultFilePrefix)
		})
	}
}
// TestCreatesNamespaceNoLabel verifies that processing an SD with a single
// label-less location creates a namespace named exactly after the SD, with
// the service name/label labels and a controlling owner reference to the SD.
func TestCreatesNamespaceNoLabel(t *testing.T) {
	t.Parallel()
	tc := testCase{
		sd: &v1.ServiceDescriptor{
			TypeMeta: meta_v1.TypeMeta{},
			ObjectMeta: meta_v1.ObjectMeta{
				Name:       "test-sd",
				UID:        "the-sd-uid",
				Finalizers: []string{FinalizerServiceDescriptorComposition},
			},
			Spec: v1.ServiceDescriptorSpec{
				Locations: []v1.ServiceDescriptorLocation{
					locationNoLabel(),
				},
			},
		},
		test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
			// The Process error was previously discarded; assert on it so a
			// failing controller surfaces here instead of as a missing action.
			_, err := cntrlr.Process(ctx)
			require.NoError(t, err)
			ns, ok := findCreatedNamespace(tc.mainFake.Actions())
			require.True(t, ok)
			assert.Equal(t, tc.sd.Name, ns.Name, "Should have name set to sd name")
			expectedLabels := map[string]string{
				voyager.ServiceNameLabel:  tc.sd.Name,
				voyager.ServiceLabelLabel: "",
			}
			assert.Equal(t, expectedLabels, ns.GetLabels())
			ownerRefs := ns.GetOwnerReferences()
			assert.Len(t, ownerRefs, 1, "Should have owner reference set")
			sdOwnerRef := ownerRefs[0]
			assert.True(t, *sdOwnerRef.BlockOwnerDeletion)
			assert.True(t, *sdOwnerRef.Controller)
			assert.Equal(t, tc.sd.Kind, sdOwnerRef.Kind)
			assert.Equal(t, tc.sd.Name, sdOwnerRef.Name)
			assert.Equal(t, tc.sd.UID, sdOwnerRef.UID)
		},
	}
	tc.run(t)
}
// TestCreatesNamespaceWithLabel verifies that a labelled location yields a
// namespace named "<sd>--<label>" with matching labels and owner reference.
func TestCreatesNamespaceWithLabel(t *testing.T) {
	t.Parallel()
	tc := testCase{
		sd: &v1.ServiceDescriptor{
			TypeMeta: meta_v1.TypeMeta{},
			ObjectMeta: meta_v1.ObjectMeta{
				Name:       "test-sd",
				UID:        "the-sd-uid",
				Finalizers: []string{FinalizerServiceDescriptorComposition},
			},
			Spec: v1.ServiceDescriptorSpec{
				Locations: []v1.ServiceDescriptorLocation{
					locationWithLabel(),
				},
			},
		},
		test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
			// The Process error was previously discarded; assert on it for
			// consistency with the other controller tests.
			_, err := cntrlr.Process(ctx)
			require.NoError(t, err)
			ns, ok := findCreatedNamespace(tc.mainFake.Actions())
			require.True(t, ok)
			assert.Equal(t, fmt.Sprintf("%s--%s", tc.sd.Name, tc.sd.Spec.Locations[0].Label),
				ns.Name, "Should have name set to sd name combined with location label")
			expectedLabels := map[string]string{
				voyager.ServiceNameLabel:  tc.sd.Name,
				voyager.ServiceLabelLabel: string(tc.sd.Spec.Locations[0].Label),
			}
			assert.Equal(t, expectedLabels, ns.GetLabels())
			ownerRefs := ns.GetOwnerReferences()
			assert.Len(t, ownerRefs, 1, "Should have owner reference set")
			sdOwnerRef := ownerRefs[0]
			assert.True(t, *sdOwnerRef.BlockOwnerDeletion)
			assert.True(t, *sdOwnerRef.Controller)
			assert.Equal(t, tc.sd.Kind, sdOwnerRef.Kind)
			assert.Equal(t, tc.sd.Name, sdOwnerRef.Name)
			assert.Equal(t, tc.sd.UID, sdOwnerRef.UID)
		},
	}
	tc.run(t)
}
// TestCreatesLocationDescriptorNoLabel verifies that processing an SD with a
// label-less location creates a LocationDescriptor named after the SD.
func TestCreatesLocationDescriptorNoLabel(t *testing.T) {
t.Parallel()
tc := testCase{
sd: &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: v1.ServiceDescriptorResourceKind,
APIVersion: v1.ServiceDescriptorResourceVersion,
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Finalizers: []string{FinalizerServiceDescriptorComposition},
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
},
},
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
ld, ok := findCreatedLocationDescriptor(tc.formFake.Actions())
require.True(t, ok)
assert.Equal(t, tc.sd.Name, ld.Name, "Should have name set to sd name")
},
}
tc.run(t)
}
// TestCreatesLocationDescriptorWithTransformedResources verifies that the
// transformer's resources (not the SD's own ResourceGroups) end up on the
// created LocationDescriptor, including names, types, dependencies and
// dependency attributes.
func TestCreatesLocationDescriptorWithTransformedResources(t *testing.T) {
	t.Parallel()
	location := locationNoLabel()
	sdWithResources := &v1.ServiceDescriptor{
		TypeMeta: meta_v1.TypeMeta{
			Kind:       v1.ServiceDescriptorResourceKind,
			APIVersion: v1.ServiceDescriptorResourceVersion,
		},
		ObjectMeta: meta_v1.ObjectMeta{
			Name:       "test-sd",
			Finalizers: []string{FinalizerServiceDescriptorComposition},
		},
		Spec: v1.ServiceDescriptorSpec{
			Locations: []v1.ServiceDescriptorLocation{
				location,
			},
			// it doesn't matter what we put here
			// because it gets the output of the transformer
			ResourceGroups: []v1.ServiceDescriptorResourceGroup{},
		},
	}
	tc := testCase{
		sd: sdWithResources,
		transformedResources: []v1.ServiceDescriptorResource{
			{
				Name: "first-resource",
				Type: "first-type",
			},
			{
				Name: "second-resource",
				Type: "second-type",
			},
			{
				Name: "third-resource",
				Type: "third-type",
				DependsOn: []v1.ServiceDescriptorResourceDependency{
					{
						Name: "second-resource",
						Attributes: map[string]interface{}{
							"Bar":   "blah",
							"Other": "foo",
						},
					},
				},
			},
		},
		test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
			_, err := cntrlr.Process(ctx)
			require.NoError(t, err)
			ld, ok := findCreatedLocationDescriptor(tc.formFake.Actions())
			require.True(t, ok)
			assert.Equal(t, tc.sd.Name, ld.Name, "Should have name set to sd name")
			assert.Equal(t, apisynchronization.DefaultServiceMetadataConfigMapName, ld.Spec.ConfigMapName)
			assert.Len(t, ld.Spec.Resources, len(tc.transformedResources))
			assert.Equal(t, tc.transformedResources[0].Name, ld.Spec.Resources[0].Name)
			assert.Equal(t, tc.transformedResources[0].Type, ld.Spec.Resources[0].Type)
			assert.Equal(t, tc.transformedResources[1].Name, ld.Spec.Resources[1].Name)
			assert.Equal(t, tc.transformedResources[1].Type, ld.Spec.Resources[1].Type)
			assert.Equal(t, tc.transformedResources[2].Name, ld.Spec.Resources[2].Name)
			assert.Equal(t, tc.transformedResources[2].Type, ld.Spec.Resources[2].Type)
			// Was a tautology (asserting the expectation's length against
			// itself); check the LD's actual dependencies instead.
			assert.Len(t, ld.Spec.Resources[2].DependsOn, len(tc.transformedResources[2].DependsOn))
			assert.Equal(t, tc.transformedResources[2].DependsOn[0].Name, ld.Spec.Resources[2].DependsOn[0].Name)
			assert.Equal(t, tc.transformedResources[2].DependsOn[0].Attributes, ld.Spec.Resources[2].DependsOn[0].Attributes)
		},
	}
	tc.run(t)
}
// TestCreatesLocationDescriptorWithLabel verifies that a labelled location
// produces a LocationDescriptor named "<sd>--<label>".
func TestCreatesLocationDescriptorWithLabel(t *testing.T) {
	t.Parallel()
	sd := &v1.ServiceDescriptor{
		TypeMeta: meta_v1.TypeMeta{
			Kind:       v1.ServiceDescriptorResourceKind,
			APIVersion: v1.ServiceDescriptorResourceVersion,
		},
		ObjectMeta: meta_v1.ObjectMeta{
			Name:       "test-sd",
			Finalizers: []string{FinalizerServiceDescriptorComposition},
		},
		Spec: v1.ServiceDescriptorSpec{
			Locations: []v1.ServiceDescriptorLocation{
				locationWithLabel(),
			},
		},
	}
	expectedName := fmt.Sprintf("%s--%s", sd.Name, sd.Spec.Locations[0].Label)
	tc := testCase{
		sd: sd,
		test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
			_, err := cntrlr.Process(ctx)
			require.NoError(t, err)
			ld, ok := findCreatedLocationDescriptor(tc.formFake.Actions())
			require.True(t, ok)
			// Message corrected: the expected name includes the location label.
			assert.Equal(t, expectedName, ld.Name, "Should have name set to sd name combined with location label")
		},
	}
	tc.run(t)
}
// TestUpdatesLocationDescriptorNoLabel: with a namespace already owned by the
// SD and an existing LocationDescriptor, processing must update the LD in
// place (same UID) and replace its resources with the transformer output.
func TestUpdatesLocationDescriptorNoLabel(t *testing.T) {
t.Parallel()
existingSD := &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: v1.ServiceDescriptorResourceKind,
// NOTE(review): APIVersion here is core/v1 rather than the composition
// group version — appears irrelevant to the fake clients; confirm.
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
UID: "the-sd-uid",
Finalizers: []string{FinalizerServiceDescriptorComposition},
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
// doesn't matter, since we get it from the transformer
ResourceGroups: []v1.ServiceDescriptorResourceGroup{},
},
}
trueVar := true
// Pre-existing LD with stale resources that should be overwritten.
existingLocationDescriptor := &form_v1.LocationDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: form_v1.LocationDescriptorResourceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Namespace: "test-sd",
UID: "some-uid",
},
Spec: form_v1.LocationDescriptorSpec{
ConfigMapName: "cm1",
Resources: []form_v1.LocationDescriptorResource{
{
Name: "old-resource",
Type: "some-type",
},
},
},
}
// Namespace already owned (controller reference) by the SD.
existingNamespace := &core_v1.Namespace{
TypeMeta: meta_v1.TypeMeta{
Kind: k8s.NamespaceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
OwnerReferences: []meta_v1.OwnerReference{
{
Controller: &trueVar,
Name: existingSD.Name,
Kind: existingSD.Kind,
UID: existingSD.UID,
},
},
},
}
tc := testCase{
formClientObjects: []runtime.Object{existingLocationDescriptor},
mainClientObjects: []runtime.Object{existingNamespace},
sd: existingSD,
transformedResources: []v1.ServiceDescriptorResource{
{
Name: "first-resource",
Type: "first-type",
},
{
Name: "second-resource",
Type: "second-type",
},
},
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
ld, ok := findUpdatedLocationDescriptor(tc.formFake.Actions())
require.True(t, ok)
assert.Equal(t, tc.sd.Name, ld.Name)
// Same UID proves it was an update of the existing LD, not a recreate.
assert.Equal(t, existingLocationDescriptor.GetUID(), ld.GetUID())
// make sure it has the new resources we setup as expected
require.Len(t, ld.Spec.Resources, len(tc.transformedResources))
assert.Equal(t, tc.transformedResources[0].Name, ld.Spec.Resources[0].Name)
assert.Equal(t, tc.transformedResources[0].Type, ld.Spec.Resources[0].Type)
assert.Equal(t, tc.transformedResources[1].Name, ld.Spec.Resources[1].Name)
assert.Equal(t, tc.transformedResources[1].Type, ld.Spec.Resources[1].Type)
},
}
tc.run(t)
}
// TestDoesNotSkipLocationDescriptorUpdateWhenLocationDescriptorBeingDeleted:
// an LD with a deletion timestamp must still be updated (not created anew,
// and not skipped) when its SD is processed.
func TestDoesNotSkipLocationDescriptorUpdateWhenLocationDescriptorBeingDeleted(t *testing.T) {
t.Parallel()
now := meta_v1.Now()
existingSD := &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: v1.ServiceDescriptorResourceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Finalizers: []string{FinalizerServiceDescriptorComposition},
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
},
}
// LD marked as being deleted via DeletionTimestamp.
existingLocationDescriptor := &form_v1.LocationDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: form_v1.LocationDescriptorResourceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Namespace: "test-sd",
UID: "some-uid",
DeletionTimestamp: &now,
},
}
trueVar := true
existingNamespace := &core_v1.Namespace{
TypeMeta: meta_v1.TypeMeta{
Kind: k8s.NamespaceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
OwnerReferences: []meta_v1.OwnerReference{
{
Controller: &trueVar,
Name: existingSD.Name,
Kind: existingSD.Kind,
UID: existingSD.UID,
},
},
},
}
tc := testCase{
formClientObjects: []runtime.Object{existingLocationDescriptor},
mainClientObjects: []runtime.Object{existingNamespace},
sd: existingSD,
transformedResources: []v1.ServiceDescriptorResource{
{
Name: "first-resource",
Type: "first-type",
},
},
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
// No create should be recorded — only an update of the existing LD.
_, ok := findCreatedLocationDescriptor(tc.formFake.Actions())
assert.False(t, ok)
_, ok = findUpdatedLocationDescriptor(tc.formFake.Actions())
assert.True(t, ok)
},
}
tc.run(t)
}
// TestLocationDescriptorErrorsWhenDifferentOwnerReference: if the target
// namespace exists but is not owned by the SD, Process must fail and the
// controller must neither create nor update the LocationDescriptor.
func TestLocationDescriptorErrorsWhenDifferentOwnerReference(t *testing.T) {
t.Parallel()
existingLocationDescriptor := &form_v1.LocationDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: form_v1.LocationDescriptorResourceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Namespace: "test-sd",
UID: "some-uid",
},
}
existingNamespace := &core_v1.Namespace{
TypeMeta: meta_v1.TypeMeta{
Kind: k8s.NamespaceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
// non matching owner reference
OwnerReferences: []meta_v1.OwnerReference{},
},
}
tc := testCase{
formClientObjects: []runtime.Object{existingLocationDescriptor},
mainClientObjects: []runtime.Object{existingNamespace},
sd: &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: v1.ServiceDescriptorResourceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Finalizers: []string{FinalizerServiceDescriptorComposition},
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
},
},
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
assert.Error(t, err)
// Neither a create nor an update should have been attempted.
_, ok := findCreatedLocationDescriptor(tc.formFake.Actions())
require.False(t, ok)
_, ok = findUpdatedLocationDescriptor(tc.formFake.Actions())
require.False(t, ok)
},
}
tc.run(t)
}
// TestSkipsLocationWhenControllerHasNamespace: a controller scoped to a
// specific namespace must not create namespaces for SD locations.
func TestSkipsLocationWhenControllerHasNamespace(t *testing.T) {
t.Parallel()
tc := testCase{
sd: &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
},
},
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
// Scope the controller to an unrelated namespace before processing.
cntrlr.namespace = "some-random-ns"
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
_, found := findCreatedNamespace(tc.mainFake.Actions())
require.False(t, found)
},
}
tc.run(t)
}
// TestServiceDescriptorUpdatedIfStatusChanges: processing an SD with no prior
// status must record an SD update carrying top-level and per-location
// conditions.
func TestServiceDescriptorUpdatedIfStatusChanges(t *testing.T) {
t.Parallel()
sd := &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Finalizers: []string{FinalizerServiceDescriptorComposition},
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
ResourceGroups: []v1.ServiceDescriptorResourceGroup{
simpleResourceGroup(),
},
},
}
tc := testCase{
sd: sd,
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
// Descriptor should have updated with a bunch of statuses
// (the inner sd deliberately shadows the outer fixture variable).
sd, ok := findUpdatedServiceDescriptor(tc.compFake.Actions())
require.True(t, ok)
require.Len(t, sd.Status.Conditions, 3)
require.Len(t, sd.Status.LocationStatuses, 1)
assert.Equal(t, sd.Spec.Locations[0].VoyagerLocation(), sd.Status.LocationStatuses[0].Location)
require.Len(t, sd.Status.LocationStatuses[0].Conditions, 3)
},
}
tc.run(t)
}
// TestServiceDescriptorFinalizerAdded: processing an SD that lacks the
// composition finalizer must add it via an SD update.
func TestServiceDescriptorFinalizerAdded(t *testing.T) {
t.Parallel()
sd := &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
ResourceGroups: []v1.ServiceDescriptorResourceGroup{
simpleResourceGroup(),
},
},
}
tc := testCase{
sd: sd,
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
// Descriptor should have updated with a bunch of statuses
sd, ok := findUpdatedServiceDescriptor(tc.compFake.Actions())
require.True(t, ok)
require.True(t, hasServiceDescriptorFinalizer(sd))
},
}
tc.run(t)
}
// TestDeleteServiceDescriptorFinalizerRemoved: processing an SD that is being
// deleted must remove only the composition finalizer, leaving third-party
// finalizers intact, and must clear the location statuses.
func TestDeleteServiceDescriptorFinalizerRemoved(t *testing.T) {
t.Parallel()
ts, _ := time.Parse(time.RFC3339, "2018-08-01T01:10:00Z")
deletionTimestamp := meta_v1.NewTime(ts)
// emulate extra finalizer added by some third party, should be left untouched
thirdPartyFinalizer := "thirdParty/Finalizer"
sd := &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
DeletionTimestamp: &deletionTimestamp,
Finalizers: []string{
FinalizerServiceDescriptorComposition,
thirdPartyFinalizer,
},
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
ResourceGroups: []v1.ServiceDescriptorResourceGroup{
simpleResourceGroup(),
},
},
}
tc := testCase{
sd: sd,
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
// Descriptor should have updated with a bunch of statuses
sd, ok := findUpdatedServiceDescriptor(tc.compFake.Actions())
require.True(t, ok)
require.False(t, hasServiceDescriptorFinalizer(sd))
require.True(t, resources.HasFinalizer(sd, thirdPartyFinalizer))
require.Len(t, sd.Status.Conditions, 3)
require.Len(t, sd.Status.LocationStatuses, 0)
},
}
tc.run(t)
}
// TestServiceDescriptorNotUpdatedIfStatusNotChanged: when the SD already
// carries exactly the status the controller would compute, no SD update
// should be issued (avoids hot-looping on status writes).
func TestServiceDescriptorNotUpdatedIfStatusNotChanged(t *testing.T) {
t.Parallel()
voyagerLocation := locationNoLabel().VoyagerLocation()
sd := &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Finalizers: []string{FinalizerServiceDescriptorComposition},
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
ResourceGroups: []v1.ServiceDescriptorResourceGroup{
simpleResourceGroup(),
},
},
// Pre-populate the exact status the controller would produce.
Status: v1.ServiceDescriptorStatus{
Conditions: []cond_v1.Condition{
{
LastTransitionTime: meta_v1.Now(), // timestamp doesn't matter
Type: cond_v1.ConditionError,
Status: cond_v1.ConditionFalse,
},
{
LastTransitionTime: meta_v1.Now(),
Type: cond_v1.ConditionInProgress,
Status: cond_v1.ConditionFalse,
},
{
LastTransitionTime: meta_v1.Now(),
Type: cond_v1.ConditionReady,
Status: cond_v1.ConditionFalse,
},
},
LocationStatuses: []v1.LocationStatus{
v1.LocationStatus{
DescriptorName: "test-sd",
DescriptorNamespace: "test-sd",
Location: voyagerLocation,
Conditions: []cond_v1.Condition{
{
Type: cond_v1.ConditionError,
Status: cond_v1.ConditionFalse,
},
{
Type: cond_v1.ConditionInProgress,
Status: cond_v1.ConditionFalse,
},
{
Type: cond_v1.ConditionReady,
Status: cond_v1.ConditionFalse,
},
},
},
},
},
}
tc := testCase{
sd: sd,
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
// The descriptor must NOT have been updated — its status is unchanged.
_, ok := findUpdatedServiceDescriptor(tc.compFake.Actions())
require.False(t, ok)
},
}
tc.run(t)
}
func TestServiceDescriptorCopiesLdStatus(t *testing.T) {
t.Parallel()
ts1 := meta_v1.Time{time.Now().Add(time.Second)}
ts2 := meta_v1.Time{time.Now().Add(2 * time.Second)}
ts3 := meta_v1.Time{time.Now().Add(3 * time.Second)}
voyagerLocation := locationNoLabel().VoyagerLocation()
sd := &v1.ServiceDescriptor{
TypeMeta: meta_v1.TypeMeta{},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Finalizers: []string{FinalizerServiceDescriptorComposition},
},
Spec: v1.ServiceDescriptorSpec{
Locations: []v1.ServiceDescriptorLocation{
locationNoLabel(),
},
ResourceGroups: []v1.ServiceDescriptorResourceGroup{
simpleResourceGroup(),
},
},
Status: v1.ServiceDescriptorStatus{
Conditions: []cond_v1.Condition{
{
LastTransitionTime: meta_v1.Now(),
Type: cond_v1.ConditionError,
Status: cond_v1.ConditionFalse,
},
{
LastTransitionTime: meta_v1.Now(),
Type: cond_v1.ConditionInProgress,
Status: cond_v1.ConditionFalse,
},
{
LastTransitionTime: meta_v1.Now(),
Type: cond_v1.ConditionReady,
Status: cond_v1.ConditionFalse,
},
},
LocationStatuses: []v1.LocationStatus{
{
DescriptorName: "test-sd",
DescriptorNamespace: "test-sd",
Location: voyagerLocation,
Conditions: []cond_v1.Condition{
{
Type: cond_v1.ConditionError,
Status: cond_v1.ConditionFalse,
},
{
Type: cond_v1.ConditionInProgress,
Status: cond_v1.ConditionFalse,
},
{
Type: cond_v1.ConditionReady,
Status: cond_v1.ConditionFalse,
},
},
},
},
},
}
ld := &form_v1.LocationDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: form_v1.LocationDescriptorResourceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
Namespace: "test-sd",
UID: "some-uid",
// just setting any owner reference value means it shouldn't match
},
Spec: form_v1.LocationDescriptorSpec{
ConfigMapName: "service-metadata",
ConfigMapNames: form_v1.LocationDescriptorConfigMapNames{
Release: "service-release",
},
},
Status: form_v1.LocationDescriptorStatus{
Conditions: []cond_v1.Condition{
{
LastTransitionTime: ts1,
Type: cond_v1.ConditionError,
Status: cond_v1.ConditionTrue,
Message: "oh no",
Reason: "TerminalError",
},
{
LastTransitionTime: ts2,
Type: cond_v1.ConditionInProgress,
Status: cond_v1.ConditionTrue,
},
{
LastTransitionTime: ts3,
Type: cond_v1.ConditionReady,
Status: cond_v1.ConditionFalse,
},
},
},
}
unreferencedLd := &form_v1.LocationDescriptor{
TypeMeta: meta_v1.TypeMeta{
Kind: form_v1.LocationDescriptorResourceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd--mylabel",
Namespace: "test-sd--mylabel",
UID: "some-uid",
// just setting any owner reference value means it shouldn't match
},
Spec: form_v1.LocationDescriptorSpec{
ConfigMapName: "service-metadata",
ConfigMapNames: form_v1.LocationDescriptorConfigMapNames{
Release: "service-release",
},
},
Status: form_v1.LocationDescriptorStatus{
Conditions: []cond_v1.Condition{
{
LastTransitionTime: ts1,
Type: cond_v1.ConditionError,
Status: cond_v1.ConditionTrue,
Message: "oh no",
Reason: "TerminalError",
},
{
LastTransitionTime: ts2,
Type: cond_v1.ConditionInProgress,
Status: cond_v1.ConditionTrue,
},
{
LastTransitionTime: ts3,
Type: cond_v1.ConditionReady,
Status: cond_v1.ConditionFalse,
},
},
},
}
trueVar := true
existingNamespace := &core_v1.Namespace{
TypeMeta: meta_v1.TypeMeta{
Kind: k8s.NamespaceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd",
OwnerReferences: []meta_v1.OwnerReference{
{
Controller: &trueVar,
Name: sd.Name,
Kind: sd.Kind,
UID: sd.UID,
},
},
Labels: map[string]string{
voyager.ServiceNameLabel: "test-sd",
voyager.ServiceLabelLabel: "",
},
},
}
unreferencedNamespace := &core_v1.Namespace{
TypeMeta: meta_v1.TypeMeta{
Kind: k8s.NamespaceKind,
APIVersion: core_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Name: "test-sd--mylabel",
OwnerReferences: []meta_v1.OwnerReference{
{
Controller: &trueVar,
Name: sd.Name,
Kind: sd.Kind,
UID: sd.UID,
},
},
Labels: map[string]string{
voyager.ServiceNameLabel: "test-sd",
voyager.ServiceLabelLabel: "mylabel",
},
},
}
tc := testCase{
sd: sd,
formClientObjects: []runtime.Object{ld, unreferencedLd},
mainClientObjects: []runtime.Object{existingNamespace, unreferencedNamespace},
test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
_, err := cntrlr.Process(ctx)
require.NoError(t, err)
// Descriptor should have updated with a bunch of statuses
sd, ok := findUpdatedServiceDescriptor(tc.compFake.Actions())
require.True(t, ok)
require.Len(t, sd.Status.Conditions, 3)
require.Len(t, sd.Status.LocationStatuses, 2)
baseLocation := sd.Spec.Locations[0].VoyagerLocation()
assert.Equal(t, baseLocation, sd.Status.LocationStatuses[0].Location)
assert.Equal(t, baseLocation.ClusterLocation().Location("mylabel"), sd.Status.LocationStatuses[1].Location)
for _, locStatus := range sd.Status.LocationStatuses {
ldConditions := locStatus.Conditions
require.Len(t, ldConditions, 3)
_, errCond := cond_v1.FindCondition(ldConditions, cond_v1.ConditionError)
assert.Equal(t, &cond_v1.Condition{
LastTransitionTime: ts1,
Message: "oh no",
Reason: "TerminalError",
Status: cond_v1.ConditionTrue,
Type: cond_v1.ConditionError,
}, errCond)
_, inProgressCond := cond_v1.FindCondition(ldConditions, cond_v1.ConditionInProgress)
assert.Equal(t, &cond_v1.Condition{
LastTransitionTime: ts2,
Status: cond_v1.ConditionTrue,
Type: cond_v1.ConditionInProgress,
}, inProgressCond)
_, readyCond := cond_v1.FindCondition(ldConditions, cond_v1.ConditionReady)
assert.Equal(t, &cond_v1.Condition{
LastTransitionTime: ts3,
Status: cond_v1.ConditionFalse,
Type: cond_v1.ConditionReady,
}, readyCond)
}
},
}
tc.run(t)
}
// TestServiceDescriptorCopiesLdStatusWhenDeleting verifies that while a
// ServiceDescriptor is being deleted (DeletionTimestamp set and the
// composition finalizer present), Process still copies the existing
// LocationDescriptor's conditions into the SD's LocationStatuses, and keeps
// the finalizer because the LD has not actually disappeared yet.
func TestServiceDescriptorCopiesLdStatusWhenDeleting(t *testing.T) {
	t.Parallel()
	// Three distinct, ordered timestamps so each condition copied from the LD
	// can be matched back to its source.
	ts1 := meta_v1.Time{time.Now().Add(time.Second)}
	ts2 := meta_v1.Time{time.Now().Add(2 * time.Second)}
	ts3 := meta_v1.Time{time.Now().Add(3 * time.Second)}
	voyagerLocation := locationNoLabel().VoyagerLocation()
	// SD under test: mid-deletion (ts1 doubles as the deletion timestamp),
	// with stale all-False conditions and location statuses to be replaced.
	sd := &v1.ServiceDescriptor{
		TypeMeta: meta_v1.TypeMeta{},
		ObjectMeta: meta_v1.ObjectMeta{
			Name:              "test-sd",
			DeletionTimestamp: &ts1,
			Finalizers:        []string{FinalizerServiceDescriptorComposition},
		},
		Spec: v1.ServiceDescriptorSpec{
			Locations: []v1.ServiceDescriptorLocation{
				locationNoLabel(),
			},
			ResourceGroups: []v1.ServiceDescriptorResourceGroup{
				simpleResourceGroup(),
			},
		},
		Status: v1.ServiceDescriptorStatus{
			Conditions: []cond_v1.Condition{
				{
					LastTransitionTime: meta_v1.Now(),
					Type:               cond_v1.ConditionError,
					Status:             cond_v1.ConditionFalse,
				},
				{
					LastTransitionTime: meta_v1.Now(),
					Type:               cond_v1.ConditionInProgress,
					Status:             cond_v1.ConditionFalse,
				},
				{
					LastTransitionTime: meta_v1.Now(),
					Type:               cond_v1.ConditionReady,
					Status:             cond_v1.ConditionFalse,
				},
			},
			LocationStatuses: []v1.LocationStatus{
				{
					DescriptorName:      "test-sd",
					DescriptorNamespace: "test-sd",
					Location:            voyagerLocation,
					Conditions: []cond_v1.Condition{
						{
							Type:   cond_v1.ConditionError,
							Status: cond_v1.ConditionFalse,
						},
						{
							Type:   cond_v1.ConditionInProgress,
							Status: cond_v1.ConditionFalse,
						},
						{
							Type:   cond_v1.ConditionReady,
							Status: cond_v1.ConditionFalse,
						},
					},
				},
			},
		},
	}
	// Existing LD whose conditions (Error@ts1, InProgress@ts2, Ready@ts3)
	// should end up verbatim in the SD's location status. The extra finalizer
	// keeps the LD "present" from the controller's point of view.
	ld := &form_v1.LocationDescriptor{
		TypeMeta: meta_v1.TypeMeta{
			Kind:       form_v1.LocationDescriptorResourceKind,
			APIVersion: core_v1.SchemeGroupVersion.String(),
		},
		ObjectMeta: meta_v1.ObjectMeta{
			Name:       "test-sd",
			Namespace:  "test-sd",
			UID:        "some-uid",
			Finalizers: []string{"someFinalizer"},
		},
		// Whatever, don't care about Spec, we just need an existing LD with a matching name
		Status: form_v1.LocationDescriptorStatus{
			Conditions: []cond_v1.Condition{
				{
					LastTransitionTime: ts1,
					Type:               cond_v1.ConditionError,
					Status:             cond_v1.ConditionTrue,
					Message:            "oh no",
					Reason:             "TerminalError",
				},
				{
					LastTransitionTime: ts2,
					Type:               cond_v1.ConditionInProgress,
					Status:             cond_v1.ConditionTrue,
				},
				{
					LastTransitionTime: ts3,
					Type:               cond_v1.ConditionReady,
					Status:             cond_v1.ConditionFalse,
				},
			},
		},
	}
	trueVar := true
	// Namespace owned by the SD, matching the single (unlabelled) location.
	existingNamespace := &core_v1.Namespace{
		TypeMeta: meta_v1.TypeMeta{
			Kind:       k8s.NamespaceKind,
			APIVersion: core_v1.SchemeGroupVersion.String(),
		},
		ObjectMeta: meta_v1.ObjectMeta{
			Name: "test-sd",
			OwnerReferences: []meta_v1.OwnerReference{
				{
					Controller: &trueVar,
					Name:       sd.Name,
					Kind:       sd.Kind,
					UID:        sd.UID,
				},
			},
		},
	}
	tc := testCase{
		sd:                sd,
		formClientObjects: []runtime.Object{ld},
		mainClientObjects: []runtime.Object{existingNamespace},
		test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) {
			// Make LD deletion a no-op rather than deleting it from cache straight away, to emulate it being asynchronously deleted
			tc.formFake.PrependReactor("delete", "locationdescriptors", func(action kube_testing.Action) (bool, runtime.Object, error) {
				return true, nil, nil
			})
			_, err := cntrlr.Process(ctx)
			require.NoError(t, err)
			// Descriptor should have updated with a bunch of statuses
			sd, ok := findUpdatedServiceDescriptor(tc.compFake.Actions())
			require.True(t, ok)
			require.Len(t, sd.Status.Conditions, 3)
			require.Len(t, sd.Status.LocationStatuses, 1)
			assert.Equal(t, sd.Spec.Locations[0].VoyagerLocation(), sd.Status.LocationStatuses[0].Location)
			// All three LD conditions must be copied through unchanged,
			// including timestamps, messages and reasons.
			ldConditions := sd.Status.LocationStatuses[0].Conditions
			require.Len(t, ldConditions, 3)
			_, errCond := cond_v1.FindCondition(ldConditions, cond_v1.ConditionError)
			assert.Equal(t, &cond_v1.Condition{
				LastTransitionTime: ts1,
				Message:            "oh no",
				Reason:             "TerminalError",
				Status:             cond_v1.ConditionTrue,
				Type:               cond_v1.ConditionError,
			}, errCond)
			_, inProgressCond := cond_v1.FindCondition(ldConditions, cond_v1.ConditionInProgress)
			assert.Equal(t, &cond_v1.Condition{
				LastTransitionTime: ts2,
				Status:             cond_v1.ConditionTrue,
				Type:               cond_v1.ConditionInProgress,
			}, inProgressCond)
			_, readyCond := cond_v1.FindCondition(ldConditions, cond_v1.ConditionReady)
			assert.Equal(t, &cond_v1.Condition{
				LastTransitionTime: ts3,
				Status:             cond_v1.ConditionFalse,
				Type:               cond_v1.ConditionReady,
			}, readyCond)
			// SD should still have its finalizer (the LD is not gone yet)
			require.True(t, hasServiceDescriptorFinalizer(sd))
		},
	}
	tc.run(t)
}
// simpleResourceGroup returns a minimal single-resource group fixture bound
// to the "some-location" location used throughout these tests.
func simpleResourceGroup() v1.ServiceDescriptorResourceGroup {
	return v1.ServiceDescriptorResourceGroup{
		Locations: []v1.ServiceDescriptorLocationName{"some-location"},
		Name:      "some-resource-group",
		Resources: []v1.ServiceDescriptorResource{
			// Element type elided from the slice literal (gofmt -s); the
			// explicit v1.ServiceDescriptorResource was redundant.
			{
				Name: "resource1",
				Type: "EC2Compute",
			},
		},
	}
}
// locationNoLabel builds the baseline test location fixture. Its Label field
// is deliberately left at the zero value; see locationWithLabel for the
// labelled variant.
func locationNoLabel() v1.ServiceDescriptorLocation {
	return v1.ServiceDescriptorLocation{
		EnvType: "test",
		Region:  "ap-eastwest-1",
		Account: "12345",
		Name:    "some-location",
	}
}
// locationWithLabel is the baseline fixture from locationNoLabel with a
// non-empty Label applied.
func locationWithLabel() v1.ServiceDescriptorLocation {
	loc := locationNoLabel()
	loc.Label = "my-expected-label"
	return loc
}
// testCase bundles the fixtures for one Controller.Process scenario.
// run constructs fake clients/informers from the *ClientObjects fields,
// wires up a Controller, and then invokes the test func.
type testCase struct {
	logger *zap.Logger
	// clock injected into the Controller; when nil, run substitutes a fake
	// clock fixed at the Unix epoch for deterministic timestamps.
	clock *clock.FakeClock
	// Seed objects for the three fake clientsets (core k8s, formation,
	// composition). The sd below is always appended to compClientObjects.
	mainClientObjects []runtime.Object
	formClientObjects []runtime.Object
	compClientObjects []runtime.Object
	sd                *v1.ServiceDescriptor
	// Resources the fake SD transformer reports for every location.
	transformedResources []v1.ServiceDescriptorResource
	// test receives the fully wired controller plus this testCase so it can
	// inspect the recorded fake actions below.
	test func(*testing.T, *Controller, *ctrl.ProcessContext, *testCase)
	// Populated by run; expose the fake clientsets' recorded actions.
	mainFake *kube_testing.Fake
	compFake *kube_testing.Fake
	formFake *kube_testing.Fake
}
// run builds fake clients and informers around the testCase fixtures,
// constructs a Controller wired to them (mirroring the production
// constructor), then hands control to tc.test.
func (tc *testCase) run(t *testing.T) {
	mainClientObjects := tc.mainClientObjects
	mainClient := k8s_fake.NewSimpleClientset(mainClientObjects...)
	tc.mainFake = &mainClient.Fake
	formationClient := formclient_fake.NewSimpleClientset(tc.formClientObjects...)
	tc.formFake = &formationClient.Fake
	// The SD under test is always part of the composition client's state.
	compClientObjects := append(tc.compClientObjects, tc.sd)
	compositionClient := compclient_fake.NewSimpleClientset(compClientObjects...)
	tc.compFake = &compositionClient.Fake
	logger := zaptest.NewLogger(t)
	config := &ctrl.Config{
		Logger:       logger,
		ResyncPeriod: time.Second * 60,
		MainClient:   mainClient,
	}
	nsInformer := core_v1inf.NewNamespaceInformer(mainClient, config.ResyncPeriod, cache.Indexers{
		nsServiceNameIndex: nsServiceNameIndexFunc,
	})
	ldInformer := formInf.LocationDescriptorInformer(formationClient, meta_v1.NamespaceAll, config.ResyncPeriod)
	err := ldInformer.AddIndexers(cache.Indexers{
		cache.NamespaceIndex: cache.MetaNamespaceIndexFunc,
	})
	require.NoError(t, err)
	informers := []cache.SharedIndexInformer{ldInformer, nsInformer}
	// Spec check
	store := store.NewMultiBasic() // NOTE: shadows the store package for the rest of this function
	specCheck := specchecker.New(store)
	// Object Updaters
	// Copy and pasted from the constructor...
	ldUpdater := compUpdater.LocationDescriptorUpdater(ldInformer.GetIndexer(), specCheck, formationClient)
	namespaceUpdater := updater.NamespaceUpdater(nsInformer.GetIndexer(), specCheck, config.MainClient)
	stgr := stager.New()
	defer stgr.Shutdown()
	// Bound the whole test run so a stuck informer sync fails rather than hangs.
	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute)
	defer cancel()
	stage := stgr.NextStage()
	// Start all informers then wait on them
	for _, inf := range informers {
		stage.StartWithChannel(inf.Run)
	}
	for _, inf := range informers {
		require.True(t, cache.WaitForCacheSync(ctx.Done(), inf.HasSynced))
	}
	// Stub the SD transformer: one FormationObjectInfo per location in the
	// SD spec, each carrying tc.transformedResources.
	fakeSd := fakeSdTransformer{}
	objectInfos := make([]FormationObjectInfo, 0, len(tc.sd.Spec.Locations))
	for i := range tc.sd.Spec.Locations {
		serviceLocation := v1.ServiceDescriptorLocation{
			Region:  tc.sd.Spec.Locations[i].Region,
			EnvType: tc.sd.Spec.Locations[i].EnvType,
			Account: tc.sd.Spec.Locations[i].Account,
			Name:    tc.sd.Spec.Locations[i].Name,
			Label:   tc.sd.Spec.Locations[i].Label,
		}
		// Namespace name is derived from the SD name and location label.
		nsName := generateNamespaceName(tc.sd.Name, serviceLocation.Label)
		objectInfos = append(objectInfos, FormationObjectInfo{
			Name:        nsName,
			Namespace:   nsName,
			ServiceName: voyager.ServiceName(tc.sd.Name),
			Location:    serviceLocation.VoyagerLocation(),
			Resources:   tc.transformedResources,
		})
	}
	fakeSd.On("CreateFormationObjectDef", mock.Anything).Return(objectInfos, nil)
	serviceDescriptorTransitionsCounter := prometheus.NewCounterVec(
		prometheus.CounterOpts{
			Namespace: config.AppName,
			Name:      "service_descriptor_transitions_total",
			Help:      "Records the number of times a ServiceDescriptor transitions into a new condition",
		},
		[]string{"name", "type", "reason"},
	)
	// Default to a fake clock fixed at the epoch for deterministic timestamps.
	testClock := tc.clock
	if testClock == nil {
		testClock = clock.NewFakeClock(time.Unix(0, 0))
	}
	cntrlr := &Controller{
		logger:            logger,
		clock:             testClock,
		formationClient:   formationClient,
		compositionClient: compositionClient,
		sdTransformer:     &fakeSd,
		// The controller is "located" wherever the first generated object is.
		location: options.Location{
			Account: objectInfos[0].Location.Account,
			Region:  objectInfos[0].Location.Region,
			EnvType: objectInfos[0].Location.EnvType,
		},
		nsUpdater: namespaceUpdater,
		ldUpdater: ldUpdater,
		ldIndexer: ldInformer.GetIndexer(),
		nsIndexer: nsInformer.GetIndexer(),
		serviceDescriptorTransitionsCounter: serviceDescriptorTransitionsCounter,
	}
	// we don't control the workQueue, so we call Process directly
	pctx := &ctrl.ProcessContext{
		Logger: logger,
		Object: tc.sd,
	}
	tc.test(t, cntrlr, pctx, tc)
}
// fakeSdTransformer is a testify mock standing in for the SD transformer
// dependency of the Controller; expectations are set via fakeSd.On(...).
type fakeSdTransformer struct {
	mock.Mock
}
// CreateFormationObjectDef records the call on the mock and returns whatever
// values were configured with On("CreateFormationObjectDef", ...).
func (m *fakeSdTransformer) CreateFormationObjectDef(serviceDescriptor *v1.ServiceDescriptor) ([]FormationObjectInfo, error) {
	args := m.Called(serviceDescriptor)
	infos := args.Get(0).([]FormationObjectInfo)
	return infos, args.Error(1)
}
// TestGenerateNamespaceName checks namespace naming: the bare service name
// when the label is empty, and "<name>--<label>" otherwise.
func TestGenerateNamespaceName(t *testing.T) {
	t.Parallel()
	tests := []struct {
		name  string
		label voyager.Label
		want  string
	}{
		{name: "foo", label: "", want: "foo"},
		{name: "foo", label: "bar", want: "foo--bar"},
	}
	for idx, tt := range tests {
		tt := tt
		t.Run(strconv.Itoa(idx), func(t *testing.T) {
			assert.Equal(t, tt.want, generateNamespaceName(tt.name, tt.label))
		})
	}
}
// findCreatedLocationDescriptor scans the fake client's recorded actions for
// the first created LocationDescriptor; the bool reports whether one exists.
func findCreatedLocationDescriptor(actions []kube_testing.Action) (*form_v1.LocationDescriptor, bool) {
	for _, create := range k8s_testing.FilterCreateActions(actions) {
		ld, ok := create.GetObject().(*form_v1.LocationDescriptor)
		if ok {
			return ld, true
		}
	}
	return nil, false
}
// findCreatedNamespace scans the fake client's recorded actions for the first
// created Namespace; the bool reports whether one exists.
func findCreatedNamespace(actions []kube_testing.Action) (*core_v1.Namespace, bool) {
	for _, create := range k8s_testing.FilterCreateActions(actions) {
		ns, ok := create.GetObject().(*core_v1.Namespace)
		if ok {
			return ns, true
		}
	}
	return nil, false
}
// findCreatedRoleBindings collects every RoleBinding created via the fake
// client, keyed by binding name; the map is empty when none were created.
func findCreatedRoleBindings(actions []kube_testing.Action) map[string]*rbac_v1.RoleBinding {
	bindings := map[string]*rbac_v1.RoleBinding{}
	for _, create := range k8s_testing.FilterCreateActions(actions) {
		rb, ok := create.GetObject().(*rbac_v1.RoleBinding)
		if !ok {
			continue
		}
		bindings[rb.Name] = rb
	}
	return bindings
}
// findUpdatedLocationDescriptor scans the fake client's recorded actions for
// the first updated LocationDescriptor; the bool reports whether one exists.
func findUpdatedLocationDescriptor(actions []kube_testing.Action) (*form_v1.LocationDescriptor, bool) {
	for _, update := range k8s_testing.FilterUpdateActions(actions) {
		ld, ok := update.GetObject().(*form_v1.LocationDescriptor)
		if ok {
			return ld, true
		}
	}
	return nil, false
}
// findUpdatedRoleBindings collects every RoleBinding updated via the fake
// client, keyed by binding name; the map is empty when none were updated.
func findUpdatedRoleBindings(actions []kube_testing.Action) map[string]*rbac_v1.RoleBinding {
	bindings := map[string]*rbac_v1.RoleBinding{}
	for _, update := range k8s_testing.FilterUpdateActions(actions) {
		rb, ok := update.GetObject().(*rbac_v1.RoleBinding)
		if !ok {
			continue
		}
		bindings[rb.Name] = rb
	}
	return bindings
}
// findUpdatedServiceDescriptor scans the fake client's recorded actions for
// the first updated ServiceDescriptor; the bool reports whether one exists.
func findUpdatedServiceDescriptor(actions []kube_testing.Action) (*comp_v1.ServiceDescriptor, bool) {
	for _, update := range k8s_testing.FilterUpdateActions(actions) {
		sd, ok := update.GetObject().(*comp_v1.ServiceDescriptor)
		if ok {
			return sd, true
		}
	}
	return nil, false
}
|
# Edit these for project-wide testing
# Quiet, aggressively-retrying wget: CI downloads from box.com can be flaky.
WGET="wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q"
# NOTE(review): machine-specific path from the original author's workstation;
# local runs should point this at their own qsiprep checkout (or unset it).
LOCAL_PATCH=/home/mcieslak/projects/qsiprep/qsiprep
IMAGE=pennbbl/qsiprep:latest
# Determine if we're in a CI test
if [[ "${CIRCLECI}" = "true" ]]; then
    IN_CI=true
    NTHREADS=2
    OMP_NTHREADS=2
    # If CircleCI reports a CPU allowance, use it, keeping OMP one below the
    # total thread count.
    if [[ -n "${CIRCLE_CPUS}" ]]; then
        NTHREADS=${CIRCLE_CPUS}
        OMP_NTHREADS=$(expr $NTHREADS - 1)
    fi
else
    IN_CI="false"
    NTHREADS=9
    OMP_NTHREADS=8
fi
export IN_CI NTHREADS OMP_NTHREADS
# run_qsiprep_cmd BIDS_DIR OUTPUT_DIR
# Echo (not execute) the qsiprep command line suited to the current
# environment: a direct in-container call on CircleCI, or a qsiprep-docker
# invocation locally, optionally adding --config (from NIPYPE_CONFIG) and
# --patch-qsiprep (from LOCAL_PATCH). Callers capture stdout, e.g.
#   cmd=$(run_qsiprep_cmd "$bids" "$out")
# NOTE(review): bids_dir/output_dir/QSIPREP_RUN/CFG are not declared local;
# presumably no caller relies on them afterwards, but verify before changing.
run_qsiprep_cmd () {
    bids_dir="$1"
    output_dir="$2"
    # Defines a call to qsiprep that works on circleci OR for a local
    # test that uses
    if [[ "${CIRCLECI}" = "true" ]]; then
        # In circleci we're running from inside the container. call directly
        QSIPREP_RUN="/usr/local/miniconda/bin/qsiprep ${bids_dir} ${output_dir} participant"
    else
        # Otherwise we're going to use docker from the outside
        QSIPREP_RUN="qsiprep-docker ${bids_dir} ${output_dir} participant -e qsiprep_DEV 1 -u $(id -u)"
        CFG=$(printenv NIPYPE_CONFIG)
        if [[ -n "${CFG}" ]]; then
            QSIPREP_RUN="${QSIPREP_RUN} --config ${CFG}"
        fi
        if [[ -n "${LOCAL_PATCH}" ]]; then
            #echo "Using qsiprep patch: ${LOCAL_PATCH}"
            QSIPREP_RUN="${QSIPREP_RUN} --patch-qsiprep ${LOCAL_PATCH}"
        fi
    fi
    echo "${QSIPREP_RUN} --nthreads ${NTHREADS} --omp-nthreads ${OMP_NTHREADS}"
}
cat << DOC
Create input data for tests. A few files are automatically
created because they're used in all/most of the tests.
Imaging data is only downloaded as needed based on the
second argument to the function.
Default data:
-------------
data/nipype.cfg
Instructs nipype to stop on the first crash
data/eddy_config.json
Configures eddy to perform few iterations so it
finishes quickly.
data/license.txt
A freesurfer license file
DOC
# get_config_data WORKDIR
# Populate WORKDIR/data with the configuration files every test needs:
# a nipype config (path exported as NIPYPE_CONFIG), a fast eddy config, and
# data/license.txt (a FreeSurfer license, per the doc block above).
# Returns with the caller's working directory restored.
get_config_data() {
    WORKDIR=$1
    ENTRYDIR=`pwd`
    mkdir -p ${WORKDIR}/data
    cd ${WORKDIR}/data
    # Write the config file
    CFG=${WORKDIR}/data/nipype.cfg
    # stop_on_first_crash surfaces failures immediately; short poll keeps the
    # tiny test workflows fast.
    printf "[execution]\nstop_on_first_crash = true\n" > ${CFG}
    echo "poll_sleep_duration = 0.01" >> ${CFG}
    echo "hash_method = content" >> ${CFG}
    export NIPYPE_CONFIG=$CFG
    # Get an eddy config. It's used for some tests
    # (niter is kept at 2 so eddy finishes quickly; see the doc block above).
    cat > ${WORKDIR}/data/eddy_config.json << "EOT"
{
"flm": "linear",
"slm": "linear",
"fep": false,
"interp": "spline",
"nvoxhp": 100,
"fudge_factor": 10,
"dont_sep_offs_move": false,
"dont_peas": false,
"niter": 2,
"method": "jac",
"repol": true,
"num_threads": 1,
"is_shelled": true,
"use_cuda": false,
"cnr_maps": true,
"residuals": false,
"output_type": "NIFTI_GZ",
"args": ""
}
EOT
    chmod a+r ${WORKDIR}/data/eddy_config.json
    # We always need a freesurfer license
    # The base64 blob decodes to a printf command that writes license.txt;
    # it is piped to sh in the current directory (WORKDIR/data).
    echo "cHJpbnRmICJtYXR0aGV3LmNpZXNsYWtAcHN5Y2gudWNzYi5lZHVcbjIwNzA2XG4gKkNmZVZkSDVVVDhyWVxuIEZTQllaLlVrZVRJQ3dcbiIgPiBsaWNlbnNlLnR4dAo=" | base64 -d | sh
    cd ${ENTRYDIR}
}
cat << DOC
DSDTI:
------
Downsampled DTI (single shell) data along with an EPI
fieldmap.
Contents:
^^^^^^^^^
- data/DSDTI/dataset_description.json
- data/DSDTI/README
- data/DSDTI/sub-PNC
- data/DSDTI/sub-PNC/anat
- data/DSDTI/sub-PNC/anat/sub-PNC_T1w.json
- data/DSDTI/sub-PNC/anat/sub-PNC_T1w.nii.gz
- data/DSDTI/sub-PNC/dwi
- data/DSDTI/sub-PNC/dwi/sub-PNC_acq-realistic_dwi.bval
- data/DSDTI/sub-PNC/dwi/sub-PNC_acq-realistic_dwi.bvec
- data/DSDTI/sub-PNC/dwi/sub-PNC_acq-realistic_dwi.json
- data/DSDTI/sub-PNC/dwi/sub-PNC_acq-realistic_dwi.nii.gz
- data/DSDTI/sub-PNC/fmap
- data/DSDTI/sub-PNC/fmap/sub-PNC_dir-PA_epi.json
- data/DSDTI/sub-PNC/fmap/sub-PNC_dir-PA_epi.nii.gz
DSCSDSI:
--------
Downsampled CS-DSI data.
Contents:
^^^^^^^^^
- data/DSCSDSI_nofmap/dataset_description.json
- data/DSCSDSI_nofmap/README
- data/DSCSDSI_nofmap/sub-tester
- data/DSCSDSI_nofmap/sub-tester/anat
- data/DSCSDSI_nofmap/sub-tester/anat/sub-tester_T1w.json
- data/DSCSDSI_nofmap/sub-tester/anat/sub-tester_T1w.nii.gz
- data/DSCSDSI_nofmap/sub-tester/dwi
- data/DSCSDSI_nofmap/sub-tester/dwi/sub-tester_acq-HASC55AP_dwi.bval
- data/DSCSDSI_nofmap/sub-tester/dwi/sub-tester_acq-HASC55AP_dwi.bvec
- data/DSCSDSI_nofmap/sub-tester/dwi/sub-tester_acq-HASC55AP_dwi.json
- data/DSCSDSI_nofmap/sub-tester/dwi/sub-tester_acq-HASC55AP_dwi.nii.gz
DSCSDSI_BUDS:
-------------
Downsampled CS-DSI data with blip-up and blip-down DWI series.
Contents:
^^^^^^^^^
- data/DSCSDSI_BUDS
- data/DSCSDSI_BUDS/dataset_description.json
- data/DSCSDSI_BUDS/README
- data/DSCSDSI_BUDS/sub-tester
- data/DSCSDSI_BUDS/sub-tester/anat
- data/DSCSDSI_BUDS/sub-tester/anat/sub-tester_T1w.json
- data/DSCSDSI_BUDS/sub-tester/anat/sub-tester_T1w.nii.gz
- data/DSCSDSI_BUDS/sub-tester/dwi
- data/DSCSDSI_BUDS/sub-tester/dwi/sub-tester_acq-HASC55AP_dwi.bval
- data/DSCSDSI_BUDS/sub-tester/dwi/sub-tester_acq-HASC55AP_dwi.bvec
- data/DSCSDSI_BUDS/sub-tester/dwi/sub-tester_acq-HASC55AP_dwi.json
- data/DSCSDSI_BUDS/sub-tester/dwi/sub-tester_acq-HASC55AP_dwi.nii.gz
- data/DSCSDSI_BUDS/sub-tester/dwi/sub-tester_acq-HASC55PA_dwi.bval
- data/DSCSDSI_BUDS/sub-tester/dwi/sub-tester_acq-HASC55PA_dwi.bvec
- data/DSCSDSI_BUDS/sub-tester/dwi/sub-tester_acq-HASC55PA_dwi.json
- data/DSCSDSI_BUDS/sub-tester/dwi/sub-tester_acq-HASC55PA_dwi.nii.gz
- data/DSCSDSI_BUDS/sub-tester/fmap
- data/DSCSDSI_BUDS/sub-tester/fmap/sub-tester_dir-AP_epi.json
- data/DSCSDSI_BUDS/sub-tester/fmap/sub-tester_dir-AP_epi.nii.gz
- data/DSCSDSI_BUDS/sub-tester/fmap/sub-tester_dir-PA_epi.json
- data/DSCSDSI_BUDS/sub-tester/fmap/sub-tester_dir-PA_epi.nii.gz
twoses:
-------
Data containing two sessions.
Contents:
^^^^^^^^^
- data/twoses/dataset_description.json
- data/twoses/README
- data/twoses/sub-tester/ses-1/anat/sub-tester_ses-1_T1w.json
- data/twoses/sub-tester/ses-1/anat/sub-tester_ses-1_T1w.nii.gz
- data/twoses/sub-tester/ses-1/dwi
- data/twoses/sub-tester/ses-1/dwi/sub-tester_ses-1_acq-HASC55PA_dwi.bval
- data/twoses/sub-tester/ses-1/dwi/sub-tester_ses-1_acq-HASC55PA_dwi.bvec
- data/twoses/sub-tester/ses-1/dwi/sub-tester_ses-1_acq-HASC55PA_dwi.json
- data/twoses/sub-tester/ses-1/dwi/sub-tester_ses-1_acq-HASC55PA_dwi.nii.gz
- data/twoses/sub-tester/ses-2
- data/twoses/sub-tester/ses-2/dwi
- data/twoses/sub-tester/ses-2/dwi/sub-tester_ses-2_acq-HASC55AP_dwi.bval
- data/twoses/sub-tester/ses-2/dwi/sub-tester_ses-2_acq-HASC55AP_dwi.bvec
- data/twoses/sub-tester/ses-2/dwi/sub-tester_ses-2_acq-HASC55AP_dwi.json
- data/twoses/sub-tester/ses-2/dwi/sub-tester_ses-2_acq-HASC55AP_dwi.nii.gz
multishell_output:
------------------
Results from running qsiprep on a simulated ABCD (multi-shell) dataset
Contents:
^^^^^^^^^
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_desc-brain_mask.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_desc-preproc_T1w.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_dseg.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_from-MNI152NLin2009cAsym_to-T1w_mode-image_xfm.h5
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_from-orig_to-T1w_mode-image_xfm.txt
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_label-CSF_probseg.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_label-GM_probseg.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_label-WM_probseg.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_space-MNI152NLin2009cAsym_desc-preproc_T1w.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_space-MNI152NLin2009cAsym_dseg.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_space-MNI152NLin2009cAsym_label-CSF_probseg.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_space-MNI152NLin2009cAsym_label-GM_probseg.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/anat/sub-ABCD_space-MNI152NLin2009cAsym_label-WM_probseg.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_confounds.tsv
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_space-T1w_b0series.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_space-T1w_desc-brain_mask.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_space-T1w_desc-eddy_cnr.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_space-T1w_desc-preproc_dwi.b
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_space-T1w_desc-preproc_dwi.bval
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_space-T1w_desc-preproc_dwi.bvec
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_space-T1w_desc-preproc_dwi.nii.gz
- data/multishell_output/qsiprep/sub-ABCD/dwi/sub-ABCD_acq-10per000_space-T1w_dwiref.nii.gz
singleshell_output:
-------------------
Preprocessed data from a single-shell dataset
Contents:
^^^^^^^^^
- data/singleshell_output/qsiprep/dataset_description.json
- data/singleshell_output/qsiprep/logs/CITATION.html
- data/singleshell_output/qsiprep/logs/CITATION.md
- data/singleshell_output/qsiprep/logs/CITATION.tex
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_desc-brain_mask.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_desc-preproc_T1w.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_dseg.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_from-MNI152NLin2009cAsym_to-T1w_mode-image_xfm.h5
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_from-orig_to-T1w_mode-image_xfm.txt
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_label-CSF_probseg.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_label-GM_probseg.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_label-WM_probseg.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_space-MNI152NLin2009cAsym_desc-preproc_T1w.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_space-MNI152NLin2009cAsym_dseg.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_space-MNI152NLin2009cAsym_label-CSF_probseg.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_space-MNI152NLin2009cAsym_label-GM_probseg.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/anat/sub-PNC_space-MNI152NLin2009cAsym_label-WM_probseg.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_confounds.tsv
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_space-T1w_b0series.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_space-T1w_desc-brain_mask.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_space-T1w_desc-eddy_cnr.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_space-T1w_desc-preproc_dwi.b
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_space-T1w_desc-preproc_dwi.bval
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_space-T1w_desc-preproc_dwi.bvec
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_space-T1w_desc-preproc_dwi.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/dwi/sub-PNC_acq-realistic_space-T1w_dwiref.nii.gz
- data/singleshell_output/qsiprep/sub-PNC/figures/sub-PNC_acq-realistic_carpetplot.svg
- data/singleshell_output/qsiprep/sub-PNC/figures/sub-PNC_acq-realistic_coreg.svg
- data/singleshell_output/qsiprep/sub-PNC/figures/sub-PNC_acq-realistic_desc-sdc_b0.svg
- data/singleshell_output/qsiprep/sub-PNC/figures/sub-PNC_acq-realistic_sampling_scheme.gif
- data/singleshell_output/qsiprep/sub-PNC/figures/sub-PNC_seg_brainmask.svg
- data/singleshell_output/qsiprep/sub-PNC/figures/sub-PNC_t1_2_mni.svg
- data/singleshell_output/qsiprep/sub-PNC.html
DOC
# get_bids_data WORKDIR DATASET
# Download and unpack one named test dataset (see the doc block above for the
# dataset catalogue) into WORKDIR/data. Unknown DATASET names are silently a
# no-op. Returns with the caller's working directory restored.
get_bids_data() {
    WORKDIR=$1
    DS=$2
    ENTRYDIR=`pwd`
    mkdir -p ${WORKDIR}/data
    cd ${WORKDIR}/data
    # Down-sampled compressed sensing DSI
    if [[ ${DS} = DSCSDSI ]]; then
        ${WGET} \
            -O DSCSDSI_nofmap.tar.xz \
            "https://upenn.box.com/shared/static/eq6nvnyazi2zlt63uowqd0zhnlh6z4yv.xz"
        tar xvfJ DSCSDSI_nofmap.tar.xz -C ${WORKDIR}/data/
        rm DSCSDSI_nofmap.tar.xz
    fi
    # Get BUDS scans from downsampled CS-DSI
    if [[ ${DS} = DSCSDSI_BUDS ]]; then
        ${WGET} \
            -O dscsdsi_buds.tar.xz \
            "https://upenn.box.com/shared/static/bvhs3sw2swdkdyekpjhnrhvz89x3k87t.xz"
        tar xvfJ dscsdsi_buds.tar.xz -C ${WORKDIR}/data/
        rm dscsdsi_buds.tar.xz
    fi
    # Get downsampled DTI
    if [[ ${DS} = DSDTI ]]; then
        ${WGET} \
            -O DSDTI.tar.xz \
            "https://upenn.box.com/shared/static/iefjtvfez0c2oug0g1a9ulozqe5il5xy.xz"
        tar xvfJ DSDTI.tar.xz -C ${WORKDIR}/data/
        rm DSDTI.tar.xz
    fi
    # Get multisession CS-DSI
    if [[ ${DS} = twoses ]]; then
        ${WGET} \
            -O twoses.tar.xz \
            "https://upenn.box.com/shared/static/c949fjjhhen3ihgnzhkdw5jympm327pp.xz"
        tar xvfJ twoses.tar.xz -C ${WORKDIR}/data/
        rm twoses.tar.xz
    fi
    # Get Multi Shell outputs
    if [[ ${DS} = multishell_output ]]; then
        ${WGET} \
            -O multishell_output.tar.gz \
            "https://upenn.box.com/shared/static/nwxdn4ale8dkebvpjmxbx99dqjzwvlmh.gz"
        tar xvfz multishell_output.tar.gz -C ${WORKDIR}/data/
        rm multishell_output.tar.gz
    fi
    # name: Get Single Shell outputs
    # (this archive has no top-level directory, so unpack into its own subdir)
    if [[ ${DS} = singleshell_output ]]; then
        mkdir -p ${WORKDIR}/data/singleshell_output
        ${WGET} \
            -O singleshell_output.tar.gz \
            "https://upenn.box.com/shared/static/9jhf0eo3ml6ojrlxlz6lej09ny12efgg.gz"
        tar xvfz singleshell_output.tar.gz -C ${WORKDIR}/data/singleshell_output
        rm singleshell_output.tar.gz
    fi
    # name: Get data for fieldmap tests
    if [[ ${DS} = fmaps ]]; then
        mkdir -p ${WORKDIR}/data/fmaptests
        # Get shelled data that will go to TOPUP/eddy
        ${WGET} \
            -O DSDTI_fmap.tar.gz \
            "https://upenn.box.com/shared/static/rxr6qbi6ezku9gw3esfpnvqlcxaw7n5n.gz"
        tar xvfz DSDTI_fmap.tar.gz -C ${WORKDIR}/data/fmaptests
        rm DSDTI_fmap.tar.gz
        # Get non-shelled data that will go through SHORELine/sdcflows
        ${WGET} \
            -O DSCSDSI_fmap.tar.gz \
            "https://upenn.box.com/shared/static/l561psez1ojzi4p3a12eidaw9vbizwdc.gz"
        tar xvfz DSCSDSI_fmap.tar.gz -C ${WORKDIR}/data/fmaptests
        rm DSCSDSI_fmap.tar.gz
    fi
    cd ${ENTRYDIR}
}
cat << DOC
Docker can be tricky with permissions, so this function will
create two directories under the specified directory that
have accessible group and user permissions. eg
setup_dir my_test
will create:
- my_test/derivatives
- my_test/work
with all the permissions set such that they will be accessible
regardless of what docker does
DOC
# setup_dir DIR
# Create DIR/derivatives and DIR/work with both a default ACL (inherited by
# anything docker writes inside) and a directory ACL granting the invoking
# user's group rwx, so outputs stay accessible regardless of docker's uid
# mapping. Fix: path expansions are now quoted so directories containing
# spaces work.
setup_dir(){
    # Create the output and working directories for
    DIR=$1
    mkdir -p "${DIR}/derivatives"
    mkdir -p "${DIR}/work"
    # -d sets the default (inherited) ACL; the second call applies the same
    # ACL to the directory itself.
    setfacl -d -m group:$(id -gn):rwx "${DIR}/derivatives" && \
        setfacl -m group:$(id -gn):rwx "${DIR}/derivatives"
    setfacl -d -m group:$(id -gn):rwx "${DIR}/work" && \
        setfacl -m group:$(id -gn):rwx "${DIR}/work"
}
|
#!/bin/bash
# Secure OpenVPN server installer for Debian
# CUSTOM CIMINFO
# https://github.com/angristan/openvpn-install
# isRoot: succeed (exit status 0) only when the effective UID is 0.
function isRoot () {
	[ "$EUID" -eq 0 ]
}
# tunAvailable: succeed (exit status 0) only when /dev/net/tun exists.
function tunAvailable () {
	[ -e /dev/net/tun ]
}
# checkOS: set the global OS variable and abort (exit 1) unless running on
# Debian/Raspbian. Versions other than 8/9/10 prompt for confirmation; set
# the CONTINUE global to "y" for unattended runs.
# NOTE(review): the =~ match is unanchored, so any VERSION_ID containing the
# digit 8, 9 or 10 (e.g. "18") passes the check - confirm intended.
function checkOS () {
	if [[ -e /etc/debian_version ]]; then
		OS="debian"
		# shellcheck disable=SC1091
		source /etc/os-release
		if [[ "$ID" == "debian" || "$ID" == "raspbian" ]]; then
			if [[ ! $VERSION_ID =~ (8|9|10) ]]; then
				echo "⚠️ Your version of Debian is not supported."
				echo ""
				echo "However, if you're using Debian >= 9 or unstable/testing then you can continue."
				echo "Keep in mind they are not supported, though."
				echo ""
				until [[ $CONTINUE =~ (y|n) ]]; do
					read -rp "Continue? [y/n]: " -e CONTINUE
				done
				if [[ "$CONTINUE" = "n" ]]; then
					exit 1
				fi
			fi
		fi
	else
		echo "Looks like you aren't running this installer on a Debian system"
		exit 1
	fi
}
# initialCheck: pre-flight gate run before installation. Exits 1 unless the
# script is running as root and a TUN device is available; then delegates the
# OS check (which may itself exit).
function initialCheck () {
	if ! isRoot; then
		echo "Sorry, you need to run this as root"
		exit 1
	fi
	if ! tunAvailable; then
		echo "TUN is not available"
		exit 1
	fi
	checkOS
}
# installUnbound: install and configure the Unbound DNS resolver to serve the
# OpenVPN subnet (10.8.0.0/24) on 10.8.0.1. On a fresh install the config is
# written per-distro; if /etc/unbound/unbound.conf already exists, a separate
# /etc/unbound/openvpn.conf is included instead so the existing setup is not
# touched. Finishes by enabling and restarting the systemd unit.
# NOTE(review): the OS detection in checkOS only ever sets OS="debian", yet
# branches for centos/fedora/arch remain from the upstream script - confirm
# whether they are reachable here.
function installUnbound () {
	if [[ ! -e /etc/unbound/unbound.conf ]]; then
		if [[ "$OS" =~ (debian|ubuntu) ]]; then
			apt-get install -y unbound
			# Configuration
			echo 'interface: 10.8.0.1
access-control: 10.8.0.1/24 allow
hide-identity: yes
hide-version: yes
use-caps-for-id: yes
prefetch: yes' >> /etc/unbound/unbound.conf
		elif [[ "$OS" =~ (centos|amzn) ]]; then
			yum install -y unbound
			# Configuration
			sed -i 's|# interface: 0.0.0.0$|interface: 10.8.0.1|' /etc/unbound/unbound.conf
			sed -i 's|# access-control: 127.0.0.0/8 allow|access-control: 10.8.0.1/24 allow|' /etc/unbound/unbound.conf
			sed -i 's|# hide-identity: no|hide-identity: yes|' /etc/unbound/unbound.conf
			sed -i 's|# hide-version: no|hide-version: yes|' /etc/unbound/unbound.conf
			sed -i 's|use-caps-for-id: no|use-caps-for-id: yes|' /etc/unbound/unbound.conf
		elif [[ "$OS" = "fedora" ]]; then
			dnf install -y unbound
			# Configuration
			sed -i 's|# interface: 0.0.0.0$|interface: 10.8.0.1|' /etc/unbound/unbound.conf
			sed -i 's|# access-control: 127.0.0.0/8 allow|access-control: 10.8.0.1/24 allow|' /etc/unbound/unbound.conf
			sed -i 's|# hide-identity: no|hide-identity: yes|' /etc/unbound/unbound.conf
			sed -i 's|# hide-version: no|hide-version: yes|' /etc/unbound/unbound.conf
			sed -i 's|# use-caps-for-id: no|use-caps-for-id: yes|' /etc/unbound/unbound.conf
		elif [[ "$OS" = "arch" ]]; then
			pacman -Syu --noconfirm unbound
			# Get root servers list
			curl -o /etc/unbound/root.hints https://www.internic.net/domain/named.cache
			mv /etc/unbound/unbound.conf /etc/unbound/unbound.conf.old
			echo 'server:
use-syslog: yes
do-daemonize: no
username: "unbound"
directory: "/etc/unbound"
trust-anchor-file: trusted-key.key
root-hints: root.hints
interface: 10.8.0.1
access-control: 10.8.0.1/24 allow
port: 53
num-threads: 2
use-caps-for-id: yes
harden-glue: yes
hide-identity: yes
hide-version: yes
qname-minimisation: yes
prefetch: yes' > /etc/unbound/unbound.conf
		fi
		if [[ ! "$OS" =~ (fedora|centos|amzn) ]];then
			# DNS Rebinding fix
			echo "private-address: 10.0.0.0/8
private-address: 172.16.0.0/12
private-address: 192.168.0.0/16
private-address: 169.254.0.0/16
private-address: fd00::/8
private-address: fe80::/10
private-address: 127.0.0.0/8
private-address: ::ffff:0:0/96" >> /etc/unbound/unbound.conf
		fi
	else # Unbound is already installed
		echo 'include: /etc/unbound/openvpn.conf' >> /etc/unbound/unbound.conf
		# Add Unbound 'server' for the OpenVPN subnet
		echo 'server:
interface: 10.8.0.1
access-control: 10.8.0.1/24 allow
hide-identity: yes
hide-version: yes
use-caps-for-id: yes
prefetch: yes
private-address: 10.0.0.0/8
private-address: 172.16.0.0/12
private-address: 192.168.0.0/16
private-address: 169.254.0.0/16
private-address: fd00::/8
private-address: fe80::/10
private-address: 127.0.0.0/8
private-address: ::ffff:0:0/96' > /etc/unbound/openvpn.conf
	fi
	systemctl enable unbound
	systemctl restart unbound
}
function installQuestions () {
# Gather (or default) every setting needed by installOpenVPN. This fork
# hard-codes most choices below; only the listen IP / NAT endpoint and the
# final confirmation are interactive. All results are exported via globals
# (IP, ENDPOINT, PORT, PROTOCOL, CIPHER, ...), which installOpenVPN reads.
echo "Welcome to the OpenVPN installer!"
echo "The git repository is available at: https://github.com/angristan/openvpn-install"
echo ""
echo "I need to ask you a few questions before starting the setup."
echo "You can leave the default options and just press enter if you are ok with them."
echo ""
echo "I need to know the IPv4 address of the network interface you want OpenVPN listening to."
echo "Unless your server is behind NAT, it should be your public IPv4 address."
# Detect public IPv4 address and pre-fill for the user
IP=$(ip addr | grep 'inet' | grep -v inet6 | grep -vE '127\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | grep -oE '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | head -1)
# Only prompt when the caller did not pre-approve the detected IP
# (AUTO_INSTALL mode sets APPROVE_IP=y before calling us).
APPROVE_IP=${APPROVE_IP:-n}
if [[ $APPROVE_IP =~ n ]]; then
read -rp "IP address: " -e -i "$IP" IP
fi
# If $IP is a private IP address, the server must be behind NAT
if echo "$IP" | grep -qE '^(10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|192\.168)'; then
echo ""
echo "It seems this server is behind NAT. What is its public IPv4 address or hostname?"
echo "We need it for the clients to connect to the server."
until [[ "$ENDPOINT" != "" ]]; do
read -rp "Public IPv4 address or hostname: " -e ENDPOINT
done
fi
# Fixed (non-interactive) choices for this fork: UDP/1194, DNS option 1,
# no compression, ECDSA P-256 certificates with ECDH and tls-crypt.
IPV6_SUPPORT="n"
PORT="1194"
PROTOCOL="udp"
DNS=1
COMPRESSION_ENABLED="n"
CIPHER="AES-128-GCM"
CERT_TYPE="1" # ECDSA
CERT_CURVE="prime256v1"
CC_CIPHER="TLS-ECDHE-ECDSA-WITH-AES-128-GCM-SHA256"
DH_TYPE="1" # ECDH
DH_CURVE="prime256v1"
HMAC_ALG="SHA256"
TLS_SIG="1" # tls-crypt
echo ""
echo "Okay, that was all I needed. We are ready to setup your OpenVPN server now."
echo "You will be able to generate a client at the end of the installation."
APPROVE_INSTALL=${APPROVE_INSTALL:-n}
if [[ $APPROVE_INSTALL =~ n ]]; then
read -n1 -r -p "Press any key to continue..."
fi
}
function installOpenVPN () {
# Top-level installer. Sequence: resolve settings -> install packages ->
# build an easy-rsa PKI -> write server.conf -> enable forwarding, SELinux
# port, systemd units -> iptables scripts -> client template -> local bind9
# zone for cimvision.local -> create the first client via newClient.
if [[ $AUTO_INSTALL == "y" ]]; then
# Set default choices so that no questions will be asked.
APPROVE_INSTALL=${APPROVE_INSTALL:-y}
APPROVE_IP=${APPROVE_IP:-y}
IPV6_SUPPORT=${IPV6_SUPPORT:-n}
PORT_CHOICE=${PORT_CHOICE:-1}
PROTOCOL_CHOICE=${PROTOCOL_CHOICE:-1}
DNS=${DNS:-1}
COMPRESSION_ENABLED=${COMPRESSION_ENABLED:-n}
CUSTOMIZE_ENC=${CUSTOMIZE_ENC:-n}
CLIENT=${CLIENT:-client}
PASS=${PASS:-1}
CONTINUE=${CONTINUE:-y}
# Behind NAT, we'll default to the publicly reachable IPv4.
# NOTE(review): curl has no -s/-f here; on network failure ENDPOINT ends up empty.
PUBLIC_IPV4=$(curl ifconfig.co)
ENDPOINT=${ENDPOINT:-$PUBLIC_IPV4}
fi
# Run setup questions first, and set other variales if auto-install
installQuestions
# Get the "public" interface from the default route
NIC=$(ip -4 route ls | grep default | grep -Po '(?<=dev )(\S+)' | head -1)
if [[ "$OS" =~ (debian|ubuntu) ]]; then
apt-get update
apt-get -y install ca-certificates gnupg
# We add the OpenVPN repo to get the latest version.
if [[ "$VERSION_ID" = "8" ]]; then
echo "deb http://build.openvpn.net/debian/openvpn/stable jessie main" > /etc/apt/sources.list.d/openvpn.list
wget -O - https://swupdate.openvpn.net/repos/repo-public.gpg | apt-key add -
apt-get update
fi
if [[ "$VERSION_ID" = "16.04" ]]; then
echo "deb http://build.openvpn.net/debian/openvpn/stable xenial main" > /etc/apt/sources.list.d/openvpn.list
wget -O - https://swupdate.openvpn.net/repos/repo-public.gpg | apt-key add -
apt-get update
fi
# Ubuntu > 16.04 and Debian > 8 have OpenVPN >= 2.4 without the need of a third party repository.
apt-get install -y openvpn iptables openssl wget ca-certificates curl bind9 dnsutils
else
# This fork only installs packages on Debian/Ubuntu; bail out otherwise.
exit 1
fi
# Find out if the machine uses nogroup or nobody for the permissionless group
if grep -qs "^nogroup:" /etc/group; then
NOGROUP=nogroup
else
NOGROUP=nobody
fi
# An old version of easy-rsa was available by default in some openvpn packages
if [[ -d /etc/openvpn/easy-rsa/ ]]; then
rm -rf /etc/openvpn/easy-rsa/
fi
# Install the latest version of easy-rsa from source
local version="3.0.6"
wget -O ~/EasyRSA-unix-v${version}.tgz https://github.com/OpenVPN/easy-rsa/releases/download/v${version}/EasyRSA-unix-v${version}.tgz
tar xzf ~/EasyRSA-unix-v${version}.tgz -C ~/
mv ~/EasyRSA-v${version} /etc/openvpn/easy-rsa
chown -R root:root /etc/openvpn/easy-rsa/
rm -f ~/EasyRSA-unix-v${version}.tgz
cd /etc/openvpn/easy-rsa/ || return
# Write easy-rsa "vars" according to the certificate type chosen in
# installQuestions (1 = elliptic curve, 2 = RSA).
case $CERT_TYPE in
1)
echo "set_var EASYRSA_ALGO ec" > vars
echo "set_var EASYRSA_CURVE $CERT_CURVE" >> vars
;;
2)
echo "set_var EASYRSA_KEY_SIZE $RSA_KEY_SIZE" > vars
;;
esac
# Generate a random, alphanumeric identifier of 16 characters for CN and one for server name
SERVER_CN="cn_cimvision_$(head /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1)"
SERVER_NAME="server_cimvision_$(head /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1)"
echo "set_var EASYRSA_REQ_CN $SERVER_CN" >> vars
# Create the PKI, set up the CA, the DH params and the server certificate
./easyrsa init-pki
# Workaround to remove unharmful error until easy-rsa 3.0.7
# https://github.com/OpenVPN/easy-rsa/issues/261
sed -i 's/^RANDFILE/#RANDFILE/g' pki/openssl-easyrsa.cnf
./easyrsa --batch build-ca nopass
if [[ $DH_TYPE == "2" ]]; then
# ECDH keys are generated on-the-fly so we don't need to generate them beforehand
openssl dhparam -out dh.pem $DH_KEY_SIZE
fi
./easyrsa build-server-full "$SERVER_NAME" nopass
EASYRSA_CRL_DAYS=3650 ./easyrsa gen-crl
case $TLS_SIG in
1)
# Generate tls-crypt key
openvpn --genkey --secret /etc/openvpn/tls-crypt.key
;;
2)
# Generate tls-auth key
openvpn --genkey --secret /etc/openvpn/tls-auth.key
;;
esac
# Move all the generated files
cp pki/ca.crt pki/private/ca.key "pki/issued/$SERVER_NAME.crt" "pki/private/$SERVER_NAME.key" /etc/openvpn/easy-rsa/pki/crl.pem /etc/openvpn
if [[ $DH_TYPE == "2" ]]; then
cp dh.pem /etc/openvpn
fi
# Make cert revocation list readable for non-root
chmod 644 /etc/openvpn/crl.pem
# Generate server.conf
echo "port $PORT" > /etc/openvpn/server.conf
if [[ "$IPV6_SUPPORT" = 'n' ]]; then
echo "proto $PROTOCOL" >> /etc/openvpn/server.conf
elif [[ "$IPV6_SUPPORT" = 'y' ]]; then
echo "proto ${PROTOCOL}6" >> /etc/openvpn/server.conf
fi
echo "dev tun
user nobody
group $NOGROUP
persist-key
persist-tun
keepalive 10 120
topology subnet
server 10.8.0.0 255.255.255.0
ifconfig-pool-persist ipp.txt" >> /etc/openvpn/server.conf
if [[ $DH_TYPE == "1" ]]; then
echo "dh none" >> /etc/openvpn/server.conf
echo "ecdh-curve $DH_CURVE" >> /etc/openvpn/server.conf
elif [[ $DH_TYPE == "2" ]]; then
echo "dh dh.pem" >> /etc/openvpn/server.conf
fi
case $TLS_SIG in
1)
echo "tls-crypt tls-crypt.key 0" >> /etc/openvpn/server.conf
;;
2)
echo "tls-auth tls-auth.key 0" >> /etc/openvpn/server.conf
;;
esac
# Remaining server options; clients get DNS pointed at the VPN gateway
# (10.8.0.1) and per-client config is read from the ccd directory.
echo "crl-verify crl.pem
ca ca.crt
cert $SERVER_NAME.crt
key $SERVER_NAME.key
auth $HMAC_ALG
cipher $CIPHER
ncp-ciphers $CIPHER
tls-server
tls-version-min 1.2
tls-cipher $CC_CIPHER
push \"dhcp-option DNS 10.8.0.1\"
push \"route 10.8.0.0 255.255.255.0\"
status /var/log/openvpn/status.log
verb 3
client-config-dir ccd" >> /etc/openvpn/server.conf
# Create log dir
mkdir -p /var/log/openvpn
# Create ccd dir
mkdir -p /etc/openvpn/ccd
# Enable routing
echo 'net.ipv4.ip_forward=1' >> /etc/sysctl.d/20-openvpn.conf
if [[ "$IPV6_SUPPORT" = 'y' ]]; then
echo 'net.ipv6.conf.all.forwarding=1' >> /etc/sysctl.d/20-openvpn.conf
fi
# Apply sysctl rules
sysctl --system
# If SELinux is enabled and a custom port was selected, we need this
if hash sestatus 2>/dev/null; then
if sestatus | grep "Current mode" | grep -qs "enforcing"; then
if [[ "$PORT" != '1194' ]]; then
semanage port -a -t openvpn_port_t -p "$PROTOCOL" "$PORT"
fi
fi
fi
# Finally, restart and enable OpenVPN
if [[ "$OS" = 'arch' || "$OS" = 'fedora' || "$OS" = 'centos' ]]; then
# Don't modify package-provided service
cp /usr/lib/systemd/system/openvpn-server@.service /etc/systemd/system/openvpn-server@.service
# Workaround to fix OpenVPN service on OpenVZ
sed -i 's|LimitNPROC|#LimitNPROC|' /etc/systemd/system/openvpn-server@.service
# Another workaround to keep using /etc/openvpn/
sed -i 's|/etc/openvpn/server|/etc/openvpn|' /etc/systemd/system/openvpn-server@.service
# On fedora, the service hardcodes the ciphers. We want to manage the cipher ourselves, so we remove it from the service
if [[ "$OS" == "fedora" ]];then
sed -i 's|--cipher AES-256-GCM --ncp-ciphers AES-256-GCM:AES-128-GCM:AES-256-CBC:AES-128-CBC:BF-CBC||' /etc/systemd/system/openvpn-server@.service
fi
systemctl daemon-reload
systemctl restart openvpn-server@server
systemctl enable openvpn-server@server
elif [[ "$OS" == "ubuntu" ]] && [[ "$VERSION_ID" == "16.04" ]]; then
# On Ubuntu 16.04, we use the package from the OpenVPN repo
# This package uses a sysvinit service
systemctl enable openvpn
systemctl start openvpn
else
# Don't modify package-provided service
cp /lib/systemd/system/openvpn\@.service /etc/systemd/system/openvpn\@.service
# Workaround to fix OpenVPN service on OpenVZ
sed -i 's|LimitNPROC|#LimitNPROC|' /etc/systemd/system/openvpn\@.service
# Another workaround to keep using /etc/openvpn/
sed -i 's|/etc/openvpn/server|/etc/openvpn|' /etc/systemd/system/openvpn\@.service
systemctl daemon-reload
systemctl restart openvpn@server
systemctl enable openvpn@server
fi
if [[ $DNS == 2 ]];then
installUnbound
fi
# Add iptables rules in two scripts
mkdir /etc/iptables
# IP of the check_mk monitoring master: the only peer allowed to exchange
# traffic with other VPN clients (see the tun0<->tun0 FORWARD rules below).
# NOTE(review): read without -r mangles backslashes in the typed input.
read -p "IP check_mk master :" CME_IP
# Script to add rules
echo "#!/bin/sh
iptables -I INPUT 1 -i $NIC -p $PROTOCOL --dport $PORT -j ACCEPT
iptables -I INPUT 1 -i tun0 -j ACCEPT
iptables -I FORWARD 1 -o tun0 -i tun0 -j DROP
iptables -I FORWARD 1 -d $CME_IP -i tun0 -o tun0 -j ACCEPT
iptables -I FORWARD 1 -s $CME_IP -i tun0 -o tun0 -j ACCEPT" > /etc/iptables/add-openvpn-rules.sh
# Script to remove rules
echo "#!/bin/sh
iptables -D INPUT -i tun0 -j ACCEPT
iptables -D FORWARD -o tun0 -i tun0 -j DROP
iptables -D FORWARD -d $CME_IP -i tun0 -o tun0 -j ACCEPT
iptables -D FORWARD -s $CME_IP -i tun0 -o tun0 -j ACCEPT
iptables -D INPUT -i $NIC -p $PROTOCOL --dport $PORT -j ACCEPT" > /etc/iptables/rm-openvpn-rules.sh
chmod +x /etc/iptables/add-openvpn-rules.sh
chmod +x /etc/iptables/rm-openvpn-rules.sh
# Handle the rules via a systemd script
echo "[Unit]
Description=iptables rules for OpenVPN
Before=network-online.target
Wants=network-online.target
[Service]
Type=oneshot
ExecStart=/etc/iptables/add-openvpn-rules.sh
ExecStop=/etc/iptables/rm-openvpn-rules.sh
RemainAfterExit=yes
[Install]
WantedBy=multi-user.target" > /etc/systemd/system/iptables-openvpn.service
# Enable service and apply rules
systemctl daemon-reload
systemctl enable iptables-openvpn
systemctl start iptables-openvpn
# If the server is behind a NAT, use the correct IP address for the clients to connect to
if [[ "$ENDPOINT" != "" ]]; then
IP=$ENDPOINT
fi
# client-template.txt is created so we have a template to add further users later
echo "client" > /etc/openvpn/client-template.txt
if [[ "$PROTOCOL" = 'udp' ]]; then
echo "proto udp" >> /etc/openvpn/client-template.txt
elif [[ "$PROTOCOL" = 'tcp' ]]; then
echo "proto tcp-client" >> /etc/openvpn/client-template.txt
fi
echo "remote $IP $PORT
dev tun
resolv-retry infinite
nobind
persist-key
persist-tun
remote-cert-tls server
verify-x509-name $SERVER_NAME name
auth $HMAC_ALG
auth-nocache
cipher $CIPHER
tls-client
tls-version-min 1.2
tls-cipher $CC_CIPHER
setenv opt block-outside-dns # Prevent Windows 10 DNS leak
verb 3" >> /etc/openvpn/client-template.txt
# Point the local resolver at the bind9 instance serving cimvision.local.
# NOTE(review): this overwrites /etc/resolv.conf outright -- any existing
# resolver configuration on the host is lost.
echo "Configuration bind9"
echo "domain cimvision.local
search cimvision.local
nameserver 127.0.0.1" > /etc/resolv.conf
# Declare a dynamically-updatable (nsupdate from 127.0.0.1) master zone.
cat >/etc/bind/cimvision.local.hosts <<FIN
zone "cimvision.local" {
type master;
file "/etc/bind/dynamic/db.cimvision";
allow-update {127.0.0.1;};
};
FIN
cat >>/etc/bind/named.conf.local <<FIN
include "/etc/bind/cimvision.local.hosts";
FIN
mkdir /etc/bind/dynamic
chown bind /etc/bind/dynamic
# Seed the zone: SOA plus an A record for the VPN gateway itself.
cat >/etc/bind/dynamic/db.cimvision <<FIN
\$TTL 1800 ;; 30 minutes
@ IN SOA cimvision.local. ciminfo.ciminfo.fr. (
1 ; Serial
604800 ; Refresh
86400 ; Retry
2419200 ; Expire
604800 ) ; Negative Cache TTL
;
NS localhost.
openvpn A 10.8.0.1
FIN
systemctl enable bind9
systemctl start bind9
if [[ $COMPRESSION_ENABLED == "y" ]]; then
echo "compress $COMPRESSION_ALG" >> /etc/openvpn/client-template.txt
fi
# Generate the custom client.ovpn
newClient
echo "If you want to add more clients, you simply need to run this script another time!"
}
function newClient () {
# Create a new VPN client: build its certificate, emit a personalised .ovpn
# file in the caller's home directory, pin a static VPN IP via a ccd entry
# and register a matching A record in the cimvision.local zone.
# Reads/sets globals: CLIENT, STATIC_IP, TLS_SIG. Exits the script on success.
echo ""
echo "Tell me a name for the client."
echo "Use one word only, no special characters."
until [[ "$CLIENT" =~ ^[a-zA-Z0-9_]+$ ]]; do
read -rp "Client name: " -e CLIENT
done
# Fix: guard the cd (consistent with revokeClient) so a missing easy-rsa
# directory does not run easyrsa from an arbitrary working directory.
cd /etc/openvpn/easy-rsa/ || return
./easyrsa build-client-full "$CLIENT" nopass
# Home directory of the user, where the client configuration (.ovpn) will be written
if [ -e "/home/$CLIENT" ]; then # if $1 is a user name
homeDir="/home/$CLIENT"
elif [ "${SUDO_USER}" ]; then # if not, use SUDO_USER
homeDir="/home/${SUDO_USER}"
else # if not SUDO_USER, use /root
homeDir="/root"
fi
# Determine if we use tls-auth or tls-crypt
if grep -qs "^tls-crypt" /etc/openvpn/server.conf; then
TLS_SIG="1"
elif grep -qs "^tls-auth" /etc/openvpn/server.conf; then
TLS_SIG="2"
fi
# Generates the custom client.ovpn: template + inlined CA/cert/key material.
cp /etc/openvpn/client-template.txt "$homeDir/$CLIENT.ovpn"
{
echo "<ca>"
cat "/etc/openvpn/easy-rsa/pki/ca.crt"
echo "</ca>"
echo "<cert>"
awk '/BEGIN/,/END/' "/etc/openvpn/easy-rsa/pki/issued/$CLIENT.crt"
echo "</cert>"
echo "<key>"
cat "/etc/openvpn/easy-rsa/pki/private/$CLIENT.key"
echo "</key>"
case $TLS_SIG in
1)
echo "<tls-crypt>"
cat /etc/openvpn/tls-crypt.key
echo "</tls-crypt>"
;;
2)
echo "key-direction 1"
echo "<tls-auth>"
cat /etc/openvpn/tls-auth.key
echo "</tls-auth>"
;;
esac
} >> "$homeDir/$CLIENT.ovpn"
echo ""
# Fix: escape the dots so only a real 10.8.0.x address is accepted
# (previously "." matched any character, so e.g. "10a8b0c5" passed).
until [[ "$STATIC_IP" =~ ^10\.8\.0\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ ]]; do
# Fix: -i only takes effect with readline (-e); without -e the
# "10.8.0." pre-fill was silently ignored (see bash read builtin docs).
read -rp "Adresse IP statique ? (10.8.0.0/24): " -e -i "10.8.0." STATIC_IP
done
# Fix: quote the ccd path so the redirection target is a single word.
cat > "/etc/openvpn/ccd/$CLIENT" <<FIN
ifconfig-push $STATIC_IP 255.255.255.0
FIN
# Register <client>.cimvision.local -> static IP in the local bind9 zone.
nsupdate <<FIN
server 127.0.0.1
zone cimvision.local
update add $CLIENT.cimvision.local. 1800 A $STATIC_IP
send
FIN
echo 'Ip statique et enregistrement DNS ajouté avec succès !'
echo ""
echo "Client $CLIENT added, the configuration file is available at $homeDir/$CLIENT.ovpn."
echo "Download the .ovpn file and import it in your OpenVPN client."
exit 0
}
function revokeClient () {
# Revoke an existing client certificate and clean up every trace of it:
# PKI files, the published CRL, generated .ovpn files, the ccd static-IP
# entry, the ipp.txt lease line and the client's DNS A record.
NUMBEROFCLIENTS=$(tail -n +2 /etc/openvpn/easy-rsa/pki/index.txt | grep -c "^V")
if [[ "$NUMBEROFCLIENTS" = '0' ]]; then
echo ""
echo "You have no existing clients!"
exit 1
fi
echo ""
echo "Select the existing client certificate you want to revoke"
# Valid ("V") certs only, numbered for selection.
tail -n +2 /etc/openvpn/easy-rsa/pki/index.txt | grep "^V" | cut -d '=' -f 2 | nl -s ') '
if [[ "$NUMBEROFCLIENTS" = '1' ]]; then
read -rp "Select one client [1]: " CLIENTNUMBER
else
read -rp "Select one client [1-$NUMBEROFCLIENTS]: " CLIENTNUMBER
fi
CLIENT=$(tail -n +2 /etc/openvpn/easy-rsa/pki/index.txt | grep "^V" | cut -d '=' -f 2 | sed -n "$CLIENTNUMBER"p)
cd /etc/openvpn/easy-rsa/ || return
./easyrsa --batch revoke "$CLIENT"
EASYRSA_CRL_DAYS=3650 ./easyrsa gen-crl
# Cleanup
if [[ -f "/etc/openvpn/ccd/$CLIENT" ]]; then rm -f "/etc/openvpn/ccd/$CLIENT"; fi
rm -f "pki/reqs/$CLIENT.req"
rm -f "pki/private/$CLIENT.key"
rm -f "pki/issued/$CLIENT.crt"
rm -f /etc/openvpn/crl.pem
cp /etc/openvpn/easy-rsa/pki/crl.pem /etc/openvpn/crl.pem
# CRL must be readable by the unprivileged OpenVPN process.
chmod 644 /etc/openvpn/crl.pem
find /home/ -maxdepth 2 -name "$CLIENT.ovpn" -delete
rm -f "/root/$CLIENT.ovpn"
# Fix: delete the client's lease line outright. The previous substitution
# ("s|^$CLIENT,.*||") only blanked it, so ipp.txt accumulated an empty
# line for every revocation.
sed -i "/^$CLIENT,/d" /etc/openvpn/ipp.txt
# Remove the client's DNS record from the local bind9 zone.
nsupdate <<FIN
server 127.0.0.1
zone cimvision.local
update remove $CLIENT.cimvision.local.
send
FIN
echo ""
echo "Certificate for client $CLIENT revoked."
}
function removeUnbound () {
# Undo the Unbound configuration added by installUnbound: always strip the
# OpenVPN-specific include/snippet, then optionally purge Unbound entirely.
# Remove OpenVPN-related config
sed -i 's|include: \/etc\/unbound\/openvpn.conf||' /etc/unbound/unbound.conf
rm /etc/unbound/openvpn.conf
# Restart with the OpenVPN snippet removed before asking about full removal.
systemctl restart unbound
until [[ $REMOVE_UNBOUND =~ (y|n) ]]; do
echo ""
echo "If you were already using Unbound before installing OpenVPN, I removed the configuration related to OpenVPN."
read -rp "Do you want to completely remove Unbound? [y/n]: " -e REMOVE_UNBOUND
done
if [[ "$REMOVE_UNBOUND" = 'y' ]]; then
# Stop Unbound
systemctl stop unbound
# Pick the package manager from $OS (presumably set by initialCheck -- not
# visible in this chunk; confirm there).
if [[ "$OS" =~ (debian|ubuntu) ]]; then
apt-get autoremove --purge -y unbound
elif [[ "$OS" = 'arch' ]]; then
pacman --noconfirm -R unbound
elif [[ "$OS" =~ (centos|amzn) ]]; then
yum remove -y unbound
elif [[ "$OS" = 'fedora' ]]; then
dnf remove -y unbound
fi
rm -rf /etc/unbound/
echo ""
echo "Unbound removed!"
else
echo ""
echo "Unbound wasn't removed."
fi
}
function removeOpenVPN () {
# Fully uninstall OpenVPN: stop/disable services, drop the iptables units,
# undo the SELinux port mapping, remove the package, and wipe configuration,
# logs and all generated .ovpn files. Also removes Unbound config if present.
echo ""
# shellcheck disable=SC2034
read -rp "Do you really want to remove OpenVPN? [y/n]: " -e -i n REMOVE
if [[ "$REMOVE" = 'y' ]]; then
# Get OpenVPN port from the configuration
PORT=$(grep '^port ' /etc/openvpn/server.conf | cut -d " " -f 2)
# Stop OpenVPN
if [[ "$OS" =~ (fedora|arch|centos) ]]; then
systemctl disable openvpn-server@server
systemctl stop openvpn-server@server
# Remove customised service
rm /etc/systemd/system/openvpn-server@.service
elif [[ "$OS" == "ubuntu" ]] && [[ "$VERSION_ID" == "16.04" ]]; then
systemctl disable openvpn
systemctl stop openvpn
else
systemctl disable openvpn@server
systemctl stop openvpn@server
# Remove customised service
rm /etc/systemd/system/openvpn\@.service
fi
# Remove the iptables rules related to the script
systemctl stop iptables-openvpn
# Cleanup
systemctl disable iptables-openvpn
rm /etc/systemd/system/iptables-openvpn.service
systemctl daemon-reload
rm /etc/iptables/add-openvpn-rules.sh
rm /etc/iptables/rm-openvpn-rules.sh
# SELinux
# NOTE(review): the port was added at install time with -p "$PROTOCOL" but
# is removed here hard-coded as -p udp; a TCP install leaves a stale rule.
if hash sestatus 2>/dev/null; then
if sestatus | grep "Current mode" | grep -qs "enforcing"; then
if [[ "$PORT" != '1194' ]]; then
semanage port -d -t openvpn_port_t -p udp "$PORT"
fi
fi
fi
# Remove the package with the distribution's package manager.
if [[ "$OS" =~ (debian|ubuntu) ]]; then
apt-get autoremove --purge -y openvpn
if [[ -e /etc/apt/sources.list.d/openvpn.list ]];then
rm /etc/apt/sources.list.d/openvpn.list
apt-get update
fi
elif [[ "$OS" = 'arch' ]]; then
pacman --noconfirm -R openvpn
elif [[ "$OS" =~ (centos|amzn) ]]; then
yum remove -y openvpn
elif [[ "$OS" = 'fedora' ]]; then
dnf remove -y openvpn
fi
# Cleanup
find /home/ -maxdepth 2 -name "*.ovpn" -delete
find /root/ -maxdepth 1 -name "*.ovpn" -delete
rm -rf /etc/openvpn
rm -rf /usr/share/doc/openvpn*
rm -f /etc/sysctl.d/20-openvpn.conf
rm -rf /var/log/openvpn
# Unbound
# Only present when DNS option 2 (self-hosted Unbound) was chosen at install.
if [[ -e /etc/unbound/openvpn.conf ]]; then
removeUnbound
fi
echo ""
echo "OpenVPN removed!"
else
echo ""
echo "Removal aborted!"
fi
}
function manageMenu () {
# Interactive menu shown when an existing installation is detected.
# Loops until a valid option (1-4) is entered, then dispatches to the
# matching action. MENU_OPTION is intentionally left global, matching the
# rest of the script's conventions.
clear
echo "Welcome to OpenVPN-install!"
echo "The git repository is available at: https://github.com/angristan/openvpn-install"
echo ""
echo "It looks like OpenVPN is already installed."
echo ""
echo "What do you want to do?"
echo "   1) Add a new user"
echo "   2) Revoke existing user"
echo "   3) Remove OpenVPN"
echo "   4) Exit"
while ! [[ "$MENU_OPTION" =~ ^[1-4]$ ]]; do
read -rp "Select an option [1-4]: " MENU_OPTION
done
if [[ "$MENU_OPTION" == "1" ]]; then
newClient
elif [[ "$MENU_OPTION" == "2" ]]; then
revokeClient
elif [[ "$MENU_OPTION" == "3" ]]; then
removeOpenVPN
else
exit 0
fi
}
# Script entry point.
# Check for root, TUN, OS...
initialCheck
# Check if OpenVPN is already installed
if [[ -e /etc/openvpn/server.conf ]]; then
# Existing installation: offer the add/revoke/remove menu.
manageMenu
else
# Fresh system: run the full installation.
installOpenVPN
fi
|
require 'twilio-ruby'
# Initialize the client
# To set up environmental variables, see http://twil.io/secure
account_sid = ENV['TWILIO_ACCOUNT_SID']
auth_token = ENV['TWILIO_AUTH_TOKEN']
client = Twilio::REST::Client.new(account_sid, auth_token)
# Retrieve the service
service = client.sync.v1.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')
# Update a Sync List Item (index 1) in list "MyCollection".
# Fix: pass a Hash so twilio-ruby serializes it to valid JSON. The previous
# string of single-quoted pseudo-JSON ("{ 'number': ... }") is not valid
# JSON and would be rejected by the Sync API.
response = service.sync_lists('MyCollection').sync_list_items(1).update(
  data: { number: '001', name: 'Bulbasaur', attack: '185' }
)
puts response
|
Generate a 20x20 matrix of random values between 0 and 1. This matrix will serve as the input to a linear regression algorithm. |
#!/bin/bash
# Finalise a Tesseract LSTM training run: convert the most recent layer
# checkpoint into a standalone .traineddata model under data/.
model=xsanew
lstmtraining \
--stop_training \
--continue_from "data/${model}/checkpoints/${model}Layer_checkpoint" \
--traineddata "data/${model}/${model}.traineddata" \
--model_output "data/${model}Layer.traineddata"
|
package com.coltsoftware.liquidsledgehammer.subtransactions.strategies;
import com.coltsoftware.liquidsledgehammer.model.FinancialTransaction;
/**
 * Strategy for producing a value for the portion of a
 * {@link FinancialTransaction} that has not been assigned to any
 * sub-transaction.
 *
 * NOTE(review): the exact semantics of the returned string (group label?
 * description?) are defined by the implementations, which are not visible
 * here -- confirm against the callers in the subtransactions package.
 */
public interface UnassignedValueStrategy {
/**
 * @param transaction the transaction whose unassigned portion is being resolved
 * @return the string value to use for the transaction's unassigned portion
 */
String unassigned(FinancialTransaction transaction);
}
|
/*****************************************************************************
* Licensed to Qualys, Inc. (QUALYS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* QUALYS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
****************************************************************************/
/**
* @file
* @brief IronBee --- Example Module: Set (C Version)
*
* This file is the C implementation of the Set example module. There is
* also a C++ (ibmod_set.cpp) implementation.
*
* @par Summary
* This module provides set membership of named sets. It is similar
* to the `@match` and `@imatch` operators except that sets are defined
* outside of rules via directives rather than inline as arguments to the
* operator. Defining sets via directives is superior when sets will be
* reused across multiple rules.
*
* @par Operators
* - `@set_match set` -- True iff input is in set named `set`. Supports
* streaming and non-streaming rules as well as NULL input but does not
* capture.
*
* @par Directives
* - `SetDefine set member1...` -- Create a case sensitive set named `set`
* with members given by later arguments.
* - `SetDefineInsensitive set member1...` -- As `SetDefine` but case
* insensitive.
* - `SetDefineFromFile set path` -- As `SetDefine` but members are read
* from file at `path`, one item per line.
* - `SetDefineInsensitiveFromFile` -- As `SetDefineFromFile` but case
* insensitive.
*
* @par Configuration
* - `Set set.debug 1` -- Turn on debugging information for the current
* context. Will log every membership query.
*
* @par Note
* The operator has access to all the sets defined in its context and any
* ancestor context. It does not have access to sets defined in other
* contexts. Similarly, it is an error to create a new set with the same
* name as a set in current context or any ancestor context, but not an error
* to create a set with the same name as a set in other contexts.
*
* @par C specific comments
* - This implementation uses `ib_hash_t` with trivial (`(void *)1`) values
* as the underlying datastructure. It uses memory pools to manage its
* state lifetime.
* - The C API makes heavy use of callback functions. All callbacks are a
* pair of a C function pointer and a `void *` pointer known as the
* "callback data". The callback data is always passed to the callback
* functions as the final argument. Callback data allows a single C
* function to be used in multiple callbacks, distinguished by the callback
* data, and allows data to be transmitted from the registration location
* to the execution location. The C++ API makes heavy use of callback data
* to trampoline the C callbacks to C++ functionals.
* - The C module definition code centers around carefully constructed static
* structures that are passed to the engine when the module loads. This
* approach makes simple cases easy as demonstrated in this module. More
* complex behavior, however, requires programmatic setup in the
* initialization function.
* - Comprehensible errors, such as incorrect user usage, are handled. Other
* errors are simply asserted. They represent either a misunderstanding of
* the API or unrecoverable engine problems.
*
* @author <NAME> <<EMAIL>>
*/
/* See `man 7 feature_test_macros` on certain Linux flavors. */
#define _POSIX_C_SOURCE 200809L
/* IronBee has a canonical header order, exemplified in this module. It is
* not required for third party development.
*
* Headers are divided into sections, more or less:
*
* - The autoconf config file containing information about configure.
* - For implementation files, the public header file.
* - Any corresponding private header file.
* - Headers for the framework the current file is party of.
* - IronBee++
* - IronBee
* - Third party libraries, e.g., boost.
* - Standard library.
*
* Within each section, includes are arranged alphabetically.
*
* The order is, more or less, specific to general and is arranged as such to
* increase the chance of catching missing includes.
*/
#include <ironbee/context.h>
#include <ironbee/engine.h>
#include <ironbee/engine_state.h>
#include <ironbee/hash.h>
#include <ironbee/module.h>
#include <assert.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#ifdef NDEBUG
#warning "NDEBUG is inappropriate. Disabling."
#undef NDEBUG
#endif
/** Name of module */
#define MODULE_NAME set
/** MODULE_NAME as string */
#define MODULE_NAME_STR IB_XSTRINGIFY(MODULE_NAME)
/** The public module symbol */
IB_MODULE_DECLARE();
/**
* Per-Configuration Context Data
*
* A @ref per_context_t will be created for each configuration context and
* will hold module data specific to that context. The first one will be
* created as a copy of @ref c_per_context_initial. Later ones will be
* created as copies of the parent's @ref per_context_t.
*
* The function `ctx_open()` will be called at the beginning of every
* context. It will create a new hash, copy the existing (parent's) @c sets
* member into the new hash, and then set the @c sets member to the new hash.
* In this way, each child will know of all the sets of its parent but any
* sets it defines will not be added to the parents @c sets hash.
**/
typedef struct {
/**
 * Index of set by set name.
 *
 * Value type will be: `const ib_hash_t *`
 *
 * This hash, but not the hashes its values point to, will be duplicated
 * for children. Thus children can access sets defined in parent contexts
 * but not those defined in sibling or child contexts.
 *
 * NOTE(review): presumably never NULL once a context is open (set up in
 * init() for the main context, context_open() for children) -- confirm
 * in those function bodies.
 **/
ib_hash_t *sets;
/**
 * If 1, log queries.
 *
 * This member is an @ref ib_num_t in order to interact with the
 * configuration map code. The configuration map code makes it easy for
 * module writers to expose members of their per-context data to the
 * configuration language. However, doing so requires that those members
 * have types based on the field code.
 *
 * @sa field.h
 * @sa cfgmap.h
 **/
ib_num_t debug;
} per_context_t;
/**
* Per-Operator Instance data.
*
* Every time the `set_member` operator is used in a rule, operator_create()
* will be called. It will construct and populate one of these structures
* which will then be stored by the engine. When the rule is evaluated,
* operator_execute() will be called and provided with the this structure.
**/
typedef struct {
/**
 * The set to check membership in.
 *
 * Values are invalid pointers and should be ignored; only key presence
 * matters (the hash is used as a set).
 *
 * NOTE(review): assumed to point at a hash owned by the configuration
 * context and thus to outlive this operator instance -- confirm against
 * operator_create().
 **/
const ib_hash_t *set;
/**
 * Whether to log queries.
 *
 * This member will be true iff per_context_t::debug is 1 for the context
 * the operator was created in at operator creation.
 **/
bool debug;
/**
 * Name of set.
 *
 * Used for query logging.
 **/
const char *set_name;
} per_operator_t;
/**
* @name Helper Functions
*
* These functions are used by other functions.
*/
/*@{*/
/**
* Fetch per-context data.
*
* Helper function to fetch the per-context data for a context.
*
* @param[in] ctx Context to fetch for.
* @return Per context data.
**/
static
per_context_t *fetch_per_context(ib_context_t *ctx);
/**
* Define a set.
*
* Helper function to define a set. This function is intended to be called
* as the final part one of the set defining directives.
*
* @param[in] cp Configuration parser. This parameter is used
* to log errors in a manner that also reports the
* configuration line that caused the error. It
* also provides access to the IronBee engine.
* @param[in] case_insensitive If true, create a case insensitive set.
* @param[in] directive_name Name of directive defining set. Used for
* better log messages.
* @param[in] set_name Name of set to define.
* @param[in] items Items, as a list node. Using a list node
* rather than a list makes it easy to forward
* the tail of a list of parameters.
* @return
* - IB_OK on success.
* - IB_EOTHER on if a set with same name already exists.
**/
static
ib_status_t define_set(
ib_cfgparser_t *cp,
bool case_insensitive,
const char *directive_name,
const char *set_name,
const ib_list_node_t *items
);
/*@}*/
/**
* @name Callbacks
*
* These functions are called by the IronBee engine.
*/
/*@{*/
/**
* Initialize module.
*
* Called at module initialization. In this module we will initialize the
* per-context data for the main context and tell the engine about the
* operator. These two items are all this module uses initialization for,
* but other common uses are:
*
* - Register directives. This module uses a directive map to register
* directives, but it could instead register them here, if, e.g., it wanted
* to set up complex callback data.
* - Register per-context data. This module has simple per-context data and
* can simply provide the initial value to IB_MODULE_INIT(). More complex
* modules could register the per-context data during initialization with
* ib_module_config_initialize().
* - Register hook callbacks. Modules can register callbacks to be called
* at state transitions as the engine processes traffic.
* - Set up module state.
*
* @param[in] ib IronBee engine.
* @param[in] m This module.
* @param[in] cbdata Callback data; unused.
* @return
* - IB_OK on success.
* - IB_EOTHER if an operator named @c set_member already exists.
**/
static
ib_status_t init(
ib_engine_t *ib,
ib_module_t *m,
void *cbdata
);
/**
* Handle @c SetDefine and @c SetDefineInsensitive directives.
*
* @param[in] cp Configuration parser representing state of configuration
* handling. Can be used to access engine or report errors.
* @param[in] name Name of directive.
* @param[in] params List of `const char *` representing parameters to
* directive.
* @param[in] cbdata Callback data; case insensitive iff non-NULL.
* @return
* - IB_OK on success.
* - IB_EINVAL if less than two parameters provided.
**/
static
ib_status_t dir_define(
ib_cfgparser_t *cp,
const char *name,
const ib_list_t *params,
void *cbdata
);
/* Forward declarations: directive handlers, operator callbacks, and the
 * context-open hook.  Definitions follow IB_MODULE_INIT() below. */
/**
 * Handle @c SetDefineFromFile and @c SetDefineInsensitiveFromFile directives.
 *
 * @param[in] cp Configuration parser representing state of
 * configuration handling. Can be used to access engine
 * or report errors.
 * @param[in] name Name of directive.
 * @param[in] set_name Name of set. First parameter to directive.
 * @param[in] path Path to file of items. Second parameter to directive.
 * @param[in] cbdata Callback data; case insensitive iff non-NULL.
 * @return
 * - IB_OK on success.
 * - IB_EINVAL on file system error.
 **/
static
ib_status_t dir_define_from_file(
ib_cfgparser_t *cp,
const char *name,
const char *set_name,
const char *path,
void *cbdata
);
/**
 * Handle creation of a @c set_member instance.
 *
 * This callback is called every time the @c set_member operator is
 * instantiated. It is responsible for setting up the data needed to execute
 * the operator and returning a pointer to that data via @a instance_data.
 *
 * The canonical example of operator instantiation is when the operator
 * is used in a rule. However, there are other possibilities such as the
 * `operator` call in Predicate.
 *
 * It will create and set up a @ref per_operator_t.
 *
 * @param[in] ctx Configuration context of operator.
 * @param[in] mm Memory manager.
 * @param[in] set_name Name of set to check membership in.
 * @param[out] instance_data Instance data; will be a @ref per_operator_t.
 * @param[in] cbdata Callback data; not used.
 * @return IB_OK
 **/
static
ib_status_t operator_create(
ib_context_t *ctx,
ib_mm_t mm,
const char *set_name,
void *instance_data,
void *cbdata
);
/**
 * Handle execution of a @c set_member instance.
 *
 * This callback is called when the @c set_member operator is executed. It is
 * provided with the instance data produced by operator_create().
 *
 * It will interpret @a field as a bytestring and check for membership in the
 * set defined in @a instance_data and output whether a match is found to
 * @a result.
 *
 * @param[in] tx Current transaction.
 * @param[in] field Input to operator.
 * @param[in] capture Collection to store captured data in.
 * @c set_member does not support capture and
 * ignores this parameter. It can be used to store
 * output beyond the result.
 * @param[out] result Result of operator. 1 = true, 0 = false.
 * @param[in] instance_data Instance data produced by operator_create().
 * @param[in] cbdata Callback data; ignored.
 * @return
 * - IB_OK on success.
 * - IB_EINVAL if @a field is not a bytestring field.
 **/
static
ib_status_t operator_execute(
ib_tx_t *tx,
const ib_field_t *field,
ib_field_t *capture,
ib_num_t *result,
void *instance_data,
void *cbdata
);
/**
 * Called at open of every configuration context.
 *
 * This callback is called at the beginning of every configuration context
 * during configuration parsing. This module uses it to set up the
 * per-context data.
 *
 * Note that, as modules are loaded after the main context is opened, this
 * function will never be called for the main context. Per-context data for
 * the main context is handled in init().
 *
 * It will create a new hash for per_context_t::sets and copy the parent's
 * sets into it.
 *
 * @param[in] ib IronBee engine.
 * @param[in] ctx Current configuration context.
 * @param[in] state Which state we entered.
 * @param[in] cbdata Callback data; unused.
 *
 * @return IB_OK
 **/
static
ib_status_t context_open(
ib_engine_t *ib,
ib_context_t *ctx,
ib_state_t state,
void *cbdata
);
/*@}*/
/**
 * @name Initialization Statics
 *
 * These static variables are used to initialize the module. They should
 * never be used to hold varying state, only to provide configuration.
 */
/*@{*/
/** Initial value for per-context data. */
/* sets starts out NULL; init() creates the main context's hash and
 * context_open() creates one per child context. */
static per_context_t c_per_context_initial = {
NULL, /* sets */
0 /* debug */
};
/**
 * Configuration map.
 *
 * The configuration map is a static variable that is provided to
 * IB_MODULE_INIT() to automatically connect fields of the per-context data
 * to configuration settings. Settings can be set in configuration, e.g.,
 *
 * @code
 * Set set.debug 1
 * @endcode
 *
 * Configuration maps work through fields (see field.h) and thus require the
 * members they access to be field types. Thus, per_context_t::debug is an
 * @ref ib_num_t instead of a @c bool.
 **/
static IB_CFGMAP_INIT_STRUCTURE(c_config_map) = {
IB_CFGMAP_INIT_ENTRY(
MODULE_NAME_STR ".debug",
IB_FTYPE_NUM,
per_context_t,
debug
),
IB_CFGMAP_INIT_LAST
};
/**
 * Directive map.
 *
 * The directive map is a static variable that is provided to IB_MODULE_INIT()
 * to automatically register directives. It is also possible to register
 * directive during module initialization via ib_config_register_directive().
 * This latter approach is useful, e.g., if complex callback data is needed.
 *
 * The use of `(void *)1` below is used to indicate case insensitivity, i.e.,
 * case insensitive iff callback data is non-NULL.
 **/
static IB_DIRMAP_INIT_STRUCTURE(c_directive_map) = {
IB_DIRMAP_INIT_LIST(
"SetDefine",
dir_define, NULL
),
IB_DIRMAP_INIT_LIST(
"SetDefineInsensitive",
dir_define, (void *)1
),
IB_DIRMAP_INIT_PARAM2(
"SetDefineFromFile",
dir_define_from_file, NULL
),
IB_DIRMAP_INIT_PARAM2(
"SetDefineInsensitiveFromFile",
dir_define_from_file, (void *)1
),
IB_DIRMAP_INIT_LAST
};
/*@}*/
/**
 * Module initialization.
 *
 * This macro sets up the standard interface that IronBee uses to load
 * modules. At minimum, it requires the module name and initialization
 * function. In this module, we also provide information about the
 * per-context data, configuration map, directive map, and a context open
 * handler.
 **/
/* No finish function is registered: all allocations in this module come from
 * engine memory managers, so there is nothing to release explicitly. */
IB_MODULE_INIT(
IB_MODULE_HEADER_DEFAULTS, /* Default metadata */
MODULE_NAME_STR, /* Module name */
IB_MODULE_CONFIG(&c_per_context_initial), /* Per context data. */
c_config_map, /* Configuration map */
c_directive_map, /* Directive map */
init, NULL, /* On initialize */
NULL, NULL, /* On finish */
);
/* Finished with declarations. Remainder of file is definitions. */
/* Helpers Implementation */
/**
 * Fetch this module's per-context data for @a ctx.
 *
 * Looks the module up by name in the engine owning @a ctx and returns the
 * context-local configuration data.  Failures are treated as programming
 * errors and handled via assert() only (unchecked in NDEBUG builds).
 *
 * @param[in] ctx Configuration context; must not be NULL.
 * @return Per-context data for this module.
 **/
static
per_context_t *fetch_per_context(ib_context_t *ctx)
{
assert(ctx != NULL);
ib_status_t rc;
per_context_t *per_context = NULL;
ib_module_t *module = NULL;
rc = ib_engine_module_get(
ib_context_get_engine(ctx),
MODULE_NAME_STR,
&module
);
assert(rc == IB_OK);
rc = ib_context_module_config(ctx, module, &per_context);
assert(rc == IB_OK);
return per_context;
}
/**
 * Define a set named @a set_name in the current configuration context.
 *
 * Shared implementation behind dir_define() and dir_define_from_file():
 * creates a (possibly case insensitive) hash, inserts every item from the
 * list starting at @a items, and registers the hash in the per-context map.
 *
 * @param[in] cp               Configuration parser; used for current context
 *                             and error logging.
 * @param[in] case_insensitive Create a case insensitive set iff true.
 * @param[in] directive_name   Directive name; used in error messages only.
 * @param[in] set_name         Name of the set to define.
 * @param[in] items            First node of a list of `const char *` items.
 *                             NOTE(review): asserted non-NULL below, so an
 *                             empty item list (e.g. an empty file passed to
 *                             dir_define_from_file()) aborts in debug builds
 *                             — confirm intended.
 * @return
 * - IB_OK on success.
 * - IB_EOTHER if a set named @a set_name already exists.
 **/
static
ib_status_t define_set(
ib_cfgparser_t *cp,
bool case_insensitive,
const char *directive_name,
const char *set_name,
const ib_list_node_t *items
)
{
assert(cp != NULL);
assert(directive_name != NULL);
assert(set_name != NULL);
assert(items != NULL);
ib_status_t rc;
ib_context_t *ctx = NULL;
per_context_t *per_context = NULL;
ib_hash_t *set = NULL;
ib_mm_t mm;
/* All set data lives as long as the engine's main memory manager. */
mm = ib_engine_mm_main_get(cp->ib);
rc = ib_cfgparser_context_current(cp, &ctx);
assert(rc == IB_OK);
assert(ctx != NULL);
per_context = fetch_per_context(ctx);
assert(per_context != NULL);
assert(per_context->sets != NULL);
/* Redefinition of an existing set is a configuration error. */
rc = ib_hash_get(per_context->sets, NULL, set_name);
if (rc == IB_OK) {
ib_cfg_log_error(
cp,
"%s tried to define an already existent set: %s",
directive_name,
set_name
);
return IB_EOTHER;
}
assert(rc == IB_ENOENT);
if (case_insensitive) {
rc = ib_hash_create_nocase(&set, mm);
}
else {
rc = ib_hash_create(&set, mm);
}
assert(rc == IB_OK);
assert(set != NULL);
for (
const ib_list_node_t *n = items;
n != NULL;
n = ib_list_node_next_const(n)
) {
const char *item = ib_list_node_data_const(n);
/* Only key presence matters; (void *)1 is a dummy value. */
rc = ib_hash_set(set, ib_mm_strdup(mm, item), (void *)1);
assert(rc == IB_OK);
}
/* NOTE(review): items are strdup'd into mm above but set_name is passed
 * directly here — assumes ib_hash_set copies its key; confirm. */
rc = ib_hash_set(per_context->sets, set_name, set);
assert(rc == IB_OK);
return IB_OK;
}
/* Callbacks Implementation */
/**
 * Module initialization: create the main context's set hash, register the
 * context-open hook, and register the @c set_member operator.
 *
 * @param[in] ib     IronBee engine.
 * @param[in] m      Module structure.
 * @param[in] cbdata Callback data; unused.
 * @return
 * - IB_OK on success.
 * - IB_EOTHER if the set_member operator already exists (double load).
 **/
static
ib_status_t init(
ib_engine_t *ib,
ib_module_t *m,
void *cbdata
)
{
assert(ib != NULL);
assert(m != NULL);
ib_status_t rc;
per_context_t *per_context = NULL;
ib_mm_t mm;
/* Set up main context data. */
per_context = fetch_per_context(ib_context_main(ib));
assert(per_context != NULL);
assert(per_context->sets == NULL);
mm = ib_engine_mm_main_get(ib);
rc = ib_hash_create(&per_context->sets, mm);
assert(rc == IB_OK);
assert(per_context->sets != NULL);
/* Register context open callback to handle per context data copying. */
/* NOTE(review): return code of ib_hook_context_register() is ignored —
 * confirm that registration cannot fail here. */
ib_hook_context_register(
ib,
context_open_state,
context_open, NULL
);
/* Register operator */
rc = ib_operator_create_and_register(
NULL,
ib,
"set_member",
IB_OP_CAPABILITY_ALLOW_NULL,
operator_create, NULL,
NULL, NULL,
operator_execute, NULL
);
/* NOTE(review): rc values other than IB_OK/IB_EINVAL fall through and are
 * reported as success — confirm intended. */
if (rc == IB_EINVAL) {
ib_log_error(ib, "Operator set_member already exists. Double load?");
return IB_EOTHER;
}
return IB_OK;
}
/**
 * Handle @c SetDefine and @c SetDefineInsensitive: the first parameter is
 * the set name, all remaining parameters are set items.  See the declaration
 * earlier in this file for full parameter documentation.
 **/
static
ib_status_t dir_define(
ib_cfgparser_t *cp,
const char *name,
const ib_list_t *params,
void *cbdata
)
{
assert(cp != NULL);
assert(name != NULL);
assert(params != NULL);
/* Per the directive map, non-NULL cbdata marks the insensitive variant. */
bool case_insensitive = (cbdata != NULL);
const ib_list_node_t *param_node = NULL;
const char *set_name = NULL;
if (ib_list_elements(params) < 2) {
ib_cfg_log_error(cp, "%s requires 2 or more arguments.", name);
return IB_EINVAL;
}
param_node = ib_list_first_const(params);
assert(param_node != NULL);
set_name = ib_list_node_data_const(param_node);
/* Advance past the set name; remaining nodes are the items. */
param_node = ib_list_node_next_const(param_node);
/* Forward to define_set() */
return define_set(
cp,
case_insensitive,
name,
set_name,
param_node
);
}
/**
 * Handle @c SetDefineFromFile and @c SetDefineInsensitiveFromFile: read one
 * set item per line from @a path (trailing CR/LF stripped) and define the
 * set via define_set().  See the declaration earlier in this file for full
 * parameter documentation.
 **/
static
ib_status_t dir_define_from_file(
    ib_cfgparser_t *cp,
    const char *name,
    const char *set_name,
    const char *path,
    void *cbdata
)
{
    assert(cp != NULL);
    assert(name != NULL);
    assert(set_name != NULL);
    assert(path != NULL);

    ib_status_t rc;
    /* Per the directive map, non-NULL cbdata marks the insensitive variant. */
    bool case_insensitive = (cbdata != NULL);
    FILE *fp = NULL;
    char *buffer = NULL;
    size_t buffer_size = 0;
    ib_list_t *items = NULL;
    ib_mm_t mm;

    mm = ib_engine_mm_main_get(cp->ib);

    fp = fopen(path, "r");
    if (fp == NULL) {
        ib_cfg_log_error(
            cp,
            "%s unable to open file %s",
            name,
            path
        );
        return IB_EINVAL;
    }

    rc = ib_list_create(&items, mm);
    assert(rc == IB_OK);
    assert(items != NULL);

    for (;;) {
        char *buffer_copy;
        /* getline() returns ssize_t; the previous `int` truncated the length
         * of very long lines on LP64 platforms. */
        ssize_t read = getline(&buffer, &buffer_size, fp);
        if (read == -1) {
            if (! feof(fp)) {
                ib_cfg_log_error(
                    cp,
                    "%s had error reading from file %s: %d",
                    name,
                    path,
                    errno
                );
                free(buffer); /* getline() allocates; previously leaked. */
                fclose(fp);
                return IB_EINVAL;
            }
            else {
                break;
            }
        }
        /* Copy read+1 bytes so the terminating NUL that getline() guarantees
         * at buffer[read] is included.  The previous memdup of `read` bytes
         * produced an unterminated string when the final line of the file
         * had no trailing newline. */
        buffer_copy = ib_mm_memdup(mm, buffer, read + 1);
        assert(buffer_copy != NULL);
        /* Strip trailing CR/LF.  The `read > 0` guard is required: a blank
         * line previously drove `read` to 0 and then read buffer_copy[-1]. */
        while (
            read > 0 &&
            (buffer_copy[read - 1] == '\n' || buffer_copy[read - 1] == '\r')
        ) {
            buffer_copy[read - 1] = '\0';
            --read;
        }
        rc = ib_list_push(items, (void *)buffer_copy);
        assert(rc == IB_OK);
    }
    free(buffer); /* getline() allocates; previously leaked. */
    fclose(fp);

    /* Forward to define_set() */
    return define_set(
        cp,
        case_insensitive,
        name,
        set_name,
        ib_list_first_const(items)
    );
}
/**
 * Instantiate @c set_member: resolve @a set_name in @a ctx and build the
 * per-instance data.  See the declaration earlier in this file for full
 * parameter documentation.
 **/
static
ib_status_t operator_create(
ib_context_t *ctx,
ib_mm_t mm,
const char *set_name,
void *instance_data,
void *cbdata
)
{
assert(ctx != NULL);
assert(set_name != NULL);
assert(instance_data != NULL);
ib_status_t rc;
const per_context_t *per_context = NULL;
const ib_hash_t *set = NULL;
per_operator_t *per_operator = NULL;
per_context = fetch_per_context(ctx);
assert(per_context != NULL);
/* NOTE(review): an unknown set name makes this lookup fail, which is only
 * caught by the assert below (unchecked in NDEBUG builds) — confirm that
 * configuration guarantees the set exists. */
rc = ib_hash_get(per_context->sets, &set, set_name);
assert(rc == IB_OK);
assert(set != NULL);
per_operator = ib_mm_alloc(mm, sizeof(*per_operator));
assert(per_operator != NULL);
/* Snapshot the context's debug flag at instantiation time. */
per_operator->debug = (per_context->debug != 0);
per_operator->set = set;
per_operator->set_name = ib_mm_strdup(mm, set_name);
assert(per_operator->set_name != NULL);
/* instance_data is actually a per_operator_t** supplied by the framework. */
*(per_operator_t **)instance_data = per_operator;
return IB_OK;
}
/**
 * Execute @c set_member: interpret @a field as a bytestring and set
 * @a result to 1 iff it is a member of the configured set.  See the
 * declaration earlier in this file for full parameter documentation.
 *
 * Marked static to match its declaration and the other callbacks in this
 * file; the definition previously omitted the keyword.
 **/
static
ib_status_t operator_execute(
    ib_tx_t *tx,
    const ib_field_t *field,
    ib_field_t *capture,
    ib_num_t *result,
    void *instance_data,
    void *cbdata
)
{
    assert(tx != NULL);
    assert(instance_data != NULL);
    assert(result != NULL);

    ib_status_t rc;
    const per_operator_t *per_operator = instance_data;
    const ib_bytestr_t *input = NULL;

    assert(per_operator->set != NULL);

    /* The operator is registered with IB_OP_CAPABILITY_ALLOW_NULL; a NULL
     * input simply means "not a member". */
    if (field == NULL) {
        *result = 0;
        return IB_OK;
    }

    rc = ib_field_value_type(
        field,
        ib_ftype_bytestr_out(&input),
        IB_FTYPE_BYTESTR
    );
    if (rc == IB_EINVAL) {
        ib_log_error_tx(tx,
            "Input to set_member %s is not a bytestring.",
            per_operator->set_name
        );
        return IB_EINVAL;
    }
    assert(rc == IB_OK);

    /* Membership test: only the key's presence matters, not its value. */
    rc = ib_hash_get_ex(
        per_operator->set,
        NULL,
        (const char *)ib_bytestr_const_ptr(input),
        ib_bytestr_length(input)
    );
    if (rc == IB_ENOENT) {
        *result = 0;
    }
    else {
        assert(rc == IB_OK);
        *result = 1;
    }

    if (per_operator->debug) {
        ib_log_info_tx(tx,
            "set_member %s for %.*s = %s",
            per_operator->set_name,
            (int)ib_bytestr_length(input),
            ib_bytestr_const_ptr(input),
            (*result == 1 ? "yes" : "no")
        );
    }

    return IB_OK;
}
/**
 * Context-open hook: give the new context its own set map, pre-populated
 * with the parent's entries.  See the declaration earlier in this file for
 * full parameter documentation.
 **/
static
ib_status_t context_open(
ib_engine_t *ib,
ib_context_t *ctx,
ib_state_t state,
void *cbdata
)
{
assert(ib != NULL);
assert(ctx != NULL);
assert(state == context_open_state);
assert(cbdata == NULL);
ib_status_t rc;
ib_mm_t mm;
per_context_t *per_context = NULL;
const ib_hash_t *parent_sets = NULL;
ib_mm_t temp_mm;
ib_hash_iterator_t *iterator = NULL;
per_context = fetch_per_context(ctx);
assert(per_context != NULL);
mm = ib_context_get_mm(ctx);
/* per_context->sets still points at the parent's map at this point;
 * remember it before replacing it with a fresh hash. */
parent_sets = per_context->sets;
assert(parent_sets != NULL);
rc = ib_hash_create(&per_context->sets, mm);
assert(rc == IB_OK);
assert(per_context->sets != NULL);
/* Iterator comes from temp memory; presumably released with the engine's
 * temporary pool after configuration — confirm. */
temp_mm = ib_engine_mm_temp_get(ib);
iterator = ib_hash_iterator_create(temp_mm);
assert(iterator != NULL);
/* Shallow copy: the child gets a new map, but the set objects themselves
 * are shared with the parent (only pointers are inserted below). */
for (
ib_hash_iterator_first(iterator, parent_sets);
! ib_hash_iterator_at_end(iterator);
ib_hash_iterator_next(iterator)
) {
const char *key = NULL;
size_t key_length = 0;
const ib_hash_t *set = NULL;
ib_hash_iterator_fetch(&key, &key_length, &set, iterator);
assert(key != NULL);
assert(set != NULL);
rc = ib_hash_set_ex(
per_context->sets,
key, key_length,
(void *)set
);
assert(rc == IB_OK);
}
return IB_OK;
}
|
<reponame>batizhao/pecado
package me.batizhao.dp.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import me.batizhao.dp.domain.Code;
import me.batizhao.dp.domain.CodeMeta;
import java.util.List;
import java.util.Map;
/**
 * Service interface for code-generation records.
 *
 * @author batizhao
 * @date 2020/10/10
 */
public interface CodeService extends IService<Code> {
/**
 * Paged query of code-generation records.
 * @param page pagination object
 * @param code filter criteria
 * @return IPage<Code> one page of matching records
 */
IPage<Code> findCodes(Page<Code> page, Code code);
/**
 * Find a code-generation record by id.
 * @param id record id
 * @return Code the matching record
 */
Code findById(Long id);
/**
 * Create a code-generation record together with its column metadata.
 * @param code record to save
 * @param codeMetas column metadata belonging to the record
 * @return Code the saved record
 */
Code saveCode(Code code, List<CodeMeta> codeMetas);
/**
 * Update a code-generation record.
 * @param code record to update
 * @return Code the updated record
 */
Code updateCode(Code code);
/**
 * Create or update a code-generation record.
 * @param code record to persist
 * @return Code the persisted record
 */
Code saveOrUpdateCode(Code code);
/**
 * Delete records by id.
 * @param ids ids of the records to delete
 * @return Boolean operation result
 */
Boolean deleteByIds(List<Long> ids);
/**
 * Paged query of all tables under a data source.
 * @param page pagination object
 * @param code filter criteria
 * @param dsName data source name
 * @return IPage<Code> one page of tables
 */
IPage<Code> findTables(Page<Code> page, Code code, String dsName);
/**
 * Import the selected tables.
 * @param codes tables to import
 * @return Boolean operation result
 */
Boolean importTables(List<Code> codes);
/**
 * Generate code and bundle it as a zip archive.
 * @param ids record ids to generate for
 * @return byte[] zip archive contents
 */
byte[] downloadCode(List<Long> ids);
/**
 * Generate code to a path on disk.
 * @param id record id
 * @return Boolean operation result
 */
Boolean generateCode(Long id);
/**
 * Preview generated code.
 * @param id Code Id
 * @return generated sources keyed by name — NOTE(review): key semantics
 * inferred from usage; confirm against the implementation
 */
Map<String, String> previewCode(Long id);
/**
 * Synchronise table metadata.
 * @param id record id
 * @return Boolean operation result
 */
Boolean syncCodeMeta(Long id);
/**
 * Synchronise table columns.
 * @param id record id
 * @param codeMetas existing column metadata
 * @param dbTableColumns columns currently present in the database table
 * @return Boolean operation result
 */
Boolean syncColumn(Long id, List<CodeMeta> codeMetas, List<CodeMeta> dbTableColumns);
}
|
<reponame>Fassial/ODDB-Lab
import java.util.ArrayList;
/** Manual smoke test: parse a path into segments and print them. */
public class test_pathParser {
    /** Path handed to the parser under test. */
    final static String path = "./data/data.sqlite";

    public static void main(String[] args) {
        ArrayList<String> segments = fileToolset.pathParser(test_pathParser.path);
        System.out.println("subPath:");
        for (String segment : segments) {
            System.out.println("\t" + segment);
        }
    }
}
|
<reponame>lindq/ts-csv<filename>spec/iterator.spec.ts
import {HasNextIterator} from '../src/iterator';
// Behavioural tests for HasNextIterator: a string iterator offering a
// non-consuming hasNext() peek on top of the standard iterator protocol.
describe('HasNextIterator', () => {
it('should report hasNext', () => {
const iterator = new HasNextIterator('foo');
// hasNext() stays true until the final element has been consumed.
expect(iterator.hasNext()).toBe(true);
expect(iterator.next()).toEqual({value: 'f', done: false});
expect(iterator.hasNext()).toBe(true);
expect(iterator.next()).toEqual({value: 'o', done: false});
expect(iterator.hasNext()).toBe(true);
expect(iterator.next()).toEqual({value: 'o', done: false});
expect(iterator.hasNext()).toBe(false);
});
it('should not advance the iterator on multiple hasNext calls', () => {
const iterator = new HasNextIterator('foo');
// Repeated peeks must be idempotent: next() still yields the first char.
iterator.hasNext();
iterator.hasNext();
iterator.hasNext();
expect(iterator.next()).toEqual({value: 'f', done: false});
});
it('should report undefined for exhausted iterable', () => {
const iterator = new HasNextIterator('');
expect(iterator.hasNext()).toBe(false);
// Exhausted iterators follow the protocol: {value: undefined, done: true}.
const {value, done} = iterator.next();
expect(value).toBeUndefined();
expect(done).toBe(true);
});
it('should be an iterable iterator', () => {
const iterator = new HasNextIterator('123');
// Spreading exercises [Symbol.iterator]().
expect([...iterator]).toEqual(['1', '2', '3']);
});
});
|
def is_email_address(string):
    """Very loose email check: True iff the string contains '@' and '.'.

    NOTE(review): heuristic only — it accepts strings like '.@' and does not
    validate structure; use ``email.utils`` or a proper regex for real
    validation.
    """
    # Return the boolean expression directly instead of the
    # `if cond: return True / else: return False` anti-pattern.
    return "@" in string and "." in string
/**
 * Re-export everything from 'angularfire2/angularfire2' so that consumers of
 * the generated bundle can import angularfire2 from the package root instead
 * of the deeper 'angularfire2/angularfire2' path.
 */
export * from './angularfire2/angularfire2';
|
#!/usr/bin/env bash
# Kill every running python process.
# - grep '[p]ython' cannot match its own command line, so the old
#   `grep -v grep` filter is unnecessary.
# - xargs -r (--no-run-if-empty) skips kill when nothing matches; previously
#   `kill` ran with no arguments and emitted a usage error.
ps -ef | grep '[p]ython' | awk '{print $2}' | xargs -r kill
# A minimal LIFO stack backed by a Python list.
class Stack:
    def __init__(self):
        # Public attribute: callers inspect `stack` directly, so its name
        # is part of the interface.
        self.stack = []

    def push(self, data):
        """Place data on top of the stack."""
        self.stack.append(data)

    def pop(self):
        """Remove and return the top element (IndexError when empty)."""
        top = self.stack[-1]
        del self.stack[-1]
        return top

    def peek(self):
        """Return the top element without removing it."""
        return self.stack[-1]
# Demonstration: use the stack to print a string's characters in reverse.
string = 'Hello World!'
char_stack = Stack()

# Load every character onto the stack in order.
for ch in string:
    char_stack.push(ch)

# Popping drains the stack, yielding the characters in reverse order.
while char_stack.stack:
    print(char_stack.pop())
<reponame>thitranthanh/Achilles<filename>achilles-core/src/main/java/info/archinnov/achilles/internal/table/TableValidator.java<gh_stars>1-10
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.internal.table;
import static com.datastax.driver.core.DataType.counter;
import static com.datastax.driver.core.DataType.text;
import static info.archinnov.achilles.counter.AchillesCounter.ACHILLES_COUNTER_FQCN;
import static info.archinnov.achilles.counter.AchillesCounter.ACHILLES_COUNTER_PRIMARY_KEY;
import static info.archinnov.achilles.counter.AchillesCounter.ACHILLES_COUNTER_PROPERTY_NAME;
import static info.archinnov.achilles.counter.AchillesCounter.ACHILLES_COUNTER_TABLE;
import static info.archinnov.achilles.counter.AchillesCounter.ACHILLES_COUNTER_VALUE;
import java.util.Collection;
import info.archinnov.achilles.internal.metadata.holder.PropertyMetaTableValidator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.ColumnMetadata;
import com.datastax.driver.core.DataType.Name;
import com.datastax.driver.core.KeyspaceMetadata;
import com.datastax.driver.core.TableMetadata;
import info.archinnov.achilles.internal.context.ConfigurationContext;
import info.archinnov.achilles.internal.metadata.holder.EntityMeta;
import info.archinnov.achilles.internal.metadata.holder.PropertyMeta;
import info.archinnov.achilles.internal.validation.Validator;
/**
 * Validates that existing Cassandra tables match Achilles entity metadata,
 * and that the internal Achilles counter table has the expected layout.
 */
public class TableValidator {
private static final Logger log = LoggerFactory.getLogger(TableValidator.class);
private ColumnMetaDataComparator columnMetaDataComparator = ColumnMetaDataComparator.Singleton.INSTANCE.get();
/**
 * Validate an entity's primary key, regular columns, and clustered counter
 * columns against the live table metadata.
 */
public void validateForEntity(EntityMeta entityMeta, TableMetadata tableMetadata, ConfigurationContext configContext) {
log.debug("Validate existing table {} for {}", tableMetadata.getName(), entityMeta);
// Primary key Validation
PropertyMeta idMeta = entityMeta.getIdMeta();
final PropertyMetaTableValidator primaryKeyValidator = idMeta.forTableValidation();
if (entityMeta.structure().isEmbeddedId()) {
// Composite key: check partition components (true) then clustering (false).
primaryKeyValidator.validatePrimaryKeyComponents(tableMetadata, true);
primaryKeyValidator.validatePrimaryKeyComponents(tableMetadata, false);
} else {
primaryKeyValidator.validateColumn(tableMetadata, entityMeta, configContext);
}
// Other fields validation
for (PropertyMeta pm : entityMeta.getAllMetasExceptIdAndCounters()) {
final PropertyMetaTableValidator columnValidator = pm.forTableValidation();
switch (pm.type()) {
case SIMPLE:
columnValidator.validateColumn(tableMetadata, entityMeta, configContext);
break;
case LIST:
case SET:
case MAP:
columnValidator.validateCollectionAndMapColumn(tableMetadata, entityMeta);
break;
default:
break;
}
}
// Clustered Counter fields validation
if (entityMeta.structure().isClusteredCounter()) {
for (PropertyMeta counterMeta : entityMeta.getAllCounterMetas()) {
counterMeta.forTableValidation().validateClusteredCounterColumn(tableMetadata, entityMeta);
}
}
}
/**
 * Validate the shared Achilles counter table: fqcn and primary-key columns
 * must be text partition-key components, the property-name column a text
 * clustering component, and the value column a counter.
 */
public void validateAchillesCounter(KeyspaceMetadata keyspaceMetaData, String keyspaceName) {
log.debug("Validate existing Achilles Counter table");
Name textTypeName = text().getName();
Name counterTypeName = counter().getName();
TableMetadata tableMetaData = keyspaceMetaData.getTable(ACHILLES_COUNTER_TABLE);
Validator.validateTableTrue(tableMetaData != null, "Cannot find table '%s' from keyspace '%s'",ACHILLES_COUNTER_TABLE, keyspaceName);
// fqcn column: text type, part of the partition key.
ColumnMetadata fqcnColumn = tableMetaData.getColumn(ACHILLES_COUNTER_FQCN);
Validator.validateTableTrue(fqcnColumn != null, "Cannot find column '%s' from table '%s'", ACHILLES_COUNTER_FQCN,ACHILLES_COUNTER_TABLE);
Validator.validateTableTrue(fqcnColumn.getType().getName() == textTypeName,
"Column '%s' of type '%s' should be of type '%s'", ACHILLES_COUNTER_FQCN, fqcnColumn.getType().getName(),
textTypeName);
Validator.validateBeanMappingTrue(hasColumnMeta(tableMetaData.getPartitionKey(), fqcnColumn),
"Column '%s' of table '%s' should be a partition key component", ACHILLES_COUNTER_FQCN, ACHILLES_COUNTER_TABLE);
// primary-key column: text type, part of the partition key.
ColumnMetadata pkColumn = tableMetaData.getColumn(ACHILLES_COUNTER_PRIMARY_KEY);
Validator.validateTableTrue(pkColumn != null, "Cannot find column '%s' from table '%s'",ACHILLES_COUNTER_PRIMARY_KEY, ACHILLES_COUNTER_TABLE);
Validator.validateTableTrue(pkColumn.getType().getName() == textTypeName,
"Column '%s' of type '%s' should be of type '%s'", ACHILLES_COUNTER_PRIMARY_KEY, pkColumn.getType()
.getName(), textTypeName);
Validator.validateBeanMappingTrue(hasColumnMeta(tableMetaData.getPartitionKey(), pkColumn),
"Column '%s' of table '%s' should be a partition key component", ACHILLES_COUNTER_PRIMARY_KEY,ACHILLES_COUNTER_TABLE);
// property-name column: text type, part of the clustering key.
ColumnMetadata propertyNameColumn = tableMetaData.getColumn(ACHILLES_COUNTER_PROPERTY_NAME);
Validator.validateTableTrue(propertyNameColumn != null, "Cannot find column '%s' from table '%s'",ACHILLES_COUNTER_PROPERTY_NAME, ACHILLES_COUNTER_TABLE);
Validator.validateTableTrue(propertyNameColumn.getType().getName() == textTypeName,
"Column '%s' of type '%s' should be of type '%s'", ACHILLES_COUNTER_PROPERTY_NAME, propertyNameColumn
.getType().getName(), textTypeName);
Validator.validateBeanMappingTrue(hasColumnMeta(tableMetaData.getClusteringColumns(), propertyNameColumn),
"Column '%s' of table '%s' should be a clustering key component", ACHILLES_COUNTER_PROPERTY_NAME,ACHILLES_COUNTER_TABLE);
// value column: counter type.
ColumnMetadata counterValueColumn = tableMetaData.getColumn(ACHILLES_COUNTER_VALUE);
Validator.validateTableTrue(counterValueColumn != null, "Cannot find column '%s' from table '%s'",ACHILLES_COUNTER_VALUE, ACHILLES_COUNTER_TABLE);
Validator.validateTableTrue(counterValueColumn.getType().getName() == counterTypeName,
"Column '%s' of type '%s' should be of type '%s'", ACHILLES_COUNTER_VALUE, counterValueColumn.getType().getName(), counterTypeName);
}
/**
 * Returns true iff columnMetaToVerify matches (per columnMetaDataComparator)
 * any element of columnMetadatas.
 */
private boolean hasColumnMeta(Collection<ColumnMetadata> columnMetadatas, ColumnMetadata columnMetaToVerify) {
boolean fqcnColumnMatches = false;
for (ColumnMetadata columnMetadata : columnMetadatas) {
fqcnColumnMatches = fqcnColumnMatches || columnMetaDataComparator.isEqual(columnMetaToVerify, columnMetadata);
}
return fqcnColumnMatches;
}
/** Enum-based singleton holder for the shared TableValidator instance. */
public static enum Singleton {
INSTANCE;
private final TableValidator instance = new TableValidator();
public TableValidator get() {
return instance;
}
}
}
|
<reponame>siegenth/streamsx.health
package com.ibm.streamsx.objectstorage;
/**
 * Credentials and endpoint configuration for an object-storage connection.
 * NOTE(review): semantics inferred from accessor names only; confirm against
 * the implementing operators.
 */
public interface IObjectStorageAuth {
/** Sets the user name used to authenticate. */
public void setObjectStorageUser(String objectStorageUser);
/** Returns the user name used to authenticate. */
public String getObjectStorageUser();
/** Sets the password used to authenticate. */
public void setObjectStoragePassword(String objectStoragePassword);
/** Returns the password used to authenticate. */
public String getObjectStoragePassword();
/** Sets the object-storage service URI. */
public void setObjectStorageURI(String objectStorageURI);
/** Returns the object-storage service URI. */
public String getObjectStorageURI();
}
|
import java.util.ArrayList;
import java.util.List;
/** Pairs an authorization scope identifier with its human-readable description. */
class AuthorizationScope {
    private String scope;
    private String description;

    /** Creates a scope with the given identifier and description. */
    public AuthorizationScope(String scope, String description) {
        this.scope = scope;
        this.description = description;
    }

    /** @return the scope identifier */
    public String getScope() {
        return this.scope;
    }

    /** @return the human-readable description */
    public String getDescription() {
        return this.description;
    }
}
/** An authorization value together with the scopes it grants. */
class Authorization {
    private String value;
    private AuthorizationScope[] scopes;

    /** Creates an authorization for the given value and scope array. */
    public Authorization(String value, AuthorizationScope[] scopes) {
        this.value = value;
        this.scopes = scopes;
    }

    /** @return the authorization value */
    public String getValue() {
        return this.value;
    }

    /** @return the granted scopes (the array passed at construction) */
    public AuthorizationScope[] getScopes() {
        return this.scopes;
    }
}
public class ApiManager {
private List<Authorization> authorizations;
private List<String> protocols;
public ApiManager() {
authorizations = new ArrayList<>();
protocols = new ArrayList<>();
}
public void addAuthorization(Authorization[] authorizationArray) {
for (Authorization authorization : authorizationArray) {
AuthorizationScope[] authorizationScopes = new AuthorizationScope[authorization.getScopes().length];
for (int i = 0; i < authorization.getScopes().length; i++) {
AuthorizationScope authScope = authorization.getScopes()[i];
authorizationScopes[i] = new AuthorizationScope(authScope.getScope(), authScope.getDescription());
}
authorizations.add(new Authorization(authorization.getValue(), authorizationScopes));
}
}
public void addProtocols(String protocols) {
if (protocols != null && !protocols.isEmpty()) {
this.protocols.add(protocols);
}
}
} |
from kafka import KafkaConsumer

import os

# The original snippet referenced an undefined `topic` variable, which raised
# NameError at runtime.  Define it explicitly, overridable via environment.
topic = os.environ.get("KAFKA_TOPIC", "my-topic")

# Create a Kafka consumer with the specified topic.
consumer = KafkaConsumer(topic)

# Consume messages from the Kafka topic.
for message in consumer:
    print(message.value)
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ContentChild,
EventEmitter,
Input,
NgZone,
OnChanges,
OnDestroy,
OnInit,
Output,
Renderer2,
SimpleChanges,
TemplateRef,
ViewEncapsulation,
} from '@angular/core';
import { IPsSavebarIntlTexts, PsIntlService } from '@prosoft/components/core';
import { merge, Subject, Subscription } from 'rxjs';
import { startWith, takeUntil } from 'rxjs/operators';
import { IPsSavebarMode } from './models';
import { PsSavebarRightContentDirective } from './savebar-right-content.directive';
import type { FormGroup } from '@angular/forms';
/**
 * @deprecated Please use ps-form instead, will be removed in a later release
 */
@Component({
selector: 'ps-savebar',
templateUrl: './savebar.component.html',
styleUrls: ['./savebar.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
})
export class PsSavebarComponent implements OnInit, OnChanges, OnDestroy {
/** Display mode; 'auto' derives stickiness from the form's state. */
@Input() public mode: IPsSavebarMode = 'auto';
/** Form whose dirty/touched/valid state drives stickiness and save enablement. */
@Input() public form: FormGroup;
// eslint-disable-next-line @angular-eslint/no-input-prefix
/** Explicit save enablement; overrides form-derived state when boolean. */
@Input() public canSave: boolean | null;
/** Per-instance overrides for the localized savebar texts. */
@Input() public intlOverride: Partial<IPsSavebarIntlTexts>;
/** Key that, together with Ctrl, triggers save (empty disables the listener). */
@Input() public saveKey = 's';
@Output() public readonly save = new EventEmitter<void>();
@Output() public readonly saveAndClose = new EventEmitter<void>();
@Output() public readonly cancel = new EventEmitter<void>();
@ContentChild(PsSavebarRightContentDirective, { read: TemplateRef })
public customRightContent: TemplateRef<any> | null;
public get isHidden(): boolean {
return this.mode === 'hide';
}
/** Sticky when forced by mode, or (in 'auto') when the form is dirty or touched. */
public get isSticky(): boolean {
if (this.mode && this.mode !== 'auto') {
return this.mode === 'sticky';
}
if (this.form) {
return this.form.dirty || this.form.touched;
}
return false;
}
/** Disabled per canSave when set; otherwise when the form is pristine or invalid. */
public get saveDisabled(): boolean {
if (typeof this.canSave === 'boolean') {
return !this.canSave;
}
if (this.form) {
return this.form.pristine || this.form.invalid;
}
return true;
}
public intl: IPsSavebarIntlTexts;
private ngUnsubscribe$ = new Subject<void>();
private _formSub: Subscription;
// Teardown callback returned by Renderer2.listen for the keydown handler.
private _stopListening: () => void;
constructor(private intlService: PsIntlService, private renderer: Renderer2, private ngZone: NgZone, public cd: ChangeDetectorRef) {}
public ngOnInit() {
this.intlService.intlChanged$.pipe(startWith(null as any), takeUntil(this.ngUnsubscribe$)).subscribe(() => {
this.updateIntl();
this.cd.markForCheck();
});
this.updateSaveKeyListener();
}
public ngOnChanges(changes: SimpleChanges) {
if (changes.intlOverride) {
this.updateIntl();
}
if (changes.form) {
if (this._formSub) {
this._formSub.unsubscribe();
}
if (this.form) {
// The values for isSticky and saveDisabled cannot be computed and stored
// here directly, because valueChanges/statusChanges sometimes fire before
// the dirty etc. flags on the FormGroup are correct.
// Using markForCheck() works because change detection does not run
// immediately, and the values are correct by the time it does run.
// If there were events for dirty/pristine and touched/untouched, this
// could be reworked: https://github.com/angular/angular/issues/10887
this._formSub = merge(this.form.valueChanges, this.form.statusChanges).subscribe(() => {
this.cd.markForCheck();
});
}
}
if (changes.saveKey) {
this.updateSaveKeyListener();
}
}
public ngOnDestroy(): void {
this.ngUnsubscribe$.next();
this.ngUnsubscribe$.complete();
if (this._stopListening) {
this._stopListening();
}
if (this._formSub) {
this._formSub.unsubscribe();
}
}
public hasObservers(emitter: EventEmitter<any>) {
return emitter && !!emitter.observers.length;
}
private updateIntl() {
const intl = this.intlService.get('savebar');
this.intl = this.intlService.merge(intl, this.intlOverride);
}
// (Re-)register the document keydown listener outside Angular so that key
// events do not trigger change detection on their own.
private updateSaveKeyListener() {
if (this._stopListening) {
this._stopListening();
}
if (this.saveKey) {
this.ngZone.runOutsideAngular(() => {
this._stopListening = this.renderer.listen('document', 'keydown', this.onKeydown.bind(this));
});
}
}
// Ctrl+<saveKey> triggers save, falling back to saveAndClose when no save
// observer is attached; emission re-enters the Angular zone.
private onKeydown(event: KeyboardEvent) {
if (event.ctrlKey && event.key === this.saveKey && !this.saveDisabled) {
if (this.hasObservers(this.save)) {
event.preventDefault();
this.ngZone.run(() => this.save.emit());
} else if (this.hasObservers(this.saveAndClose)) {
event.preventDefault();
this.ngZone.run(() => this.saveAndClose.emit());
}
}
}
}
|
/*
* Copyright 2020 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.apidocumentation
import com.github.tomakehurst.wiremock.client.WireMock._
import java.io.File
import java.net.URLDecoder
import play.api.http.ContentTypes
import play.utils.UriEncoding
import scala.collection.immutable.Seq
import scala.io.Source
import scala.util.{Failure, Success, Try}
// Aggregates all downstream service stubs used by the acceptance tests.
trait Stubs extends ApiMicroservice with DeveloperFrontend with ApiPlatformMicroservice
// WireMock stubs for the api-platform-microservice endpoints.
trait ApiPlatformMicroservice{
// Stub the combined list of all API definitions from a fixture file.
def fetchAll() {
val allDefinitionJson = Source.fromURL(getClass.getResource(s"/acceptance/api-definition/all.json")).mkString
stubFor(
get(urlMatching("/combined-api-definitions"))
.willReturn(aResponse()
.withStatus(200)
.withHeader("Content-Type", "application/json")
.withBody(allDefinitionJson))
)
}
// Stub a single API definition, keyed by service name.
def fetchDefinition(serviceName: String) {
val definitionJson = Source.fromURL(getClass.getResource(s"/acceptance/api-definition/$serviceName.json")).mkString
stubFor(
get(urlMatching(s"/combined-api-definitions/$serviceName"))
.willReturn(aResponse()
.withStatus(200)
.withHeader("Content-Type", "application/json")
.withBody(definitionJson))
)
}
// Stub the RAML documentation resources for one service version.
def fetchRaml(serviceName: String, version: String) = {
def fetchFile(filename: String, contentType: String) = {
val url = getClass.getResource(s"/services/$serviceName/conf/$version/$filename")
val file = Source.fromURL(url).mkString
stubFor(get(urlMatching(s"/combined-api-definitions/$serviceName/$version/documentation/$filename"))
.willReturn(aResponse()
.withStatus(200)
.withHeader("Content-Type", contentType)
.withBody(file))
)
}
fetchFile("application.raml", "application/yaml+raml")
fetchFile("docs/overview.md", "text/markdown")
fetchFile("docs/versioning.md", "text/markdown")
fetchFile("modules/oauth2.raml", "application/yaml+raml")
}
// Like fetchRaml, but additionally stubs every JSON example and schema file
// found on the test classpath for the service version.
def fetchDocRaml(serviceName: String, version: String) = {
def fetchFile(filename: String, contentType: String) = {
val file = Source.fromURL(getClass.getResource(s"/services/$serviceName/conf/$version/$filename")).mkString
stubFor(get(urlMatching(s"/combined-api-definitions/$serviceName/$version/documentation/$filename"))
.willReturn(aResponse()
.withStatus(200)
.withHeader("Content-Type", contentType)
.withBody(file))
)
}
// Stub every regular file in the given resource directory as JSON;
// a missing directory simply yields no stubs.
def fetchJsonFile(path: String) = {
val smt: Try[String] = Try(getClass.getResource(s"/services/$serviceName/conf/$version/$path").getPath)
val listOfFiles: Seq[File] = smt match {
case Success(s) =>
val dir = new File(URLDecoder.decode(s, "UTF-8"))
if (dir.exists()) {
dir.listFiles
.filter(f => f.exists() && f.isFile)
.toList
}
else {
List.empty[File]
}
case Failure(f) => List.empty[File]
}
listOfFiles.foreach {
r =>
val file: String = Source.fromURL(getClass.getResource(s"/services/$serviceName/conf/$version/$path/${r.getName}")).mkString
stubFor(get(urlMatching(s"/combined-api-definitions/$serviceName/$version/documentation/$path/${r.getName}"))
.willReturn(aResponse()
.withStatus(200)
.withHeader("Content-Type", ContentTypes.JSON)
.withBody(file))
)
}
}
fetchFile("application.raml", "application/yaml+raml")
fetchFile("docs/overview.md", "text/markdown")
fetchJsonFile("examples")
fetchJsonFile("schemas")
}
// Stub a 404 for a service's definition.
// NOTE(review): this URL has a "/definition" suffix, unlike fetchDefinition
// above — confirm which path the application actually calls.
def failToFetch(serviceName: String) {
stubFor(
get(urlMatching(s"/combined-api-definitions/$serviceName/definition"))
.willReturn(aResponse()
.withStatus(404))
)
}
}
// WireMock stub for a single endpoint's XML documentation.
trait ApiMicroservice {

  /**
   * Stubs the versioned documentation endpoint for one named endpoint.
   * The fixture path uses the endpoint name lower-cased with spaces replaced
   * by dashes; the URL path segment is percent-encoded.
   */
  def documentation(serviceName: String, version: String, endpointName: String): Unit = {
    val documentationXml = Source.fromURL(
      getClass.getResource(s"/acceptance/$serviceName/${endpointName.toLowerCase.replace(" ", "-")}/documentation.xml")
    ).mkString
    stubFor(
      get(urlPathEqualTo(s"/combined-api-definitions/$serviceName/$version/documentation/${UriEncoding.encodePathSegment(endpointName, "UTF-8")}"))
        .willReturn(aResponse()
          .withStatus(200)
          .withHeader("Content-Type", "application/xml")
          .withBody(documentationXml))
    )
  }
}
// WireMock stubs for the developer-frontend navigation links endpoint.
trait DeveloperFrontend {

  /** Stubs the navlinks as a signed-in user (profile + sign out links). */
  def developerIsSignedIn(): Unit = {
    stubFor(
      get(urlPathEqualTo("/developer/user-navlinks"))
        .willReturn(
          aResponse()
            .withStatus(200)
            .withBody("""[{"label": "<NAME>", "href": "/developer/profile", "truncate" : false, "openInNewWindow": false}, {"label":"Sign out", "href":"/developer/logout", "truncate" : false, "openInNewWindow": false}]"""))
    )
  }

  /** Stubs the navlinks as a signed-out user (sign in + register links). */
  def developerIsSignedOut(): Unit = {
    stubFor(
      get(urlPathEqualTo("/developer/user-navlinks"))
        .willReturn(
          aResponse()
            .withStatus(200)
            .withBody("""[{"label": "Sign in", "href": "/developer/login", "truncate" : false, "openInNewWindow": false}, {"label":"Register", "href":"/developer/registration", "truncate" : false, "openInNewWindow": false}]"""))
    )
  }
}
// Connection details for the WireMock server that hosts all external-service stubs.
object ExternalServicesConfig {
  // Port is overridable via the WIREMOCK_PORT environment variable; defaults to 11111.
  val stubPort = sys.env.getOrElse("WIREMOCK_PORT", "11111").toInt
  val stubHost = "localhost"
  val wireMockUrl = s"http://$stubHost:$stubPort"
}
|
package de.siphalor.tweed.config.value.serializer;
import de.siphalor.tweed.data.DataContainer;
import de.siphalor.tweed.data.DataValue;
import net.minecraft.network.PacketByteBuf;
/**
 * Config value serializer for plain integers: reads/writes them from generic
 * data containers and from network packet buffers.
 */
public class IntegerSerializer extends ConfigValueSerializer<Integer> {
	@Override
	public Integer read(DataValue<?> data) {
		// Non-numeric data falls back to zero rather than failing.
		if (!data.isNumber()) {
			return 0;
		}
		return data.asInt();
	}

	@Override
	public <Key> void write(DataContainer<?, Key> dataContainer, Key key, Integer value) {
		dataContainer.set(key, value);
	}

	@Override
	public Integer read(PacketByteBuf packetByteBuf) {
		// Network form is a single 4-byte int.
		return packetByteBuf.readInt();
	}

	@Override
	public void write(PacketByteBuf packetByteBuf, Integer value) {
		packetByteBuf.writeInt(value);
	}

	@Override
	public String asString(Integer value) {
		return value.toString();
	}

	@Override
	public Class<Integer> getType() {
		return Integer.class;
	}
}
|
// Generated by script, don't edit it please.
// Wraps the legacy "expeditedssl" glyph from @rsuite/icon-font as an icon
// component via createSvgIcon (sets aria-label and display name).
import createSvgIcon from '../../createSvgIcon';
import ExpeditedsslSvg from '@rsuite/icon-font/lib/legacy/Expeditedssl';
const Expeditedssl = createSvgIcon({
  as: ExpeditedsslSvg,
  ariaLabel: 'expeditedssl',
  category: 'legacy',
  displayName: 'Expeditedssl'
});
export default Expeditedssl;
|
package clientservercommunication.eim.systems.cs.pub.ro.clientservercommunication.views;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import clientservercommunication.eim.systems.cs.pub.ro.clientservercommunication.R;
// Hosts the server and client fragments side by side in one activity layout.
// NOTE(review): android.app.FragmentManager/getFragmentManager() is deprecated
// (API 28+); consider getSupportFragmentManager() with support fragments — confirm
// ServerFragment/ClientFragment base classes before migrating.
public class ClientServerCommunicationActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_client_server_communication);

        // Attach one fragment per pane of the layout in a single transaction.
        FragmentManager fragmentManager = getFragmentManager();
        FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
        fragmentTransaction.add(R.id.server_frame_layout, new ServerFragment());
        fragmentTransaction.add(R.id.client_frame_layout, new ClientFragment());
        fragmentTransaction.commit();
    }
}
|
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
import matplotlib.pyplot as plt

# Train a Random Forest and visualize its feature importances as a bar chart.
# NOTE(review): X_train, y_train and X must be defined by the surrounding
# context (e.g. an earlier train/test split) — they are not created here.
model = RandomForestClassifier()
model.fit(X_train, y_train)

# Feature importances as a Series indexed by the training DataFrame's columns.
f_importances = pd.Series(model.feature_importances_, index=X.columns)

# Sort descending so the most important features appear first in the plot.
f_importances.sort_values(ascending=False, inplace=True)

# Bar plot of the importances. (The original passed x=/y= keywords, which are
# meaningless for a Series bar plot; the index/values are used automatically.)
f_importances.plot(kind='bar', figsize=(16, 9), rot=90, fontsize=15)
plt.tight_layout()
plt.show()
def average(a, b):
    """Return the arithmetic mean of two numbers."""
    return (a + b) / 2


# Demo: print the average of two sample values.
# (A stray '|' artifact fused to the print line in the dump has been removed.)
a = 8
b = 10
print("Average of", a, "and", b, "is", average(a, b))
# Build script for ncurses
# Default no-op implementations; overridden below when ncurses is enabled
# for the target (CT_NCURSES_TARGET) and/or as a companion library (CT_NCURSES).
do_ncurses_get() { :; }
do_ncurses_extract() { :; }
do_ncurses_for_build() { :; }
do_ncurses_for_host() { :; }
do_ncurses_for_target() { :; }
if [ "${CT_NCURSES_TARGET}" = "y" -o "${CT_NCURSES}" = "y" ]; then
# Download the ncurses tarball, trying the GNU mirrors, then invisible-island.
do_ncurses_get() {
    CT_GetFile "ncurses-${CT_NCURSES_VERSION}" .tar.gz \
        {http,ftp,https}://ftp.gnu.org/pub/gnu/ncurses \
        ftp://invisible-island.net/ncurses
}
# Unpack the ncurses source, make it writable, and apply crosstool-NG patches.
do_ncurses_extract() {
    CT_Extract "ncurses-${CT_NCURSES_VERSION}"
    # Some tarballs ship read-only files; patching requires write access.
    CT_DoExecLog ALL chmod -R u+w "${CT_SRC_DIR}/ncurses-${CT_NCURSES_VERSION}"
    CT_Patch "ncurses" "${CT_NCURSES_VERSION}"
}
# We need tic that runs on the build when building ncurses for host/target
do_ncurses_for_build() {
    local -a opts
    CT_DoStep INFO "Installing ncurses for build"
    CT_mkdir_pushd "${CT_BUILD_DIR}/build-ncurses-build-${CT_BUILD}"
    # Trim everything not needed to produce and run 'tic'.
    opts=("--enable-symlinks" \
        "--without-manpages" \
        "--without-tests" \
        "--without-cxx" \
        "--without-cxx-binding" \
        "--without-ada")
    do_ncurses_backend host="${CT_BUILD}" \
        destdir="${CT_BUILDTOOLS_PREFIX_DIR}" \
        cflags="${CT_CFLAGS_FOR_BUILD}" \
        ldflags="${CT_LDFLAGS_FOR_BUILD}" \
        "${opts[@]}"
    CT_Popd
    CT_EndStep
}
if [ "${CT_NCURSES}" = "y" ]; then
# Build ncurses as a host companion library (canadian-style builds only).
do_ncurses_for_host() {
    local -a opts
    # Unlike other companion libs, we skip host build if build==host
    # (i.e. in simple cross or native): ncurses may not be needed for
    # host, but we still need them on build to produce 'tic'.
    case "${CT_TOOLCHAIN_TYPE}" in
        native|cross) return 0;;
    esac
    CT_DoStep INFO "Installing ncurses for host"
    CT_mkdir_pushd "${CT_BUILD_DIR}/build-ncurses-host-${CT_HOST}"
    opts=("--enable-symlinks" \
        "--without-manpages" \
        "--without-tests" \
        "--without-cxx" \
        "--without-cxx-binding" \
        "--without-ada")
    do_ncurses_backend host="${CT_HOST}" \
        prefix="${CT_HOST_COMPLIBS_DIR}" \
        cflags="${CT_CFLAGS_FOR_HOST}" \
        ldflags="${CT_LDFLAGS_FOR_HOST}" \
        "${opts[@]}"
    CT_Popd
    CT_EndStep
}
fi
if [ "${CT_NCURSES_TARGET}" = "y" ]; then
# Build ncurses for the target and install it into the sysroot.
do_ncurses_for_target() {
    # Declared local for consistency with the build/host variants — the
    # original leaked 'opts' into the global scope.
    local -a opts
    CT_DoStep INFO "Installing ncurses for target"
    CT_mkdir_pushd "${CT_BUILD_DIR}/build-ncurses-target-${CT_TARGET}"
    opts=("--without-sysmouse")
    # Only build the C++/Ada bindings when the toolchain provides those languages.
    [ "${CT_CC_LANG_CXX}" = "y" ] || opts+=("--without-cxx" "--without-cxx-binding")
    [ "${CT_CC_LANG_ADA}" = "y" ] || opts+=("--without-ada")
    do_ncurses_backend host="${CT_TARGET}" \
        prefix="/usr" \
        destdir="${CT_SYSROOT_DIR}" \
        "${opts[@]}"
    CT_Popd
    CT_EndStep
}
fi
# Build libncurses
# Parameter : description : type : default
# host : machine to run on : tuple : (none)
# prefix : prefix to install into : dir : (none)
# cflags : cflags to use : string : (empty)
# ldflags : ldflags to use : string : (empty)
# --* : passed to configure : n/a : n/a
do_ncurses_backend() {
    local -a ncurses_opts
    local host
    local prefix
    # 'destdir' is assigned via the eval below; declared local so one call's
    # value cannot leak into a later call that does not pass it (the original
    # left it global, so e.g. the build-machine destdir survived into the
    # host build).
    local destdir
    local cflags
    local ldflags
    local arg
    local for_target
    # Split arguments: '--*' goes straight to configure, 'key=value' pairs are
    # eval'ed into the locals above (spaces escaped so values survive intact).
    for arg in "$@"; do
        case "$arg" in
            --*)
                ncurses_opts+=("$arg")
                ;;
            *)
                eval "${arg// /\\ }"
                ;;
        esac
    done
    if [ "${CT_NCURSES_NEW_ABI}" != "y" ]; then
        ncurses_opts+=("--with-abi-version=5")
    fi
    case "$host" in
        *-*-mingw*)
            # Needed to build for mingw, see
            # http://lists.gnu.org/archive/html/info-gnu/2011-02/msg00020.html
            ncurses_opts+=("--enable-term-driver")
            ncurses_opts+=("--enable-sp-funcs")
            ;;
    esac
    CT_DoLog EXTRA "Configuring ncurses"
    CT_DoExecLog CFG \
        CFLAGS="${cflags}" \
        LDFLAGS="${ldflags}" \
        "${CT_SRC_DIR}/ncurses-${CT_NCURSES_VERSION}/configure" \
        --build=${CT_BUILD} \
        --host=${host} \
        --prefix="${prefix}" \
        --with-install-prefix="${destdir}" \
        --enable-termcap \
        "${ncurses_opts[@]}"
    # FIXME: old ncurses build code was removing -static from progs/Makefile,
    # claiming static linking does not work on MacOS. A knowledge base article
    # (https://developer.apple.com/library/mac/qa/qa1118/_index.html) says that
    # static linking works just fine, just do not use it for libc (or other
    # libraries that make system calls). ncurses use -static only for linking
    # the curses library, then switches back to -dynamic - so they should be fine.
    # FIXME: for target, we only need tic (terminfo compiler). However, building
    # it also builds ncurses anyway, and dedicated targets (install.includes and
    # install.progs) do not do well with parallel make (-jX).
    CT_DoLog EXTRA "Building ncurses"
    CT_DoExecLog ALL ${make} ${JOBSFLAGS}
    CT_DoLog EXTRA "Installing ncurses"
    CT_DoExecLog ALL ${make} install
}
fi
|
#!/usr/bin/env bash
# NOTE(review): shebang changed from sh to bash — the script uses bash-only
# features ([[ ]], the 'function' keyword, ${@:2} slicing).

# Enable command tracing when DEBUG is 1/yes/true.
# NOTE(review): regex anchored as a full match; the original '^1|yes|true'
# matched "yes"/"true" anywhere in the value (alternation binds before '^').
if [[ "${DEBUG}" =~ ^(1|yes|true)$ ]]; then
  echo "DEBUG=true"
  set -o xtrace
fi

# Physical path of the directory containing this script.
SCRIPTPATH="$(
  cd "$(dirname "$0")"
  pwd -P
)"
CURRENT_DIR=$SCRIPTPATH
ROOT_DIR="$(dirname "$CURRENT_DIR")"
PROJECT_NAME="$(basename "$ROOT_DIR")"
BUILD_DIR=${ROOT_DIR}/_build
# Short git commit hash used as the build version.
VERSION=$(git rev-parse --short HEAD)
# Compile the Go binary and stage runtime assets into ${BUILD_DIR}.
function build {
  echo "BUILD ..."
  echo "VERSION ${VERSION}"
  rm -r "${BUILD_DIR}" >/dev/null 2>&1
  # Recreate the build dir by absolute path — the original 'mkdir _build'
  # created it relative to the caller's working directory, not ROOT_DIR.
  mkdir -p "${BUILD_DIR}"
  go build -o "${BUILD_DIR}/main" .
  echo "VERSION=${VERSION}" > "${BUILD_DIR}/.base.env"
  # cp .env ${BUILD_DIR}/
  cp config.yaml "${BUILD_DIR}/"
  cp "${SCRIPTPATH}/run.sh" "${BUILD_DIR}/"
  # NOTE(review): RUNNING_DIR is never defined in this script, so this 'cd'
  # falls back to $HOME — confirm the intended directory.
  cd $RUNNING_DIR
  echo "BUILD DONE!"
}
# Export every variable from the project's .env file, plus VERSION.
function envup {
  echo "ENVUP ..."
  # allexport makes 'source' export each assignment automatically.
  set -o allexport
  source ${ROOT_DIR}/.env
  set +o allexport
  export VERSION=${VERSION}
  echo "ENVUP DONE!"
}
# Load the environment and run the service from source with the project config.
function start() {
  echo "START ..."
  echo "VERSION ${VERSION}"
  envup
  go run . --config=${ROOT_DIR}/config.yaml
  echo "START DONE!"
}
# Run golangci-lint over internal/ and pkg/, installing a pinned version first
# if the binary is missing from PATH.
function code_lint() {
  echo "LINT ..."
  command -v golangci-lint >/dev/null 2>&1 || {
    echo ""
    echo "project is installing golangci-lint"
    # Pinned to v1.39.0, installed into $(go env GOPATH)/bin.
    curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.39.0
  }
  golangci-lint run --timeout 10m ./internal/... ./pkg/...
  echo "LINT DONE!"
}
# Regenerate code and normalize formatting/import grouping under internal/ and pkg/.
function code_format() {
  echo "FORMAT ..."
  go generate ./...
  gofmt -w internal/ pkg/
  # -local groups the project's own imports separately from third-party ones.
  goimports -local go-wire-example -w internal/ pkg/
  echo "FORMAT DONE!"
}
# Dispatch: the first CLI argument selects the action; the rest are forwarded.
# (Arguments are now quoted so values containing whitespace survive intact.)
function main() {
  case "$1" in
    build)
      build "${@:2}"
      ;;
    start)
      start "${@:2}"
      ;;
    code_lint)
      code_lint "${@:2}"
      ;;
    code_format)
      code_format "${@:2}"
      ;;
    *)
      echo "build|start|code_lint|code_format"
      ;;
  esac
}
# Allow sourcing this file for its functions (--source-only) without running main.
if [ "${1}" != "--source-only" ]; then
  main "${@}"
fi
|
<gh_stars>1-10
// Umbrella header: pulls in the project's core utility/third-party headers.
// Guard renamed — identifiers containing '__' (or starting with '_' + capital)
// are reserved for the implementation in C++ ([lex.name]).
#ifndef VP_CPP_TEMPLATE_HEADERFILE_HPP_
#define VP_CPP_TEMPLATE_HEADERFILE_HPP_

#include <CCEXP.hpp>
#include <CECS.hpp>
#include <PThreadPool.hpp>
#include <vkpLibs.hpp>

#endif // VP_CPP_TEMPLATE_HEADERFILE_HPP_
|
import defaults from './defaults';
import enUs from 'hyphenated-en-us';
import { fromSyllables } from './utils';
import { hyphenated } from '.';
// Remember the library's default so each test can restore it — the last test
// below mutates the shared defaults.minWordLength.
let { minWordLength } = defaults;

describe('hyphenated', () => {
  beforeEach(() => {
    // Reset the shared default before every test.
    defaults.minWordLength = minWordLength;
  });

  it('hyphenates the text “hyphenated”', () => {
    expect(hyphenated('hyphenated')).toEqual(
      fromSyllables('hy', 'phen', 'at', 'ed')
    );
  });

  it('hyphenates the text “hyphenated” when en-us language is passed explicitly', () => {
    expect(hyphenated('hyphenated', { language: enUs })).toEqual(
      fromSyllables('hy', 'phen', 'at', 'ed')
    );
  });

  it('hyphenates the text “hyphenated” when language has matching patterns', () => {
    // Minimal custom pattern set sufficient to hyphenate just this word.
    const language = {
      id: 'limited-patterns',
      patterns: ['hy3ph', 'he2n', 'hen5at', '2t1ed']
    };
    expect(hyphenated('hyphenated', { language })).toEqual(
      fromSyllables('hy', 'phen', 'at', 'ed')
    );
  });

  it('hyphenates the text “hyphenated” when language has a matching exception', () => {
    const language = {
      id: 'limited-exceptions',
      exceptions: ['hy-phen-at-ed']
    };
    expect(hyphenated('hyphenated', { language })).toEqual(
      fromSyllables('hy', 'phen', 'at', 'ed')
    );
  });

  it(`hyphenates the text “hyphenated” using exception only
    when matching patterns are also present`, () => {
    // Exceptions take precedence over patterns for the same word.
    const language = {
      id: 'limited-patterns-and-exception',
      patterns: ['hy3ph', 'he2n', 'hen5at', '2t1ed'],
      exceptions: ['hyp-he-nated']
    };
    expect(hyphenated('hyphenated', { language })).toEqual(
      fromSyllables('hyp', 'he', 'nated')
    );
  });

  it(`doesn’t hyphenate the text “hyphenated”
    when language has no appropriate patterns and exceptions`, () => {
    const language = { id: 'empty' };
    expect(hyphenated('hyphenated', { language })).toEqual('hyphenated');
  });

  it('doesn’t hyphenate short words', () => {
    const patterns = ['ca1fé'];
    const word = 'café';
    // A word exactly at minWordLength is left untouched...
    defaults.minWordLength = word.length;
    expect(
      hyphenated(word, { language: { id: 'min-word-length-4', patterns } })
    ).toEqual(word);
    // ...but lowering the threshold by one makes it eligible again.
    defaults.minWordLength = word.length - 1;
    expect(
      hyphenated(word, { language: { id: 'min-word-length-3', patterns } })
    ).toEqual(fromSyllables('ca', 'fé'));
  });
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.