diff --git "a/data/dataset_Genesis.csv" "b/data/dataset_Genesis.csv" new file mode 100644--- /dev/null +++ "b/data/dataset_Genesis.csv" @@ -0,0 +1,42632 @@ +"keyword","repo_name","file_path","file_extension","file_size","line_count","content","language" +"Genesis","yandorazhang/GENESIS","src/EM_EM.cpp",".cpp","44549","1245","//------------------------------------------------ +// EM_EM.cpp +// Goal: Estimate effect size distribution. +// +// Author: Yan (Dora) Zhang +// Email: yandorazhang@gmail.com +//------------------------------------------------ + +#include +#include +#include // std::cout +#include // std::min +#include +//[[Rcpp::depends(RcppArmadillo)]] + +// --------------------------------//-------------------------------- +#include +//[[Rcpp::plugins(openmp)]] +// --------------------------------//-------------------------------- + +using namespace arma; +using namespace Rcpp; +using namespace std; +using namespace stats; + + +//-------------------------------- +//-------------------------------- +// [[Rcpp::export]] +vec modification_loc(vec inx_name, int K, int mx_k){ + vec inx_loc(mx_k); + inx_loc.fill(0); + + for(int i = 0; i < K; i++){ + inx_loc(inx_name(i) - 1) = i+1; + } + return(inx_loc); +} + + + +//-------------------------------- +//-------------------------------- +// [[Rcpp::export]] +long double cpnorm(long double x) // R: pnorm() +{ + // constants + long double a1 = 0.254829592; + long double a2 = -0.284496736; + long double a3 = 1.421413741; + long double a4 = -1.453152027; + long double a5 = 1.061405429; + long double p = 0.3275911; + + // Save the sign of x + int sign = 1; + if (x < 0) + sign = -1; + x = fabs(x)/sqrt(2.0); + + // A&S formula 7.1.26 + long double t = 1.0/(1.0 + p*x); + long double y = 1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*exp(-x*x); + + return 0.5*(1.0 + sign*y); +} + +//-------------------------------- +//-------------------------------- +// [[Rcpp::export]] +long double sumfactorial(int n) // 
log(factorial(n)) = log(n!) +{ + if(n > 1) + return log(n) + sumfactorial(n - 1); + else + return 0; +} + +//-------------------------------- +//-------------------------------- +// [[Rcpp::export]] +long double sumfactorial_rev(int n, const int &k0) // log(n!/k0!) +{ + if(n > k0) + return log(n) + sumfactorial_rev(n-1,k0); + else + return 0; +} + +//-------------------------------- +//-------------------------------- +// [[Rcpp::export]] +long double cdnorm(long double x, long double mean, long double sd, bool loglog) // R: dnorm() +{ + long double res; + res = -0.5*log((2.0*PI)) - log(sd)- pow(x-mean, 2)/(2.0*pow(sd,2)); + if(loglog){return res;} + else{return exp(res);} +} + +//-------------------------------- +//-------------------------------- +// [[Rcpp::export]] +long double cdbinom(const int &k, const int &size, long double prob, bool loglog) // R: dbinom() +{ + long double res; + res = sumfactorial_rev(size,size-k) - sumfactorial(k) + k*log(prob) + (size-k)*log(1-prob); + if(loglog){return res;} + else{return exp(res);} +} + +//-------------------------------- +//-------------------------------- +// [[Rcpp::export]] +long double cdmultinom3(const int k0, const int k1, const int k2, vec prob, bool loglog) // R: dbmultinom() +{ + long double res; + + res = sumfactorial_rev(k0+k1+k2,k2) - sumfactorial(k0) - sumfactorial(k1) + + k0*log(prob(0)) + k1*log(prob(1)) + k2*log(prob(2)); + + if(loglog){return res;} + else{return exp(res);} +} + + +//--------------------------------//-------------------------------- +// 2-component model +//--------------------------------//-------------------------------- + +//-------------------------------- +//-------------------------------- +// [[Rcpp::export]] +long double loglikelihood(const vec & par, const vec &betahat, const vec &varbetahat, const vec &ldscore,const int & c0, const vec &Nstar, const int & num_threads) // loglikelihood function +{ + int K = betahat.n_elem; + long double marginal_likelihood; + long double pi1 
= par(0); + long double sigsq = par(1); + long double a = par(2); + long double res = 0; + long double y=0; + long double tem; + int k,j; + // --------------------------------//-------------------------------- + omp_set_num_threads(num_threads); + #pragma omp parallel for shared(betahat,pi1, sigsq, a, varbetahat,ldscore,c0,Nstar) private(marginal_likelihood,y,k,j,tem) reduction(+:res) + // --------------------------------//-------------------------------- + for(k=0; k(wllk[""w""]); + llk = as(wllk[""llk""]); + finish_w = clock(); + + // M-step: update the proportion parameters + pi1 = update_pi1(w,Nstar,num_threads); + finish_pi1 = clock(); + + // M-step: update the variance parameters + new_sig(0) = par(1); new_sig(1) = par(2); + if(stratification==false){new_sig(1)=0;} + for(int j=0; j 1) new_sig(0) = 1e-5; + if(new_sig(0) < 0) new_sig(0) = 1e-12; + if(new_sig(1) > 1) new_sig(1) = 1e-6; + if(new_sig(1) < -min(varbetahat)) new_sig(1) = -min(varbetahat)/2; + } + finish_sigsq = clock(); + + // update par vector with the new parameter values + par(0) = pi1; par(1) = new_sig(0); par(2) = new_sig(1); + + if(isinf(-llk) | isnan(-llk) | isnan(llk)) {par(2) = abs(par(2));} + if((llk=20)) {break;} + + // output results into a vector result + result(0) = i; result(1) = llk; + result(2) = par(0); result(3) = par(1); result(4) = par(2); + result(5) = par(0)*par(1)*M; result(6) = c0; + result(7) = double(finish_w-start_w)/CLOCKS_PER_SEC; + result(8) = double(finish_pi1-finish_w)/CLOCKS_PER_SEC; + result(9) = double(finish_sigsq-finish_pi1)/CLOCKS_PER_SEC; + + if(print==true){ + if(i%printfreq==0){ + for(int r=0; r<10; r++){ + Rcout << result(r)<< "", "" ; + } + Rcout << endl; + } + } + + error_pi = abs(prev_par(0) - par(0)) ; + error_sigsq = abs(prev_par(1) - par(1)) ; + error_a = abs(prev_par(2) - par(2)) ; + increase_ll = (llk - prev_llk)/prev_llk ; + + if(((error_pi< eps1) & (error_sigsq (wllk[""w""]); + llk = as(wllk[""llk""]); + finish_w = clock(); + + // update 
proportion + if ((pic>=lower_pi(0)) & (pic<=upper_pi(0)) & (p0>=lower_pi(1)) & (p0 <= upper_pi(1))) tem_p = update_p3(w,Nstar,num_threads); + finish_p = clock(); + + if (pic < lower_pi(0)) tem_p(0) = lower_pi(0); + if (p0 < lower_pi(1)) tem_p(1) = lower_pi(1); + if (pic > upper_pi(0)) tem_p(0) = upper_pi(0); + if (p0 > upper_pi(1)) tem_p(1) = upper_pi(1); + + // update variance components + tem_sig(0) = sig1; tem_sig(1) = sig2; tem_sig(2) = a; + if(stratification==false){tem_sig(2)=0;} + for(int j=0; j 1) tem_sig(0) = 1e-5; + if(tem_sig(0) < 0) tem_sig(0) = 1e-12; + if(tem_sig(1) > 1) tem_sig(1) = 1e-5; + if(tem_sig(1) < 0) tem_sig(1) = 1e-12; + if(tem_sig(2) > 1) tem_sig(2) = 1e-5; + if(tem_sig(2) < -min(varbetahat)) tem_sig(2) = -min(varbetahat)/2; + finish_sigsq = clock(); + + par(0) = tem_p(0); par(1) =tem_p(1); + par(2) = tem_sig(0); par(3) = tem_sig(1); par(4) = tem_sig(2); + + if(isinf(-llk) | isnan(-llk) | isnan(llk)) {par(4) = abs(par(4));} + if((llk=20)) {break;} + + result(0) = i; result(1) = llk; + result(2) = par(0); result(3) = par(1); result(4) = par(2); result(5) = par(3); result(6)= par(4); + result(7) = M*par(0)*( par(1)*par(2) + (1-par(1))*par(3)); result(8) = c0; + + result(9) = double(finish_w-start_w)/CLOCKS_PER_SEC; + result(10) = double(finish_p-finish_w)/CLOCKS_PER_SEC; + result(11) = double(finish_sigsq-finish_p)/CLOCKS_PER_SEC; + + if(print==true){ + if(i%printfreq==0){ + for(int r=0; r<12; r++){ + Rcout << result(r)<< "", "" ; + } + Rcout << endl; + } + } + + error_pi = abs(prev_par(0) - par(0)) ; + error_p0 = abs(prev_par(1) - par(1)) ; + error_sig1 = abs(prev_par(2) - par(2)) ; + error_sig2 = abs(prev_par(3) - par(3)) ; + error_a = abs(prev_par(4) - par(4)) ; + increase_ll = (llk - prev_llk)/prev_llk; + + if(((error_pi< eps1) & (error_p0 do not edit by hand +// Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 + +#include + +using namespace Rcpp; +using namespace arma; + + +// modification_loc +vec modification_loc(vec inx_name, 
int K, int mx_k); +RcppExport SEXP _GENESIS_modification_loc(SEXP inx_nameSEXP, SEXP KSEXP, SEXP mx_kSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< vec >::type inx_name(inx_nameSEXP); + Rcpp::traits::input_parameter< int >::type K(KSEXP); + Rcpp::traits::input_parameter< int >::type mx_k(mx_kSEXP); + rcpp_result_gen = Rcpp::wrap(modification_loc(inx_name, K, mx_k)); + return rcpp_result_gen; +END_RCPP +} +// cpnorm +long double cpnorm(long double x); +RcppExport SEXP _GENESIS_cpnorm(SEXP xSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< long double >::type x(xSEXP); + rcpp_result_gen = Rcpp::wrap(cpnorm(x)); + return rcpp_result_gen; +END_RCPP +} +// sumfactorial +long double sumfactorial(int n); +RcppExport SEXP _GENESIS_sumfactorial(SEXP nSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< int >::type n(nSEXP); + rcpp_result_gen = Rcpp::wrap(sumfactorial(n)); + return rcpp_result_gen; +END_RCPP +} +// sumfactorial_rev +long double sumfactorial_rev(int n, const int& k0); +RcppExport SEXP _GENESIS_sumfactorial_rev(SEXP nSEXP, SEXP k0SEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< int >::type n(nSEXP); + Rcpp::traits::input_parameter< const int& >::type k0(k0SEXP); + rcpp_result_gen = Rcpp::wrap(sumfactorial_rev(n, k0)); + return rcpp_result_gen; +END_RCPP +} +// cdnorm +long double cdnorm(long double x, long double mean, long double sd, bool loglog); +RcppExport SEXP _GENESIS_cdnorm(SEXP xSEXP, SEXP meanSEXP, SEXP sdSEXP, SEXP loglogSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< long double >::type x(xSEXP); + Rcpp::traits::input_parameter< long double >::type mean(meanSEXP); + 
Rcpp::traits::input_parameter< long double >::type sd(sdSEXP); + Rcpp::traits::input_parameter< bool >::type loglog(loglogSEXP); + rcpp_result_gen = Rcpp::wrap(cdnorm(x, mean, sd, loglog)); + return rcpp_result_gen; +END_RCPP +} +// cdbinom +long double cdbinom(const int& k, const int& size, long double prob, bool loglog); +RcppExport SEXP _GENESIS_cdbinom(SEXP kSEXP, SEXP sizeSEXP, SEXP probSEXP, SEXP loglogSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const int& >::type k(kSEXP); + Rcpp::traits::input_parameter< const int& >::type size(sizeSEXP); + Rcpp::traits::input_parameter< long double >::type prob(probSEXP); + Rcpp::traits::input_parameter< bool >::type loglog(loglogSEXP); + rcpp_result_gen = Rcpp::wrap(cdbinom(k, size, prob, loglog)); + return rcpp_result_gen; +END_RCPP +} +// cdmultinom3 +long double cdmultinom3(const int k0, const int k1, const int k2, vec prob, bool loglog); +RcppExport SEXP _GENESIS_cdmultinom3(SEXP k0SEXP, SEXP k1SEXP, SEXP k2SEXP, SEXP probSEXP, SEXP loglogSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const int >::type k0(k0SEXP); + Rcpp::traits::input_parameter< const int >::type k1(k1SEXP); + Rcpp::traits::input_parameter< const int >::type k2(k2SEXP); + Rcpp::traits::input_parameter< vec >::type prob(probSEXP); + Rcpp::traits::input_parameter< bool >::type loglog(loglogSEXP); + rcpp_result_gen = Rcpp::wrap(cdmultinom3(k0, k1, k2, prob, loglog)); + return rcpp_result_gen; +END_RCPP +} +// loglikelihood +long double loglikelihood(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_loglikelihood(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + 
Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(loglikelihood(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// weight +mat weight(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_weight(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(weight(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// weight_loglikelihood +List weight_loglikelihood(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_weight_loglikelihood(SEXP parSEXP, SEXP 
betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(weight_loglikelihood(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// update_pi1 +long double update_pi1(const mat& w, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_update_pi1(SEXP wSEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const mat& >::type w(wSEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(update_pi1(w, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// onestep_varcomponent +vec onestep_varcomponent(const vec varcomponent, const mat& w, const vec& betahat, const vec& varbetahat, const vec& ldscore, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_onestep_varcomponent(SEXP varcomponentSEXP, SEXP wSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec >::type varcomponent(varcomponentSEXP); + 
Rcpp::traits::input_parameter< const mat& >::type w(wSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(onestep_varcomponent(varcomponent, w, betahat, varbetahat, ldscore, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// EM_func +vec EM_func(const vec& par_start, const vec& betahat, const vec& varbetahat, const vec& ldscore, const vec& Nstar, const int& M, int c0, const long double& eps1, const long double& eps2, const long double& eps3, const long double& eps, const int& Meps, const int& steps, const int& num_threads, const bool& print, const int& printfreq, const bool& stratification); +RcppExport SEXP _GENESIS_EM_func(SEXP par_startSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP NstarSEXP, SEXP MSEXP, SEXP c0SEXP, SEXP eps1SEXP, SEXP eps2SEXP, SEXP eps3SEXP, SEXP epsSEXP, SEXP MepsSEXP, SEXP stepsSEXP, SEXP num_threadsSEXP, SEXP printSEXP, SEXP printfreqSEXP, SEXP stratificationSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par_start(par_startSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type M(MSEXP); + Rcpp::traits::input_parameter< int >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps1(eps1SEXP); + Rcpp::traits::input_parameter< const 
long double& >::type eps2(eps2SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps3(eps3SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps(epsSEXP); + Rcpp::traits::input_parameter< const int& >::type Meps(MepsSEXP); + Rcpp::traits::input_parameter< const int& >::type steps(stepsSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + Rcpp::traits::input_parameter< const bool& >::type print(printSEXP); + Rcpp::traits::input_parameter< const int& >::type printfreq(printfreqSEXP); + Rcpp::traits::input_parameter< const bool& >::type stratification(stratificationSEXP); + rcpp_result_gen = Rcpp::wrap(EM_func(par_start, betahat, varbetahat, ldscore, Nstar, M, c0, eps1, eps2, eps3, eps, Meps, steps, num_threads, print, printfreq, stratification)); + return rcpp_result_gen; +END_RCPP +} +// Sk +vec Sk(const vec& par, const long double& betahatk, const long double& varbetahatk, const long double& ldscorek, const int& c0, const int& Nstark); +RcppExport SEXP _GENESIS_Sk(SEXP parSEXP, SEXP betahatkSEXP, SEXP varbetahatkSEXP, SEXP ldscorekSEXP, SEXP c0SEXP, SEXP NstarkSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const long double& >::type betahatk(betahatkSEXP); + Rcpp::traits::input_parameter< const long double& >::type varbetahatk(varbetahatkSEXP); + Rcpp::traits::input_parameter< const long double& >::type ldscorek(ldscorekSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const int& >::type Nstark(NstarkSEXP); + rcpp_result_gen = Rcpp::wrap(Sk(par, betahatk, varbetahatk, ldscorek, c0, Nstark)); + return rcpp_result_gen; +END_RCPP +} +// Ik +mat Ik(const vec& par, const long double& betahatk, const long double& varbetahatk, const long double& ldscorek, const int& c0, const int& Nstark); +RcppExport SEXP 
_GENESIS_Ik(SEXP parSEXP, SEXP betahatkSEXP, SEXP varbetahatkSEXP, SEXP ldscorekSEXP, SEXP c0SEXP, SEXP NstarkSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const long double& >::type betahatk(betahatkSEXP); + Rcpp::traits::input_parameter< const long double& >::type varbetahatk(varbetahatkSEXP); + Rcpp::traits::input_parameter< const long double& >::type ldscorek(ldscorekSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const int& >::type Nstark(NstarkSEXP); + rcpp_result_gen = Rcpp::wrap(Ik(par, betahatk, varbetahatk, ldscorek, c0, Nstark)); + return rcpp_result_gen; +END_RCPP +} +// S +vec S(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_S(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(S(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// SS +mat SS(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_SS(SEXP 
parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(SS(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// I +mat I(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_I(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(I(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// mixture_components_marginal +List mixture_components_marginal(const vec& par, const vec& ldscore, const int& c0, const vec& 
Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_mixture_components_marginal(SEXP parSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(mixture_components_marginal(par, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// loglikelihood3 +long double loglikelihood3(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_loglikelihood3(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(loglikelihood3(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// weight3 +mat weight3(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); 
+RcppExport SEXP _GENESIS_weight3(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(weight3(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// weight_loglikelihood3 +List weight_loglikelihood3(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_weight_loglikelihood3(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(weight_loglikelihood3(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// 
update_p3 +vec update_p3(const mat& w, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_update_p3(SEXP wSEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const mat& >::type w(wSEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(update_p3(w, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// onestep_varcomponent3 +vec onestep_varcomponent3(const vec& varcomponent, const mat& w, const vec& betahat, const vec& varbetahat, const vec& ldscore, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_onestep_varcomponent3(SEXP varcomponentSEXP, SEXP wSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type varcomponent(varcomponentSEXP); + Rcpp::traits::input_parameter< const mat& >::type w(wSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(onestep_varcomponent3(varcomponent, w, betahat, varbetahat, ldscore, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// EM_func3 +vec EM_func3(const vec& par_start, const vec& lower_pi, const vec& upper_pi, const vec& betahat, const vec& varbetahat, const vec& ldscore, const vec& Nstar, const int& M, int c0, const long double& eps1, const long double& eps2, const long double& eps3, const long 
double& eps4, const long double& eps5, const long double& eps, const int& Meps, const int& steps, const int& num_threads, const bool& print, const int& printfreq, const bool& stratification); +RcppExport SEXP _GENESIS_EM_func3(SEXP par_startSEXP, SEXP lower_piSEXP, SEXP upper_piSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP NstarSEXP, SEXP MSEXP, SEXP c0SEXP, SEXP eps1SEXP, SEXP eps2SEXP, SEXP eps3SEXP, SEXP eps4SEXP, SEXP eps5SEXP, SEXP epsSEXP, SEXP MepsSEXP, SEXP stepsSEXP, SEXP num_threadsSEXP, SEXP printSEXP, SEXP printfreqSEXP, SEXP stratificationSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par_start(par_startSEXP); + Rcpp::traits::input_parameter< const vec& >::type lower_pi(lower_piSEXP); + Rcpp::traits::input_parameter< const vec& >::type upper_pi(upper_piSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type M(MSEXP); + Rcpp::traits::input_parameter< int >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps1(eps1SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps2(eps2SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps3(eps3SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps4(eps4SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps5(eps5SEXP); + Rcpp::traits::input_parameter< const long double& >::type eps(epsSEXP); + Rcpp::traits::input_parameter< const int& >::type Meps(MepsSEXP); + Rcpp::traits::input_parameter< const int& >::type steps(stepsSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + 
Rcpp::traits::input_parameter< const bool& >::type print(printSEXP); + Rcpp::traits::input_parameter< const int& >::type printfreq(printfreqSEXP); + Rcpp::traits::input_parameter< const bool& >::type stratification(stratificationSEXP); + rcpp_result_gen = Rcpp::wrap(EM_func3(par_start, lower_pi, upper_pi, betahat, varbetahat, ldscore, Nstar, M, c0, eps1, eps2, eps3, eps4, eps5, eps, Meps, steps, num_threads, print, printfreq, stratification)); + return rcpp_result_gen; +END_RCPP +} +// Sk3 +vec Sk3(const vec& par, const long double& betahatk, const long double& varbetahatk, const long double& ldscorek, const int& c0, const int& Nstark); +RcppExport SEXP _GENESIS_Sk3(SEXP parSEXP, SEXP betahatkSEXP, SEXP varbetahatkSEXP, SEXP ldscorekSEXP, SEXP c0SEXP, SEXP NstarkSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const long double& >::type betahatk(betahatkSEXP); + Rcpp::traits::input_parameter< const long double& >::type varbetahatk(varbetahatkSEXP); + Rcpp::traits::input_parameter< const long double& >::type ldscorek(ldscorekSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const int& >::type Nstark(NstarkSEXP); + rcpp_result_gen = Rcpp::wrap(Sk3(par, betahatk, varbetahatk, ldscorek, c0, Nstark)); + return rcpp_result_gen; +END_RCPP +} +// Ik3 +mat Ik3(const vec& par, const long double& betahatk, const long double& varbetahatk, const long double& ldscorek, const int& c0, const int& Nstark); +RcppExport SEXP _GENESIS_Ik3(SEXP parSEXP, SEXP betahatkSEXP, SEXP varbetahatkSEXP, SEXP ldscorekSEXP, SEXP c0SEXP, SEXP NstarkSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const long double& >::type betahatk(betahatkSEXP); + 
Rcpp::traits::input_parameter< const long double& >::type varbetahatk(varbetahatkSEXP); + Rcpp::traits::input_parameter< const long double& >::type ldscorek(ldscorekSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const int& >::type Nstark(NstarkSEXP); + rcpp_result_gen = Rcpp::wrap(Ik3(par, betahatk, varbetahatk, ldscorek, c0, Nstark)); + return rcpp_result_gen; +END_RCPP +} +// S3 +vec S3(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar); +RcppExport SEXP _GENESIS_S3(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + rcpp_result_gen = Rcpp::wrap(S3(par, betahat, varbetahat, ldscore, c0, Nstar)); + return rcpp_result_gen; +END_RCPP +} +// SS3 +mat SS3(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_SS3(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type 
ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(SS3(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// I3 +mat I3(const vec& par, const vec& betahat, const vec& varbetahat, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_I3(SEXP parSEXP, SEXP betahatSEXP, SEXP varbetahatSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type betahat(betahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type varbetahat(varbetahatSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(I3(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} +// mixture_3components_marginal +List mixture_3components_marginal(const vec& par, const vec& ldscore, const int& c0, const vec& Nstar, const int& num_threads); +RcppExport SEXP _GENESIS_mixture_3components_marginal(SEXP parSEXP, SEXP ldscoreSEXP, SEXP c0SEXP, SEXP NstarSEXP, SEXP num_threadsSEXP) { +BEGIN_RCPP + Rcpp::RObject rcpp_result_gen; + Rcpp::RNGScope rcpp_rngScope_gen; + Rcpp::traits::input_parameter< const vec& >::type par(parSEXP); + Rcpp::traits::input_parameter< const vec& >::type ldscore(ldscoreSEXP); + Rcpp::traits::input_parameter< const int& >::type c0(c0SEXP); + 
Rcpp::traits::input_parameter< const vec& >::type Nstar(NstarSEXP); + Rcpp::traits::input_parameter< const int& >::type num_threads(num_threadsSEXP); + rcpp_result_gen = Rcpp::wrap(mixture_3components_marginal(par, ldscore, c0, Nstar, num_threads)); + return rcpp_result_gen; +END_RCPP +} + +static const R_CallMethodDef CallEntries[] = { + {""_GENESIS_modification_loc"", (DL_FUNC) &_GENESIS_modification_loc, 3}, + {""_GENESIS_cpnorm"", (DL_FUNC) &_GENESIS_cpnorm, 1}, + {""_GENESIS_sumfactorial"", (DL_FUNC) &_GENESIS_sumfactorial, 1}, + {""_GENESIS_sumfactorial_rev"", (DL_FUNC) &_GENESIS_sumfactorial_rev, 2}, + {""_GENESIS_cdnorm"", (DL_FUNC) &_GENESIS_cdnorm, 4}, + {""_GENESIS_cdbinom"", (DL_FUNC) &_GENESIS_cdbinom, 4}, + {""_GENESIS_cdmultinom3"", (DL_FUNC) &_GENESIS_cdmultinom3, 5}, + {""_GENESIS_loglikelihood"", (DL_FUNC) &_GENESIS_loglikelihood, 7}, + {""_GENESIS_weight"", (DL_FUNC) &_GENESIS_weight, 7}, + {""_GENESIS_weight_loglikelihood"", (DL_FUNC) &_GENESIS_weight_loglikelihood, 7}, + {""_GENESIS_update_pi1"", (DL_FUNC) &_GENESIS_update_pi1, 3}, + {""_GENESIS_onestep_varcomponent"", (DL_FUNC) &_GENESIS_onestep_varcomponent, 7}, + {""_GENESIS_EM_func"", (DL_FUNC) &_GENESIS_EM_func, 17}, + {""_GENESIS_Sk"", (DL_FUNC) &_GENESIS_Sk, 6}, + {""_GENESIS_Ik"", (DL_FUNC) &_GENESIS_Ik, 6}, + {""_GENESIS_S"", (DL_FUNC) &_GENESIS_S, 7}, + {""_GENESIS_SS"", (DL_FUNC) &_GENESIS_SS, 7}, + {""_GENESIS_I"", (DL_FUNC) &_GENESIS_I, 7}, + {""_GENESIS_mixture_components_marginal"", (DL_FUNC) &_GENESIS_mixture_components_marginal, 5}, + {""_GENESIS_loglikelihood3"", (DL_FUNC) &_GENESIS_loglikelihood3, 7}, + {""_GENESIS_weight3"", (DL_FUNC) &_GENESIS_weight3, 7}, + {""_GENESIS_weight_loglikelihood3"", (DL_FUNC) &_GENESIS_weight_loglikelihood3, 7}, + {""_GENESIS_update_p3"", (DL_FUNC) &_GENESIS_update_p3, 3}, + {""_GENESIS_onestep_varcomponent3"", (DL_FUNC) &_GENESIS_onestep_varcomponent3, 7}, + {""_GENESIS_EM_func3"", (DL_FUNC) &_GENESIS_EM_func3, 21}, + {""_GENESIS_Sk3"", 
(DL_FUNC) &_GENESIS_Sk3, 6}, + {""_GENESIS_Ik3"", (DL_FUNC) &_GENESIS_Ik3, 6}, + {""_GENESIS_S3"", (DL_FUNC) &_GENESIS_S3, 6}, + {""_GENESIS_SS3"", (DL_FUNC) &_GENESIS_SS3, 7}, + {""_GENESIS_I3"", (DL_FUNC) &_GENESIS_I3, 7}, + {""_GENESIS_mixture_3components_marginal"", (DL_FUNC) &_GENESIS_mixture_3components_marginal, 5}, + {NULL, NULL, 0} +}; + +RcppExport void R_init_GENESIS(DllInfo *dll) { + R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); + R_useDynamicSymbols(dll, FALSE); +} +","C++" +"Genesis","yandorazhang/GENESIS","R/qqplotdata.simu.R",".R","6358","128","#' Illustration of qqplotdata.simu() +#' +#' This function allows to generate expected p-values under the fitted model through simulations, hence then get the dataframe to generate qqplot. +#' @param summarydata either summay-level GWAS data, containing 3 columns: +#' SNP (SNP rsID), +#' Z (GWAS test z-statistic), +#' N (GWAS study sample size which can be different for different SNPs) +#' or a dataframe generated by preprocessing() function which allows the summarydata to merge with LD score information and contains 10 columns. +#' @param est parameter estimates by fitting either 2-component model, i.e., (pic, sigmasq, a); or 3-component model, i.e., (pic, p1, sigmasq1, sigmasq2, a). +#' @param c0 an assumed maximum number of underlying susceptibility SNPs tagged by any individual GWAS marker. By default, c0 is set at 10. +#' @param qqplotCI.coverage the coverage rate of confidence band in the QQ plot. By default, it is 0.8. +#' @param qqplot.nsim the total number of simulations to generate the expected p-values to get the QQ plot. By default, it is 100. +#' @param LDcutoff a number from (0.05, 0.1, 0.2); indicating LD score is calculated based on the particular r^2 cutoff. By default, it is 0.1. +#' @param LDwindow a number from (0.5, 1, 2); indicating LD score is calculated based on the particular window size (MB). By default, it is 1 MB. 
+#' @param filter logical; if TRUE, the input summary data will be filtered. +#' @param cores number of CPU threads in parallel computing; by default, it is 1. +#' @param seeds numeric random seeds used in simulation; by default, it is 123. +#' @keywords +#' @export +#' @examples qqplotdata.simu(summarydata, est, c0=10, qqplotCI.coverage=0.8, qqplot.nsim=100, LDcutoff=0.1, LDwindow=1, filter=F,cores=1,seeds=123) + +qqplotdata.simu <- function(summarydata, est, c0=10, + qqplotCI.coverage=0.8, qqplot.nsim=100, + LDcutoff=0.1, LDwindow=1, filter=F,cores=1,seeds=123){ + + # load the required R package + library(doParallel) + library(foreach) + cl <- makeCluster(cores) + registerDoParallel(cl) + + if(length(est)==3) modelcomponents=2 + if(length(est)==5) modelcomponents=3 + + #----------------------------------------------------#---------------------------------------------------- + # I. summary GWAS data format check + #----------------------------------------------------#---------------------------------------------------- + # (i) the input summarydata is the original summary lelve GWAS data, containing 3 columns: + # SNP (SNP rsID), Z (GWAS test z-statistic), N (GWAS study sample size which can be different for different SNPs) + if(ncol(summarydata)==3){ + df <- preprocessing(summarydata, LDcutoff,LDwindow,filter) + } + + # (ii) the input summarydata is already merged with the LD score data, thus no processing + if(ncol(summarydata)==10){ + df <- summarydata + } + + #----------------------------------------------------#---------------------------------------------------- + # II. 
extract the variables needed for analysis + #----------------------------------------------------#---------------------------------------------------- + betahat <- as.numeric(as.character(df$betahat)) + varbetahat <- as.numeric(as.character(df$varbetahat)) + ldscore <- as.numeric(as.character(df$LD.score.correct)) + Nstar <- as.numeric(as.character(df$Nstar)) + SNPsum <- df$SNP + K <- length(betahat) + n <- as.numeric(as.character(df$N)) + + obs_z <- betahat/sqrt(varbetahat) + obs_pvalues <- 2*pnorm(-abs(obs_z)) + obs_lambda <- median(obs_z^2)/qchisq(0.5,1) + log_obs_pvalues <- -log10(obs_pvalues) + log_obs_pvalues <- sort(log_obs_pvalues) + + data(list=paste0(""error_iter1"")); error.snplist = SNP; SNP = SNPsum + + #----------------------------------------------------#---------------------------------------------------- + # III. get the simulated p-values under the fitted model (needed for getting the expected p-values in qqplot) + #----------------------------------------------------#---------------------------------------------------- + if(modelcomponents == 2){ + a <- est[3]; if(a<0) a <- 0; + te <- mixture_components_marginal(est, ldscore, c0, Nstar, cores) + } + + if(modelcomponents == 3){ + a <- est[5]; if(a<0) a <- 0; + te <- mixture_3components_marginal(est, ldscore, c0, Nstar, cores) + } + + proportions <- te$proportions + varcomponents <- te$varcomponents + L <- ncol(proportions) + + # ------------------------------------------------ + # initializing the expected z and p-values + log_exp_pvalues <- matrix(0,qqplot.nsim,K); + exp_z <- matrix(0,qqplot.nsim,K) + exp_lambda <- rep(0,qqplot.nsim) + temorder <- order(match(error.snplist, SNP)) + + # ------------------------------------------------ + foreach(i=1:qqplot.nsim)%do%{ + data(list=paste0(""error_iter"",i)) + set.seed(seeds*i) + # ------------------------------------------------ + # get the marginal effect size under the fitted model + betamarginal <- rep(0,K) + for(k in 1:K){ + components <- 
sample(1:L, prob=proportions[k,],size=1, replace=T) + mus <- rep(0, L) + sds <- sqrt(varcomponents[k,]) + betamarginal[k] <- rnorm(n=1,mean=mus[components],sd=sds[components]) + } + + betahat <- betamarginal + error[temorder][1:K] /sqrt(n) + rnorm(1,mean=0,sd=sqrt(a)) + exp_z[i,] <- betahat*sqrt(n) + log_exp_pvalues[i,] <- -log10(2*pnorm( -abs(exp_z[i,])) ) + exp_lambda[i] <- median(exp_z[i,]^2)/qchisq(0.5,1) + log_exp_pvalues[i,] <- sort(log_exp_pvalues[i,]) + } + + mean_log_exp_pvalues <- apply(log_exp_pvalues, 2, mean) + lower <- apply(log_exp_pvalues, 2, function(t) quantile(t, (1-qqplotCI.coverage)/2)) + upper <- apply(log_exp_pvalues, 2, function(t) quantile(t, 1-(1-qqplotCI.coverage)/2)) + + m.lambda <- mean(exp_lambda); + l.lambda <- quantile(exp_lambda, (1-qqplotCI.coverage)/2) + h.lambda <- quantile(exp_lambda, 1-(1-qqplotCI.coverage)/2) + + QQdata = data.frame(cbind(log_obs_pvalues,mean_log_exp_pvalues,lower,upper)) + # colnames(QQdata) = c(""log10(observed pvalues)"", ""expected log10(fitted pvalues)"", ""lower bound of log10(fitted pvalues)"",""upper bound of log10(fitted pvalues)"" ) + + qqplotdata <- list(QQdata=QQdata, observedlambda=obs_lambda, + meanEXPlambda=m.lambda, lowEXPlambda=l.lambda, highEXPlambda=h.lambda) + + return(qqplotdata) +}","R" +"Genesis","yandorazhang/GENESIS","R/numInterval.R",".R","1486","44","#' Illustration of numInterval() +#' +#' This function allows to calculate the number of SNPs with the absolute value of effect size falling into some interval according to the fitted mixture model. +#' @param lower the lower bound of the interval. +#' @param upper the upper bound of the interval. +#' @param est parameter estimates by fitting either 2-component model, i.e., (pic, sigmasq, a); or 3-component model, i.e., (pic, p1, sigmasq1, sigmasq2, a). +#' @param M total number of SNPs in the reference panel; by default, it is the total number of common SNPs in Hapmap3 reference panel, which is equal to 1070777. 
+#' @keywords +#' @export +#' @examples numInterval(lower,upper,est,M=1070777) + +numInterval <- function(lower,upper,est,M=1070777){ + + if(length(est)==3) components=2 + if(length(est)==5) components=3 + + if(components==2){ + pic = est[1] + sig = sqrt(est[2]) + den <- function(x){return(dnorm(x/sig)/sig )} + cdf <- function(x){ + if(x<0) {res = pic*pnorm(x/sig)} + if(x>=0) {res = pic*pnorm(x/sig) + 1-pic} + return(res) + } + } + + if(components==3){ + pic = est[1] + p0 = est[2] + s1 = sqrt(est[3]) + s2 = sqrt(est[4]) + den <- function(x){return(p0 * dnorm(x/s1)/s1 + (1-p0)*dnorm(x/s2) /s2)} + cdf <- function(x){ + if(x<0) {res = pic*(p0*pnorm(x/s1) + (1-p0)*pnorm(x/s2))} + if(x>=0) {res = pic*(p0*pnorm(x/s1) + (1-p0)*pnorm(x/s2)) + 1-pic} + return(res) + } + } + + return(M*2*(cdf(upper) - cdf(lower))) + +} +","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow1MB_cutoff0.05.R",".R","455","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.05 and window size 1MB. +#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). +#' @source \url{} +#' @name LDwindow1MB_cutoff0.05 +#' @usage data(""LDwindow1MB_cutoff0.05"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","R/futuregc.R",".R","3137","78","#' Illustration of futuregc() +#' +#' This function allows to predict future genomic control (GC) factor through simulations given the GWAS study sample size. +#' @param est parameter estimates by fitting either 2-component model, i.e., (pic, sigmasq, a); or 3-component model, i.e., (pic, p1, sigmasq1, sigmasq2, a). +#' @param n specifided future GWAS sample size. +#' @param nsim total number of simulations; by default, it is 1. 
+#' @param M total number of SNPs in the reference panel; by default, it is the total number of common SNPs in Hapmap3 reference panel, which is equal to 1070777. +#' @param seeds numeric random seeds used in simulation; by default, it is 123. +#' @keywords +#' @export +#' @examples futuregc(est,n,nsim=1,M=1070777,seeds=123) + +futuregc <- function(est,n,nsim=1,M=1070777,seeds=123){ + + lambdaGC = rep(0,nsim) + + # load the LD structure + data(""TaggingSNPinx""); data(""pairwiseLD""); data(LDwindow1MB_cutoff0.1); SNPrsID = dataLD$SNPname; K=M; + + if(length(est)==3) components=2 + if(length(est)==5) components=3 + + if(components==2){ + pic = est[1]; sigmasq = est[2]; a= est[3]; + if(a<0) a = 0 + + for(iter in 1:nsim){ + # generate the joint effect size according to the fitted distribution + set.seed(iter*seeds) + z = rbinom(K, size=1, prob=pic); nonzero = sum(z) + betajoint = rep(0, K) + betajoint[which(z==1)] = rnorm(nonzero,mean=0,sd=sqrt(sigmasq)) + + betamarginal = rep(0,K); + for(k in 1:K){ + betamarginal[k] = crossprod(betajoint[unlist(TaggingSNPinx[k])], sqrt(unlist(pairwiseLD[k])))+rnorm(1,mean=0,sd=sqrt(a)) + } + + data(list=paste0(""error_iter"",iter)) + dftem = data.frame(cbind(error,SNP)); dftem0= data.frame(SNPrsID) + dfmerge = merge(dftem0, dftem, by.x=""SNPrsID"", by.y=""SNP"",sort=F) + betahat = betamarginal + as.numeric(as.character(dfmerge$error))/sqrt(n) + varbetahat = rep(1/n,K) + lambdaGC[iter] = median( (betahat/sqrt(varbetahat))^2)/qchisq(0.5,1) + } + } + + + if(components==3){ + pic = est[1]; p1 = est[2]; sig1 = est[3]; sig2 = est[4];a=est[5] + if(a<0)a=0 + + for(iter in 1:nsim){ + # generate the joint effect size according to the fitted distribution + set.seed(iter*123) + z = sample(x=c(1,2,0), size=K,replace=T,prob=c(pic*p1, pic*(1-p1), 1-pic)) + nz1 = sum(z==1) + nz2 = sum(z==2) + betajoint = rep(0, K) + betajoint[which(z==1)] = rnorm(nz1,mean=0,sd=sqrt(sig1)) + betajoint[which(z==2)] = rnorm(nz2,mean=0,sd=sqrt(sig2)) + + betamarginal = 
rep(0,K); + for(k in 1:K){ + betamarginal[k] = crossprod(betajoint[unlist(TaggingSNPinx[k])], sqrt(unlist(pairwiseLD[k])))+rnorm(1,mean=0,sd=sqrt(a)) + } + + data(list=paste0(""error_iter"",iter)) + dftem = data.frame(cbind(error,SNP)); dftem0= data.frame(SNPrsID) + dfmerge = merge(dftem0, dftem, by.x=""SNPrsID"", by.y=""SNP"",sort=F) + betahat = betamarginal + as.numeric(as.character(dfmerge$error))/sqrt(n) + varbetahat = rep(1/n,K) + lambdaGC[iter] = median( (betahat/sqrt(varbetahat))^2)/qchisq(0.5,1) + } + } + return(mean(lambdaGC)) +} +","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow1MB_cutoff0.1.R",".R","452","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.1 and window size 1MB. +#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). +#' @source \url{} +#' @name LDwindow1MB_cutoff0.1 +#' @usage data(""LDwindow1MB_cutoff0.1"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","R/dmixssnp.R",".R","800","26","#' Illustration of dmixssnp() +#' +#' This function allows to get the (mixture) normal probability density function for the effect-size of susceptibility SNP (sSNP) based on the fitted 2- or 3-component model. +#' @param x value of quantile. +#' @param est parameter estimates by fitting either 2-component model, i.e., (pic, sigmasq, a); or 3-component model, i.e., (pic, p1, sigmasq1, sigmasq2, a). 
+#' @keywords +#' @export +#' @examples dmixssnp(x,est) + +dmixssnp <- function(x,est){ + + if(length(est)==5){ + pic = est[1] + p0 = est[2] + s1 = sqrt(est[3]) + s2 = sqrt(est[4]) + den <- function(x){return((p0 * dnorm(x/s1)/s1 + (1-p0)*dnorm(x/s2) /s2))} + } + + if(length(est)==3){ + pic = est[1] + s1 = sqrt(est[2]) + den <- function(x){return(dnorm(x/s1)/s1)} + } + return(den(x)) +}","R" +"Genesis","yandorazhang/GENESIS","R/w_hm3.noMHC.snplist.R",".R","343","9","#' The dataset contains SNP list of Hapmap3 SNPs after extracting SNPs within MHC region. +#' The dataset contains 1215001 rows and 3 columns. Each row represent a SNP. +#' +#' @format A matrix with 1215001 rows and 3 columns (SNP, A1, A2) +#' @references +#' @source \url{} +#' @name w_hm3.noMHC.snplist +#' @usage data(""w_hm3.noMHC.snplist"") +NULL","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow1MB_cutoff0.2.R",".R","452","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.2 and window size 1MB. +#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). +#' @source \url{} +#' @name LDwindow1MB_cutoff0.2 +#' @usage data(""LDwindow1MB_cutoff0.2"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow0.5MB_cutoff0.05.R",".R","461","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.05 and window size 0.5MB. +#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). 
+#' @source \url{} +#' @name LDwindow0.5MB_cutoff0.05 +#' @usage data(""LDwindow0.5MB_cutoff0.05"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","R/genesis.R",".R","24686","422","#' Illustration of genesis() +#' +#' This function allows to get parameter estimates from fitting the mixture model. +#' @param summarydata summay-level GWAS data, containing 3 columns: +#' SNP (SNP rsID), +#' Z (GWAS test z-statistic), +#' N (GWAS study sample size which can be different for different SNPs) +#' @param filter logical; if TRUE, the input summary data will be filtered. +#' @param modelcomponents 2 or 3, indicating fitting 2-component or 3-component model. +#' @param cores number of CPU threads in parallel computing. +#' @param LDcutoff a number from (0.05, 0.1, 0.2); indicating LD score is calculated based on the particular r^2 cutoff. By default, it is 0.1. +#' @param LDwindow a number from (0.5, 1, 2); indicating LD score is calculated based on the particular window size (MB). By default, it is 1 MB. +#' @param M total number of SNPs in the reference panel; by default, it is the total number of common SNPs in Hapmap3 reference panel, i.e., 1070777. +#' @param c0 an assumed maximum number of underlying susceptibility SNPs tagged by any individual GWAS marker. By default, c0 is set at 10. +#' @param BIC.gamma a tuning parameter in calculating BIC with a range of (0,1). By default, BIC.gamma is set at 0.5. +#' @param print logical; if TRUE, the EM algorithm iteration details will be output. +#' @param printfreq a number indicating every ""printfreq"" iterations of EM algorithms, results will be output. +#' @param starting the starting values for the model. For 2-component model, the starting value of (pic, sigmasq, a); for 3-component model, (pic, p1, sigmasq1, sigmasq2, a). +#' @param staringpic the starting value for pic when staring==NA. +#' @param tolerance the accuracy of the tolerance. 
For 2-component model, it is a 6-dim vector with tolerance value for (pic,sigmasq,a,llk,maxEM,steps). For 3-component model, it is a 8-dim vector with tolerance value for (pic,p1,sigmasq1,sigmasq2,a,llk,maxEM,steps). +#' @param qqplot logical; if TRUE, the QQ plot will be ploted into a pdf file. +#' @param qqplotCI.coverage the coverage rate of confidence band in the QQ plot. By default, it is 0.8. +#' @param qqplot.name the name of the QQ plot pdf files. +#' @param qqplot.axis numeric; the x- and y-axis limits is set from 0 to ""qqplot.axis"" in the QQ plot. By default, it is 10. +#' @param qqplot.nsim the total number of simulations to generate the expected p-values to get the QQ plot. By default, it is 100. +#' @param summaryGWASdata.save logical; if TRUE, the filtered summary GWAS data after merged with the LD information will be saved as a dataframe. +#' @param qqplotdata.save logical; if TRUE, the simulated data to generate the QQ plot will be saved. +#' @param herit.liability logical; if TRUE, the heritability in log-odds-ratio scale will be transferred to liability scale. +#' @param sample.prevalence sample prevalence for the disease trait. +#' @param population.prevalence population prevalence for the disease trait. +#' @param stratification logical; if TRUE, the population stratification effect is considered. +#' @param seeds numeric; random seeds used in simulation; by default, it is 123. 
+#' @keywords +#' @export +#' +#' @examples +#' genesis(summarydata, filter=FALSE,modelcomponents=2, cores=10, LDcutoff=0.1, LDwindow=1, M=1070777,c0=10, BIC.gamma=0.5,print=TRUE, printfreq=10, starting=NA, startingpic=NA, tolerance=NA,qqplot=TRUE, qqplotCI.coverage=0.8, qqplot.name="""", qqplot.axis=10, qqplot.nsim=100,summaryGWASdata.save=FALSE, qqplotdata.save=FALSE,herit.liability=FALSE, sample.prevalence=NA, population.prevalence=NA,stratification=TRUE, seeds=123) + +genesis <- function(summarydata, filter=FALSE, + modelcomponents=2, cores=10, LDcutoff=0.1, LDwindow=1, M=1070777, + c0=10, BIC.gamma=0.5, + print=TRUE, printfreq=10, starting=NA, startingpic=NA, tolerance=NA, + qqplot=TRUE, qqplotCI.coverage=0.8, qqplot.name="""", qqplot.axis=10, qqplot.nsim=100, + summaryGWASdata.save=FALSE, qqplotdata.save=FALSE, + herit.liability=FALSE, sample.prevalence=NA, population.prevalence=NA, + stratification=TRUE, seeds=123){ + + #----------------------------------------------------#---------------------------------------------------- + # I. 
Check input variables + #----------------------------------------------------#---------------------------------------------------- + # summary GWAS data format check + if(ncol(summarydata)!=3){ + stop(""The summary GWAS data should have 3 columns with (SNP rsID, Z-statistic, Sample size)!"") + } + + if(any(!is.na(starting))){ + l = length(starting) + if(modelcomponents==2){if(l!=3) stop(""2-component model: the input starting values should be a 3-dim vector with value (pic,sigmasq,a)!"")} + if(modelcomponents==3){if(l!=5) stop(""3-component model: the input starting values should be a 5-dim vector with value (pic,p1,sigmasq1,sigmasq2,a)!"")} + } + + #---------------------------------------------------- + # tolerance check + if(all(is.na(tolerance))){ + tolerance_pic=1e-6; + tolerance_p1=1e-5; + tolerance_sigsq1=1e-8; + tolerance_sigsq2=1e-8; + tolerance_a=1e-9; + tolerance_llk=1e-6; + maxEM=3e3; + steps=2; + } + + if(any(!is.na(tolerance))){ + l = length(tolerance) + if(modelcomponents==2){ + if(l!=6) stop(""2-component model: the tolerance values should be a 6-dim vector with tolerance value for (pic,sigmasq,a,llk,maxEM,steps)!"") + if(l==6) {tolerance_pic=tolerance[1]; tolerance_sigsq1=tolerance[2]; tolerance_a=tolerance[3]; tolerance_llk=tolerance[4]; maxEM=tolerance[5]; steps=tolerance[6];} + } + if(modelcomponents==3){ + if(l!=8) stop(""3-component model: the tolerance values should be a 8-dim vector with tolerance value for (pic,p1,sigmasq1,sigmasq2,a,llk,maxEM,steps)!"") + if(l==8) {tolerance_pic=tolerance[1]; tolerance_p1=tolerance[2]; tolerance_sigsq1=tolerance[3]; tolerance_sigsq2=tolerance[4]; tolerance_a=tolerance[5]; tolerance_llk=tolerance[6]; maxEM=tolerance[7]; steps=tolerance[8];} + } + } + + #---------------------------------------------------- + # load the required R package + library(doParallel) + library(foreach) + cl <- makeCluster(cores) + registerDoParallel(cl) + + 
#----------------------------------------------------#---------------------------------------------------- + # II. preliminary summary GWAS data processing, then merge the summary GWAS data with the LD score data, + # and then extract the variables needed for analysis. + #----------------------------------------------------#---------------------------------------------------- + df <- preprocessing(summarydata, LDcutoff,LDwindow,filter) + + betahat <- as.numeric(as.character(df$betahat)) + varbetahat <- as.numeric(as.character(df$varbetahat)) + ldscore <- as.numeric(as.character(df$LD.score.correct)) + Nstar <- as.numeric(as.character(df$Nstar)) + SNP <- df$SNP + TaggingSNPs <- df$TaggingSNPs + K <- length(betahat) + n <- as.numeric(as.character(df$N)) + N_SNPs_summary <- length(betahat) + + #----------------------------------------------------#---------------------------------------------------- + # III. starting value + #----------------------------------------------------#---------------------------------------------------- + if(all(is.na(starting))){ + #---------------------------------------------------- + # LD score regression to get the starting values. 
+ chisq <- (betahat/sqrt(varbetahat))^2 + tem <- ldscore*mean(1/varbetahat)/M + ld.fit <- lm(chisq ~ tem) + + if(modelcomponents==2){ + starting <- rep(0,3) + if(is.na(startingpic)){starting[1] <- 0.01} + if(!is.na(startingpic)){starting[1] <- startingpic} + starting[2] <- ld.fit$coefficients[2]/(M*starting[1]) + starting[3] <- (ld.fit$coefficients[1]-1)/M + if(starting[2]<0){starting[2] <- 1e-5; starting[3] <- 2e-6} + + llk = loglikelihood(starting, betahat,varbetahat, ldscore,c0,Nstar,cores) + if(is.na(llk)){ + if(is.na(startingpic)){starting[1] <- 5e-3} + if(!is.na(startingpic)){starting[1] <- startingpic} + starting[2] <- ld.fit$coefficients[2]/(M*starting[1]) + starting[3] <- (ld.fit$coefficients[1]-1)/M + if(starting[2]<0){starting[2] <- 1e-5; starting[3] <- 2e-6} + } + } + + if(modelcomponents==3){ + #---------------------------------------------------- + # run 2-component model to get a rough starting value + starting <- rep(0,3) + if(is.na(startingpic)){starting[1] <- 0.01} + if(!is.na(startingpic)){starting[1] <- startingpic} + starting[2] <- ld.fit$coefficients[2]/(M*starting[1]) + starting[3] <- (ld.fit$coefficients[1]-1)/M + if(starting[2]<0){starting[2] <- 1e-5; starting[3] <- 2e-6} + llk = loglikelihood(starting, betahat,varbetahat, ldscore,c0,Nstar,cores) + if(is.na(llk)){ + if(is.na(startingpic)){starting[1] <- 5e-3} + if(!is.na(startingpic)){starting[1] <- startingpic} + starting[2] <- ld.fit$coefficients[2]/(M*starting[1]) + starting[3] <- (ld.fit$coefficients[1]-1)/M + if(starting[2]<0){starting[2] <- 1e-5; starting[3] <- 2e-6} + } + + fit <- EM_func(starting, betahat,varbetahat,ldscore,Nstar,M, c0, max(tolerance_pic, 1e-6), max(tolerance_sigsq1,1e-7),max(tolerance_a,1e-7),max(tolerance_llk,1e-5),maxEM,steps,cores,print=F,printfreq,stratification) + est <- fit[3:5]; + + starting <- rep(0,5) + starting[1] <- est[1] + starting[2] <- 1.0/9 + starting[3] <- est[2]*5 + starting[4] <- starting[3]/10 + starting[5] <- est[3] + } + } + zero.omit <- 
function(v){v[which(v!=0)]} + + #----------------------------------------------------#---------------------------------------------------- + # IV. Data analysis, 2-component model. + #----------------------------------------------------#---------------------------------------------------- + if(modelcomponents == 2){ + + time0 <- proc.time()[3] + fit <- EM_func(starting, betahat,varbetahat,ldscore,Nstar,M, c0, tolerance_pic, tolerance_sigsq1,tolerance_a,tolerance_llk,maxEM,steps,cores,print,printfreq,stratification) + est <- fit[3:5]; c0 = fit[7] + runhour <- (proc.time()[3] - time0)/3600 + + causalnum <- est[1]*M + heritability <- est[1]*est[2]*M + + #---------------------------------------------------- + # calculate variance + m_S <- SS(est,betahat, varbetahat, ldscore, c0, Nstar, cores); # score matrix K*3 + m_I <- I(est,betahat, varbetahat, ldscore, c0, Nstar, cores); # information matrix 3*3 + + #---------------------------------------------------- + # get sum of scores in each neighbor of SNP, K*3 matrix + m_Sbar <- matrix(0,K,length(est)); + inx_name <- apply(matrix(SNP,ncol=1), 1, function(t) as.numeric(strsplit(t, ""rs"")[[1]][2])) + dictionary <- modification_loc(inx_name,K,max(inx_name)) + tem <- lapply(TaggingSNPs, function(t) {inx <- zero.omit(dictionary[as.vector(na.omit(as.numeric(unlist(strsplit(strsplit(t, "","")[[1]], ""rs"")))))]); colSums(matrix(m_S[inx,],ncol=length(est)))}) + m_Sbar = matrix(unlist(tem),ncol=ncol(m_S),byrow=T) + m_S + + #---------------------------------------------------- + inv_I <- solve(m_I,tol=1e-20); + J <- (t(m_S)%*%m_Sbar); + var_est <- inv_I %*% J %*% inv_I; # variance matrix of parameter est + sd_est <- sqrt(diag(var_est)); # standard error for each parameter estimate + + #---------------------------------------------------- + # calculate AIC, BIC - d_s + #---------------------------------------------------- + llk = fit[2] + ds = sum(diag(inv_I %*% J)) + aic = -2*llk + 2*ds + bic = -2*llk + ds*log(mean(n)) + 
2*BIC.gamma*log(5^ds) + + temtem <- matrix(c(M*est[2], M*est[1], 0), ncol=1) + sd_heritability <- sqrt( t(temtem) %*% var_est %*% temtem) # standard error of heritability + sd_causalnum <- M*sd_est[1] + + risk <- sqrt(exp(heritability)) + sd_risk <- sqrt(risk*sd_heritability^2/2) + + if(herit.liability==F | (herit.liability==T & (is.na(population.prevalence) | is.na(sample.prevalence)))){ + if(herit.liability==T) print(""No population prevalence or sample prevalence input, the heritability will be output in log-odds-ratio scale!"") + + estimates <- list(""Number of sSNPs (sd)""=paste0(format(causalnum,digits=3),"" ("",format(sd_causalnum,digits=4), "")""), + ""Total heritability in log-odds-ratio scale (sd)""=paste0(format(heritability,digits=3),"" ("",format(sd_heritability,digits=4), "")""), + ""Sibling risk (sd)""=paste0(format(risk,digits=3),"" ("",format(sd_risk,digits=4), "")""), + ""Parameter (pic, sigmasq, a) estimates"" = est, + ""S.D. of parameter estimates""=sd_est, + ""Covariance matrix of parameter estimates""=var_est, + ""Composite log-likelihood of fitted model"" = fit[2], + ""Model selection related"" = list( + ""BIC"" = bic, + ""ds"" = ds, + ""Information matrix"" = m_I, + ""J""=J + ), + ""c0"" = c0, + ""Total number of SNPs in the GWAS study after quality control""=N_SNPs_summary, + ""Total time in fitting the 2-component model (in hours)"" = runhour) + } + + + if(herit.liability==T & (!is.na(population.prevalence) & (!is.na(sample.prevalence)))){ + tem <- h2transfer(heritability,sd_heritability, population.prevalence,sample.prevalence) + + estimates <- list(""Number of sSNPs (sd)""=paste0(format(causalnum,digits=3),"" ("",format(sd_causalnum,digits=4), "")""), + ""Total heritability in log-odds-ratio scale (sd)""=paste0(format(heritability,digits=3),"" ("",format(sd_heritability,digits=4), "")""), + ""Total heritability in observed scale (sd)""=paste0(format(tem$Hobserved,digits=3),"" ("",format(tem$se_Hobserved,digits=4), "")""), + ""Total 
heritability in liability scale (sd)""=paste0(format(tem$Hliability,digits=3),"" ("",format(tem$se_Hliability,digits=4), "")""), + ""Sibling risk (sd)""=paste0(format(risk,digits=3),"" ("",format(sd_risk,digits=4), "")""), + ""Parameter (pic, sigmasq, a) estimates"" = est, + ""S.D. of parameter estimates""=sd_est, + ""Covariance matrix of parameter estimates""=var_est, + ""Composite log-likelihood of fitted model"" = fit[2], + ""Model selection related"" = list( + ""BIC"" = bic, + ""ds"" = ds, + ""Information matrix"" = m_I, + ""J""=J + ), + ""c0"" = c0, + ""Total number of SNPs in the GWAS study after quality control""=N_SNPs_summary, + ""Total time in fitting the 2-component model (in hours)"" = runhour) + } + + if(qqplot==T){ + qqplotdata <- qqplotdata.simu(df, est, c0,qqplotCI.coverage, qqplot.nsim, LDcutoff, LDwindow, filter=F,cores,seeds) + + pdf(file=paste0(qqplot.name,""qq2com.pdf"")) + qqplot.plot(qqplotdata,seq_inx=1,qqplot.axis) + dev.off() + + if(qqplotdata.save==T & summaryGWASdata.save==T){result <- list(estimates=estimates,summaryGWASdata=df,qqplotdata=qqplotdata)} + if(qqplotdata.save==T & summaryGWASdata.save==F){result <- list(estimates=estimates,qqplotdata=qqplotdata)} + if(qqplotdata.save==F & summaryGWASdata.save==T){result <- list(estimates=estimates,summaryGWASdata=df)} + if(qqplotdata.save==F & summaryGWASdata.save==F){result <- list(estimates=estimates)} + } + + if(qqplot==F){ + if(summaryGWASdata.save==T){result <- list(estimates=estimates,summaryGWASdata=df)} + if(summaryGWASdata.save==F){result <- list(estimates=estimates)} + } + + } + + #----------------------------------------------------#---------------------------------------------------- + # V. Data analysis, 3-component model. + #----------------------------------------------------#---------------------------------------------------- + if(modelcomponents == 3){ + #---------------------------------------------------- + # run 3-component model. 
+ #---------------------------------------------------- + time0 <- proc.time()[3] + fit <- EM_func3(starting,lower_pi=c(1e-7,1e-7),upper_pi=c(0.5,0.5),betahat,varbetahat,ldscore,Nstar,M,c0,tolerance_pic,tolerance_p1,tolerance_sigsq1,tolerance_sigsq2,tolerance_a,tolerance_llk,maxEM,steps,cores,print,printfreq,stratification) + est <- fit[3:7]; c0 = fit[9] + runhour <- (proc.time()[3] - time0)/3600 + llk = fit[2] + + causalnum <- est[1]*M + largenum = M*est[1]*est[2] + heritlarge = M*est[1]*est[2]*est[3] + heritsmall = M*est[1]*(1-est[2])*est[4] + heritability <- est[1]*(est[2]*est[3] + (1-est[2])*est[4])*M + + #---------------------------------------------------- + # calculate variance + #---------------------------------------------------- + m_S <- SS3(est,betahat, varbetahat, ldscore, c0, Nstar, cores); # score matrix K*3 + m_I <- I3(est,betahat, varbetahat, ldscore, c0, Nstar, cores); # information matrix 3*3 + + #---------------------------------------------------- + # get sum of scores in each neighbor of SNP, K*3 matrix + m_Sbar <- matrix(0,K,length(est)); + inx_name <- apply(matrix(SNP,ncol=1), 1, function(t) as.numeric(strsplit(t, ""rs"")[[1]][2])) + dictionary <- modification_loc(inx_name,K,max(inx_name)) + tem <- lapply(TaggingSNPs, function(t) {inx <- zero.omit(dictionary[as.vector(na.omit(as.numeric(unlist(strsplit(strsplit(t, "","")[[1]], ""rs"")))))]); colSums(matrix(m_S[inx,],ncol=length(est)))}) + m_Sbar = matrix(unlist(tem),ncol=ncol(m_S),byrow=T) + m_S + + #---------------------------------------------------- + inv_I <- solve(m_I,tol=1e-20); + J <- (t(m_S)%*%m_Sbar); + var_est <- inv_I %*% J %*% inv_I; # variance matrix of parameter est + sd_est <- sqrt(diag(var_est)); # standard error for each parameter estimate + + #---------------------------------------------------- + # calculate AIC, BIC - d_s + #---------------------------------------------------- + ds = sum(diag(inv_I %*% J)) + aic = -2*llk + 2*ds + bic = -2*llk + ds*log(mean(n)) + 
2*BIC.gamma*log(5^ds) + + temtem = matrix(c(M*(est[2]*est[3] + (1-est[2])*est[4]), + est[1]*(est[3] - est[4])*M, + est[1]*(est[2])*M, + est[1]*(1-est[2])*M, + 0), ncol=1) + sd_heritability = sqrt( t(temtem) %*% var_est %*% temtem) # standard error of heritability + + sd_causalnum = M*sd_est[1] + tem_largenum = matrix(c(M*est[2], M*est[1], 0, 0, 0), ncol=1) + sd_largenum = sqrt( t(tem_largenum) %*% var_est %*% tem_largenum) + + tem_heritlarge = matrix(c(M*est[2]*est[3], M*est[1]*est[3], M*est[1]*est[2], 0, 0),ncol=1) + sd_heritlarge = sqrt( t(tem_heritlarge) %*% var_est %*% tem_heritlarge) + + tem_heritsmall = matrix(c(M*(1-est[2])*est[4],-M*est[1]*est[4], 0, M*est[1]*(1-est[2]), 0),ncol=1) + sd_heritsmall = sqrt( t(tem_heritsmall) %*% var_est %*% tem_heritsmall) + + + risk <- sqrt(exp(heritability)) + sd_risk <- sqrt(risk*sd_heritability^2/2) + + + if(herit.liability==F | (herit.liability==T & (is.na(population.prevalence) | is.na(sample.prevalence)))){ + if(herit.liability==T) print(""No population prevalence or sample prevalence input, the heritability will be output in log-odds-ratio scale!"") + + estimates <- list(""Number of sSNPs (sd)""=paste0(format(causalnum,digits=3),"" ("",format(sd_causalnum,digits=4), "")""), + ""Number of sSNPs in the cluster with larger variance component (sd)""=paste0(format(largenum,digits=3),"" ("",format(sd_largenum,digits=4), "")""), + ""Total heritability in log-odds-ratio scale (sd)""=paste0(format(heritability,digits=3),"" ("",format(sd_heritability,digits=4), "")""), + ""Sibling risk (sd)""=paste0(format(risk,digits=3),"" ("",format(sd_risk,digits=4), "")""), + ""Heritability explained by the cluster with larger variance component (sd)""=paste0(format(heritlarge,digits=3),"" ("",format(sd_heritlarge,digits=4), "")""), + ""Heritability explained by the cluster with samller variance component""=paste0(format(heritsmall,digits=3),"" ("",format(sd_heritsmall,digits=4), "")""), + ""Parameter (pic, p1, sigmasq1, sigmasq2, a) 
estimates"" = est, + ""S.D. of parameter estimates""=sd_est, + ""Covariance matrix of parameter estimates"" = var_est, + ""Composite log-likelihood of fitted model"" = fit[2], + ""Model selection related"" = list( + ""BIC"" = bic, + ""ds"" = ds, + ""Information matrix"" = m_I, + ""J""=J + ), + ""c0"" = c0, + ""Total number of SNPs in the GWAS study after quality control""=N_SNPs_summary, + ""Total time in fitting the 3-component model (in hours)"" = runhour) + + } + + + if(herit.liability==T & (!is.na(population.prevalence) & (!is.na(sample.prevalence)))){ + + tem <- h2transfer(heritability,sd_heritability, population.prevalence,sample.prevalence) + + estimates <- list(""Number of sSNPs (sd)""=paste0(format(causalnum,digits=3),"" ("",format(sd_causalnum,digits=4), "")""), + ""Number of sSNPs in the cluster with larger variance component (sd)""=paste0(format(largenum,digits=3),"" ("",format(sd_largenum,digits=4), "")""), + ""Total heritability in log-odds-ratio scale (sd)""=paste0(format(heritability,digits=3),"" ("",format(sd_heritability,digits=4), "")""), + ""Total heritability in observed scale (sd)""=paste0(format(tem$Hobserved,digits=3),"" ("",format(tem$se_Hobserved,digits=4), "")""), + ""Total heritability in liability scale (sd)""=paste0(format(tem$Hliability,digits=3),"" ("",format(tem$se_Hliability,digits=4), "")""), + ""Sibling risk (sd)""=paste0(format(risk,digits=3),"" ("",format(sd_risk,digits=4), "")""), + ""Heritability explained by the cluster with larger variance component (sd)""=paste0(format(heritlarge,digits=3),"" ("",format(sd_heritlarge,digits=4), "")""), + ""Heritability explained by the cluster with samller variance component""=paste0(format(heritsmall,digits=3),"" ("",format(sd_heritsmall,digits=4), "")""), + ""Parameter (pic, p1, sigmasq1, sigmasq2, a) estimates"" = est, + ""S.D. 
of parameter estimates""=sd_est, + ""Covariance matrix of parameter estimates"" = var_est, + ""Composite log-likelihood of fitted model"" = fit[2], + ""Model selection related"" = list( + ""BIC"" = bic, + ""ds"" = ds, + ""Information matrix"" = m_I, + ""J""=J + ), + ""c0"" = c0, + ""Total number of SNPs in the GWAS study after quality control""=N_SNPs_summary, + ""Total time in fitting the 3-component model (in hours)"" = runhour) + } + + if(qqplot==T){ + qqplotdata <- qqplotdata.simu(df, est, c0,qqplotCI.coverage, qqplot.nsim, LDcutoff, LDwindow, filter=F,cores,seeds) + + pdf(file=paste0(qqplot.name,""qq3com.pdf"")) + qqplot.plot(qqplotdata,seq_inx=1,qqplot.axis) + dev.off() + + if(qqplotdata.save==T & summaryGWASdata.save==T){result <- list(estimates=estimates,summaryGWASdata=df,qqplotdata=qqplotdata)} + if(qqplotdata.save==T & summaryGWASdata.save==F){result <- list(estimates=estimates,qqplotdata=qqplotdata)} + if(qqplotdata.save==F & summaryGWASdata.save==T){result <- list(estimates=estimates,summaryGWASdata=df)} + if(qqplotdata.save==F & summaryGWASdata.save==F){result <- list(estimates=estimates)} + } + + if(qqplot==F){ + if(summaryGWASdata.save==T){result <- list(estimates=estimates,summaryGWASdata=df)} + if(summaryGWASdata.save==F){result <- list(estimates=estimates)} + } + } + + return(result) +}","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow2MB_cutoff0.2.R",".R","452","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.2 and window size 2MB. +#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). 
+#' @source \url{} +#' @name LDwindow2MB_cutoff0.2 +#' @usage data(""LDwindow2MB_cutoff0.2"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow0.5MB_cutoff0.1.R",".R","458","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.1 and window size 0.5MB. +#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). +#' @source \url{} +#' @name LDwindow0.5MB_cutoff0.1 +#' @usage data(""LDwindow0.5MB_cutoff0.1"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow0.5MB_cutoff0.2.R",".R","458","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.2 and window size 0.5MB. +#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). +#' @source \url{} +#' @name LDwindow0.5MB_cutoff0.2 +#' @usage data(""LDwindow0.5MB_cutoff0.2"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","R/qqplot.plot.R",".R","1528","33","#' Illustration of qqplot.plot() +#' +#' This function generate the qqplot by plotting expected p-values under the fitted model through simulations against the observed p-values. +#' @param qqplotdata The QQ plot object got from qqplotdata.simu() function. +#' @param seq_inx numeric; QQdata will be thinned every seq_inx. +#' @param qqplot.axis numeric; the x- and y-axis limits is set from 0 to ""qqplot.axis"" in the QQ plot. By default, it is 10. 
+#' @keywords +#' @export +#' @examples qqplot.plot(qqplotdata,seq_inx=1,qqplot.axis=10) + +qqplot.plot <- function(qqplotdata,seq_inx=1,qqplot.axis=10){ + + QQdata = data.frame(qqplotdata$QQdata) + obs_lambda = qqplotdata$observedlambda + m.lambda = qqplotdata$meanEXPlambda + l.lambda = qqplotdata$lowEXPlambda + h.lambda = qqplotdata$highEXPlambda + + inx <- seq(1,nrow(QQdata),seq_inx) + QQdata <- QQdata[inx,] + + plot(QQdata$mean_log_exp_pvalues, QQdata$log_obs_pvalues, type=""l"",xlab=expression(Expected~~-log[10](italic(P)~value)), xlim=c(0,qqplot.axis),ylim=c(0,qqplot.axis),ylab=expression(Observed~~-log[10](italic(P)~value))) + polygon(c(QQdata$lower,rev(QQdata$upper)),c(QQdata$log_obs_pvalues,rev(QQdata$log_obs_pvalues)),col = ""grey75"", border = FALSE) + abline(a=0,b=1 ,col = ""gray50"",lty=2) + + mylabel = bquote(italic(lambda[obs])== .(formatC(obs_lambda,format=""f"", digits = 3))) + text(x = 7.5, y = 2.5, labels = mylabel) + + mylabel = bquote(italic(lambda[fit])== .(formatC(m.lambda,format=""f"", digits = 3))) + text(x = 7.5, y = 1.5, labels = mylabel) + +} + ","R" +"Genesis","yandorazhang/GENESIS","R/preprocessing.R",".R","3290","67","#' Illustration of preprocessing() +#' +#' This function allows to preprocess the summary-level GWAS statistics. +#' @param summarydata summay-level GWAS data, containing 3 columns: +#' SNP (SNP rsID), +#' Z (GWAS test z-statistic), +#' N (GWAS study sample size which can be different for different SNPs) +#' @param LDcutoff a number from (0.05, 0.1, 0.2); indicating LD score is calculated based on the particular r^2 cutoff. By default, it is 0.1. +#' @param LDwindow a number from (0.5, 1, 2); indicating LD score is calculated based on the particular window size (MB). By default, it is 1 MB. +#' @param filter logical; if TRUE, the input summary data will be filtered. 
+#' @keywords +#' @export +#' @examples preprocessing(summarydata, LDcutoff=0.1,LDwindow=1,filter=FALSE) + +preprocessing <- function(summarydata, LDcutoff=0.1,LDwindow=1,filter=FALSE){ + #----------------------------------------------------#---------------------------------------------------- + # I. summary GWAS data format check + #----------------------------------------------------#---------------------------------------------------- + if(ncol(summarydata)!=3){ + stop(""The summary GWAS data should have 3 columns with (SNP rsID, Z-statistic, Sample size)!"") + } + + #----------------------------------------------------#---------------------------------------------------- + # II. preliminary summary GWAS data filtering + #----------------------------------------------------#---------------------------------------------------- + colnames(summarydata) <- c(""SNP"",""Z"",""N"") + + if(filter==TRUE){ + #a. If sample size varies from SNP to SNP, remove SNPs with an effective sample size less than 0.67 times the 90th percentile of sample size. + ikeep1 <- which(as.numeric(as.character(summarydata$N))>=0.67*quantile(as.numeric(as.character(summarydata$N)), 0.9)) + summarydata <- summarydata[ikeep1,] + + #b. Remove SNPs within the major histocompatibility complex (MHR) region; filter SNPs to Hapmap3 SNPs. + data(w_hm3.noMHC.snplist) + ikeep2 <- which(as.character(summarydata$SNP) %in% w_hm3.noMHC.snplist$SNP) + summarydata <- summarydata[ikeep2,] + + #c. Remove SNPs with extremely large effect sizes (chi^2 > 80). + ikeep3 <- which(as.numeric(as.character(summarydata$Z))^2 <=80) + summarydata <- summarydata[ikeep3,] + } + + #----------------------------------------------------#---------------------------------------------------- + # III. 
merge the summary GWAS data with the LD score data + #----------------------------------------------------#---------------------------------------------------- + data(list=paste0(""LDwindow"",LDwindow,""MB_cutoff"",LDcutoff)) + + summarydata$SNP <- as.character(summarydata$SNP) + summarydata$Z <- as.numeric(as.character(summarydata$Z)) + summarydata$N <- as.numeric(as.character(summarydata$N)) + + # remove NA values. + inx <- which(is.na(summarydata$Z)) + if(length(inx)>1) summarydata <- summarydata[-inx,] + inx <- which(is.na(summarydata$N)) + if(length(inx)>1) summarydata <- summarydata[-inx,] + + # get the variable needed for our method. + summarydata$betahat <- summarydata$Z/sqrt(summarydata$N) + summarydata$varbetahat <- 1/summarydata$N + df <- merge(summarydata, dataLD,by.x=""SNP"",by.y=""SNPname"",sort=F) + + return(df) +} + + +","R" +"Genesis","yandorazhang/GENESIS","R/RcppExports.R",".R","5783","128","# Generated by using Rcpp::compileAttributes() -> do not edit by hand +# Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 + +modification_loc <- function(inx_name, K, mx_k) { + .Call('_GENESIS_modification_loc', PACKAGE = 'GENESIS', inx_name, K, mx_k) +} + +cpnorm <- function(x) { + .Call('_GENESIS_cpnorm', PACKAGE = 'GENESIS', x) +} + +sumfactorial <- function(n) { + .Call('_GENESIS_sumfactorial', PACKAGE = 'GENESIS', n) +} + +sumfactorial_rev <- function(n, k0) { + .Call('_GENESIS_sumfactorial_rev', PACKAGE = 'GENESIS', n, k0) +} + +cdnorm <- function(x, mean, sd, loglog) { + .Call('_GENESIS_cdnorm', PACKAGE = 'GENESIS', x, mean, sd, loglog) +} + +cdbinom <- function(k, size, prob, loglog) { + .Call('_GENESIS_cdbinom', PACKAGE = 'GENESIS', k, size, prob, loglog) +} + +cdmultinom3 <- function(k0, k1, k2, prob, loglog) { + .Call('_GENESIS_cdmultinom3', PACKAGE = 'GENESIS', k0, k1, k2, prob, loglog) +} + +loglikelihood <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_loglikelihood', PACKAGE = 'GENESIS', par, 
betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +weight <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_weight', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +weight_loglikelihood <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_weight_loglikelihood', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +update_pi1 <- function(w, Nstar, num_threads) { + .Call('_GENESIS_update_pi1', PACKAGE = 'GENESIS', w, Nstar, num_threads) +} + +onestep_varcomponent <- function(varcomponent, w, betahat, varbetahat, ldscore, Nstar, num_threads) { + .Call('_GENESIS_onestep_varcomponent', PACKAGE = 'GENESIS', varcomponent, w, betahat, varbetahat, ldscore, Nstar, num_threads) +} + +EM_func <- function(par_start, betahat, varbetahat, ldscore, Nstar, M, c0, eps1, eps2, eps3, eps, Meps, steps, num_threads, print, printfreq, stratification) { + .Call('_GENESIS_EM_func', PACKAGE = 'GENESIS', par_start, betahat, varbetahat, ldscore, Nstar, M, c0, eps1, eps2, eps3, eps, Meps, steps, num_threads, print, printfreq, stratification) +} + +Sk <- function(par, betahatk, varbetahatk, ldscorek, c0, Nstark) { + .Call('_GENESIS_Sk', PACKAGE = 'GENESIS', par, betahatk, varbetahatk, ldscorek, c0, Nstark) +} + +Ik <- function(par, betahatk, varbetahatk, ldscorek, c0, Nstark) { + .Call('_GENESIS_Ik', PACKAGE = 'GENESIS', par, betahatk, varbetahatk, ldscorek, c0, Nstark) +} + +S <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_S', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +SS <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_SS', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +I <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_I', PACKAGE 
= 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +mixture_components_marginal <- function(par, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_mixture_components_marginal', PACKAGE = 'GENESIS', par, ldscore, c0, Nstar, num_threads) +} + +loglikelihood3 <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_loglikelihood3', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +weight3 <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_weight3', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +weight_loglikelihood3 <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_weight_loglikelihood3', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +update_p3 <- function(w, Nstar, num_threads) { + .Call('_GENESIS_update_p3', PACKAGE = 'GENESIS', w, Nstar, num_threads) +} + +onestep_varcomponent3 <- function(varcomponent, w, betahat, varbetahat, ldscore, Nstar, num_threads) { + .Call('_GENESIS_onestep_varcomponent3', PACKAGE = 'GENESIS', varcomponent, w, betahat, varbetahat, ldscore, Nstar, num_threads) +} + +EM_func3 <- function(par_start, lower_pi, upper_pi, betahat, varbetahat, ldscore, Nstar, M, c0, eps1, eps2, eps3, eps4, eps5, eps, Meps, steps, num_threads, print, printfreq, stratification) { + .Call('_GENESIS_EM_func3', PACKAGE = 'GENESIS', par_start, lower_pi, upper_pi, betahat, varbetahat, ldscore, Nstar, M, c0, eps1, eps2, eps3, eps4, eps5, eps, Meps, steps, num_threads, print, printfreq, stratification) +} + +Sk3 <- function(par, betahatk, varbetahatk, ldscorek, c0, Nstark) { + .Call('_GENESIS_Sk3', PACKAGE = 'GENESIS', par, betahatk, varbetahatk, ldscorek, c0, Nstark) +} + +Ik3 <- function(par, betahatk, varbetahatk, ldscorek, c0, Nstark) { + .Call('_GENESIS_Ik3', PACKAGE = 'GENESIS', par, betahatk, varbetahatk, 
ldscorek, c0, Nstark) +} + +S3 <- function(par, betahat, varbetahat, ldscore, c0, Nstar) { + .Call('_GENESIS_S3', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar) +} + +SS3 <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_SS3', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +I3 <- function(par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_I3', PACKAGE = 'GENESIS', par, betahat, varbetahat, ldscore, c0, Nstar, num_threads) +} + +mixture_3components_marginal <- function(par, ldscore, c0, Nstar, num_threads) { + .Call('_GENESIS_mixture_3components_marginal', PACKAGE = 'GENESIS', par, ldscore, c0, Nstar, num_threads) +} + +","R" +"Genesis","yandorazhang/GENESIS","R/projection.R",".R","3936","97","#' Illustration of projection() +#' +#' This function allows to make future projections according to the fitted model. +#' @param est parameter estimates by fitting either 2-component model, i.e., (pic, sigmasq, a); or 3-component model, i.e., (pic, p1, sigmasq1, sigmasq2, a). +#' @param v covariance matrix of parameter estimates by fitting the 2- or 3-component model. +#' @param n specifided future GWAS study sample size. +#' @param gwas.significance genome-wide significance level, by default it is 5e-8. +#' @param tol tolerance accuracy vector for intgrate() function. +#' @param M total number of SNPs in the reference panel; by default, it is the total number of common SNPs in Hapmap3 reference panel, which is equal to 1070777. +#' @param CI whether to calculate CI or not; by default, CI=FALSE. +#' @param nsim the total number of bootstrap samplers in order to calculate CI; by default, it is 1000. +#' @param CI.coverage coverage level of confidence interval; by default, it is 0.95, i.e., 95% CI. +#' @param seeds numeric; random seeds used in simulation; by default, it is 123. 
+#' @keywords +#' @export +#' @examples projection(est,v=NULL,n,gwas.significance=5e-8,tol=c(1e-12,1e-15),M=1070777,CI=FALSE,nsim=1000,CI.coverage=0.95,seeds=123) + +projection <- function(est,v=NULL,n,gwas.significance=5e-8,tol=c(1e-12,1e-15),M=1070777,CI=FALSE,nsim=1000,CI.coverage=0.95,seeds=123){ + # within function + pp <- function(est,n,gwas.significance=5e-8,tol=c(1e-12,1e-15),M=1070777){ + + if(length(est)==3)components=2 + if(length(est)==5)components=3 + + if(components==2){ + pic = est[1] + sig = sqrt(est[2]) + den <- function(x){return(dnorm(x/sig)/sig )} + herit0 <- pic*M*sig^2 + } + + if(components==3){ + pic = est[1] + p1 = est[2] + s1 = sqrt(est[3]) + s2 = sqrt(est[4]) + den <- function(x){return(p1 * dnorm(x/s1)/s1 + (1-p1)*dnorm(x/s2) /s2)} + herit0 <- pic*M*(p1*est[3] + (1-p1)*est[4]) + } + + tem0 <- function(x){return(x^2*den(x))} + + c_gwsignificance = abs(qnorm(gwas.significance/2)) + pow <- function(x){return(1 - pnorm(c_gwsignificance - sqrt(n)*x) + pnorm(-c_gwsignificance - sqrt(n)*x) )} + tem <- function(x){return(pow(x)*den(x))} + Numdiscoveries = M*pic * integrate(tem, -Inf, Inf,rel.tol=tol[1], abs.tol=tol[2])[[1]] + + tem1 <- function(x){return(pow(x)*den(x)*x^2)} + GVpercentage = M*pic * integrate(tem1, -Inf, Inf,rel.tol=tol[1], abs.tol=tol[2])[[1]]*100/herit0 + pheno.variance = GVpercentage*herit0/100 + + return(list(Numdicoveries=Numdiscoveries, GVpercentage=GVpercentage, pheno.variance=pheno.variance,herit=herit0)) + } + + + library(MASS) + logest = log(est) + + if(CI==TRUE){ + set.seed((seeds)) + alpha = (1-CI.coverage)/2 + logv = diag(1/est)%*%v%*%diag(1/est) + estmat = exp(mvrnorm(nsim,mu=logest,Sigma=logv)) + + tem = pp(est,n,gwas.significance,tol,M) + tem1 = apply(estmat, 1, function(t) {pp(t,n,gwas.significance,tol,M)}) + + pest = tem$Numdicoveries; + gvest = tem$GVpercentage; + pheno.variance = tem$pheno.variance; + herit = tem$herit + + re = unlist(lapply(tem1,function(t) t[1])) + rere = apply(matrix(re,ncol=1),1,function(t) 
rbinom(1,size=M,prob=t/M)) + regv = unlist(lapply(tem1,function(t)t[2])) + + return(list(heritability = herit, + Numdiscoveries = c(pest,quantile(rere,alpha),quantile(rere,1-alpha)), + pheno.variance = c(gvest,quantile(regv,alpha),quantile(regv,1-alpha))*herit/100, + GVpercentage = c(gvest,quantile(regv,alpha),quantile(regv,1-alpha)) + )) + } + + if(CI==FALSE){ + tem = pp(est,n,gwas.significance,tol,M) + pest = tem$Numdicoveries; + gvest = tem$GVpercentage; + pheno.variance = tem$pheno.variance; + herit = tem$herit; + + return(list(heritability = herit, Numdiscoveries = pest, pheno.variance=pheno.variance, GVpercentage = gvest)) + } + +} + + +","R" +"Genesis","yandorazhang/GENESIS","R/heightGWAS.R",".R","756","8","#' The dataset contains a GWAS summary data example, i.e., GIANT_HEIGHT_LangoAllen2010_publicrelease_HapMapCeuFreq.txt after quality control, containing 1037172 rows and 3 columns. Each row represent a SNP. +#' +#' @format A matrix with 1037172 rows and 3 columns (snp, z, n) +#' @references{Lango Allen, H., Estrada, K., Lettre, G., Berndt, S.I., Weedon, M.N., Rivadeneira, F., Willer, C.J., Jackson, A.U., Vedantam, S., Raychaudhuri, S., et al. (2010). Hundreds of variants clustered in genomic loci and biological pathways affect human height. Nature 467, 832-838.} +#' @source \url{http://portals.broadinstitute.org/collaboration/giant/index.php/GIANT_consortium_data_files#Height_.28download_GZIP.29} +#' @name heightGWAS +#' @usage data(""heightGWAS"") +NULL","R" +"Genesis","yandorazhang/GENESIS","R/postmarginal.R",".R","4790","107","#' Illustration of postmarginal() +#' +#' This function allows to estimate posterior probability with a null marginal effect size and posterior mean of marginal effect size. 
+#' @param summarydata either summay-level GWAS data, containing 3 columns: +#' SNP (SNP rsID), +#' Z (GWAS test z-statistic), +#' N (GWAS study sample size which can be different for different SNPs) +#' or a dataframe generated by preprocessing() function which allows the summarydata to merge with LD score information and contains 10 columns. +#' @param est parameter estimates by fitting either 2-component model, i.e., (pic, sigmasq, a); or 3-component model, i.e., (pic, p1, sigmasq1, sigmasq2, a). +#' @param c0 an assumed maximum number of underlying susceptibility SNPs tagged by any individual GWAS marker. By default, c0 is set at 10. +#' @param LDcutoff a number from (0.05, 0.1, 0.2); indicating LD score is calculated based on the particular cutoff. By default, it is 0.1. +#' @param LDwindow a number from (0.5, 1, 2); indicating LD score is calculated based on the particular window size (MB). By default, it is 1 MB. +#' @param filter logical; if TRUE, the input summary data will be filtered. +#' @keywords +#' @export +#' @examples postmarginal(summarydata,est, c0=10, LDcutoff=0.1, LDwindow=1, filter=F) +#' +postmarginal <- function(summarydata, est, c0=10, LDcutoff=0.1, LDwindow=1, filter=F){ + + #---------------------------------------------------- + # I. 
summary GWAS data format check + #---------------------------------------------------- + # (i) the input summarydata is the original summary lelve GWAS data, containing 3 columns: + # SNP (SNP rsID), Z (GWAS test z-statistic), N (GWAS study sample size which can be different for different SNPs) + if(ncol(summarydata)==3){ + df <- preprocessing(summarydata, LDcutoff,LDwindow,filter) + } + + # (ii) the input summarydata is already merged with the LD score data, thus no processing + if(ncol(summarydata)==10){ + df <- summarydata + } + + betahat <- as.numeric(as.character(df$betahat)) + varbetahat <- as.numeric(as.character(df$varbetahat)) + ldscore <- as.numeric(as.character(df$LD.score.correct)) + Nstar <- as.numeric(as.character(df$Nstar)) + SNP <- df$SNP + TaggingSNPs <- df$TaggingSNPs + K <- length(betahat) + n <- as.numeric(as.character(df$N)) + + #---------------------------------------------------- + # II. posterior marginal effect size + #---------------------------------------------------- + if(length(est)==3) components=2 + if(length(est)==5) components=3 + + if(components==2){ + q0 = (1-est[1])^Nstar * 1/sqrt(2*pi*(varbetahat)) * exp(-0.5*betahat^2/(varbetahat)) + beta_exp = 0 # posterior expectation of marginal effect size (zero when Nk0=Nstar) + # each SNP has a different N_k^*, therefore goes thru different number of loops. 
+ qsum = q0 + for (i in 1:c0){ # i is Nk1: first type of causal marker + # print(i) + qtemp = rep(0, length(Nstar)) + delta = i*est[2]*ldscore/Nstar + ind = Nstar>=i + + qtemp[ind] = choose(Nstar[ind],i) * + (1-est[1])^(Nstar[ind]-i)*(est[1]^i) / sqrt(2*pi*(delta[ind]+varbetahat[ind])) * + exp(-0.5*betahat[ind]^2/(delta[ind]+varbetahat[ind])) + + qsum = qsum + qtemp # Summation of q's the denominator + beta_exp = beta_exp + qtemp * (varbetahat)^(-1)*betahat/(1/delta+(varbetahat)^(-1)) + } + + p_beta0 = q0 / qsum + beta_exp = beta_exp / qsum + + } + + if(components==3){ + q0 = (1-est[1])^Nstar * 1/sqrt(2*pi*(varbetahat)) * exp(-0.5*betahat^2/(varbetahat)) + beta_exp = 0 # posterior expectation of marginal effect size (zero when Nk0=Nstar) + # each SNP has a different N_k^*, therefore goes thru different number of loops. + qsum = q0 + for (i in 0:c0){ # i is Nk1: number of the first type of causal SNPs + # print(i) + for (j in 0:c0){ # j is Nk2: number of the second type of causal SNPs + if (i==0 & j==0){ + next + } else{ + # print(paste(i,j)) + qtemp = rep(0, length(Nstar)) + delta = (i*est[3]+j*est[4])*ldscore/Nstar + ind = Nstar>=(i+j) + + qtemp[ind] = choose(Nstar[ind],i)*choose(Nstar[ind]-i,j) * + ((1-est[1])^(Nstar[ind]-i-j))*((est[2]*est[1])^i)*(((1-est[2])*est[1]))^j * 1/sqrt(2*pi*(delta[ind]+varbetahat[ind])) * + exp(-0.5*betahat[ind]^2/(delta[ind]+varbetahat[ind])) + + qsum = qsum + qtemp # Summation of q's the denominator + beta_exp = beta_exp + qtemp * (varbetahat)^(-1)*betahat/(1/delta+(varbetahat)^(-1)) + } + } + } + p_beta0 = q0 / qsum + beta_exp = beta_exp / qsum + } + posterior = list() + posterior$snp = SNP + posterior$prob.beta0 = p_beta0 + posterior$posteriormean.marginal = beta_exp + return(posterior) +} +","R" +"Genesis","yandorazhang/GENESIS","R/polyriskpredict.R",".R","4862","91","#' Illustration of polyriskpredict() +#' +#' This function allows to make polygenic risk prediction at given sample size with SNPs included at optimum p-value 
threshold or genome-wide significance level (5e-8). +#' +#' @param N sample size, at which the predictive performance is calculated. It must be a scalar. +#' @param Ps a vector of two mixture weights in the effect-size distribution for susceptibility SNPs. The sum should be equal to one. +#' @param Sig2s a vector of two component-specific variances for susceptibility SNPs. In a case where the one-component +#' normal distribution is assumed for the effect-size distribution, then the two elements of the vector should be set to the same value. +#' @param M the number of independent set of susceptibility SNPs with the default of 1070777. +#' @param M1 the estimated number of susceptibility SNPs. +#' @param type which threshold should be applied. Either ""optimum"" (default) or ""GWAS"" can be chosen. +#' @param alp.GWAS the (scalar) genome-wide significance level, if type==""GWAS"". The default value is 5*10^(-8). +#' @param k.fold a vector of multiplicative constants, at which or higher risk then the average population risk, +#' proportions of population and cases are calculated. The default value is set at 3:5. +#' @keywords +#' @export +#' @examples polyriskpredict(N, Ps, Sig2s, M=1070777, M1, type=""optimum"", alp.GWAS=5*10^(-8), k.fold=3:5) + +polyriskpredict <- function(N, Ps, Sig2s, M=1070777, M1, type=""optimum"", alp.GWAS=5*10^(-8), k.fold=3:5){ + + ### functions used within polyriskpredict + mu.func<-function(N, c.alp.half, Ps, Sig2s,M, M1){ + # this function calculates the expeced value of RN, defined in Chatterjee et al (2013, NG) + # by assuming a mixture of two normal distributions with mean zero and two different variances + # for the non-null regression coefficients, betas. + # In a case where a single normal distribution is appropriate, then two variances are set to be the same, + # and the sum of weights, Ps, is set to be 1. 
+ cmp1<-M1*(Ps[1]*comp1(N, c.alp.half, Sig2s[1])+Ps[2]*comp1(N, c.alp.half, Sig2s[2])) + cmp2<-M1*(Ps[1]*comp2(N, c.alp.half, Sig2s[1])+Ps[2]*comp2(N, c.alp.half, Sig2s[2])) + cmp3<-M1*c.alp.half/N*(Ps[1]*comp3(N, c.alp.half, Sig2s[1])+Ps[2]*comp3(N, c.alp.half, Sig2s[2])) + cmp4<-M1/sqrt(N)*(Ps[1]*comp4(N, c.alp.half, Sig2s[1])+Ps[2]*comp4(N, c.alp.half, Sig2s[2])) + cmp5<-(M-M1)*(1-pnorm(c.alp.half))*2/N*(1+c.alp.half*2*dnorm(c.alp.half)/((1-pnorm(c.alp.half))*2)) + mus<-(cmp1+cmp4)/sqrt(cmp1+cmp2+cmp3+cmp4+cmp5) + return(mus) + } + + comp1<-function(N, c.alp.half, sig2.0){ + sig2.0*(1-pnorm(c.alp.half/sqrt(N),mean=0, sd=sqrt((1+N*sig2.0)/N))+pnorm(-c.alp.half/sqrt(N),mean=0, sd=sqrt((1+N*sig2.0)/N)))- + N*sig2.0^2/(1+N*sig2.0)*(-c.alp.half/sqrt(N)*dnorm(-c.alp.half/sqrt(N),mean=0, sd=sqrt((1+N*sig2.0)/N))*2) + } + + comp2<-function(N, c.alp.half, sig2.0){ + 1/N*(1-pnorm(c.alp.half/sqrt(N),mean=0, sd=sqrt((1+N*sig2.0)/N))+pnorm(-c.alp.half/sqrt(N),mean=0, sd=sqrt((1+N*sig2.0)/N))) + } + + comp3<-function(N, c.alp.half, sig2.0){ + (dnorm(-c.alp.half, mean=0, sd=sqrt(1+N*sig2.0))+dnorm(-c.alp.half, mean=0, sd=sqrt(1+N*sig2.0))) + } + + comp4<-function(N, c.alp.half, sig2.0){ + sig2.0*sqrt(N)*c.alp.half/(1+N*sig2.0)*dnorm(c.alp.half, mean=0, sd=sqrt(1+N*sig2.0))*2 + } + ### + + ### The function definition begins here + if (length(N)!=1) {stop(""The length of N must be one."")} + if (length(Sig2s)!=2) {stop(""The length of Sig2s is not equal to two."")} + if (length(Ps)!=2) {stop(""The length of Ps is not equal to two."")} + if (missing(M1)) {stop(""M1 is missing!"")} + + if (type==""optimum""){ + # to vectorize function mu.func over the argument c.alp.half + mu.vec.func<-Vectorize(mu.func, ""c.alp.half"") + c.seq<-exp(seq(log(10^(-8)), log(7), length=1000)) # generate candidate grid points for c.alp.half + + ## optimized threshold + tmp.mus<-mu.vec.func(N=N, c.alp.half=c.seq, Ps=Ps, Sig2s=Sig2s, M=M, M1=M1) + loc.max<-which.max(tmp.mus) + + if (loc.max==1){ + 
result.vec<-c(c.seq[loc.max], tmp.mus[loc.max]) + }else{ + result.vec<-unlist(optimize(mu.func, lower=c.seq[loc.max-1], upper=c.seq[loc.max+1], maximum=T, N=N, Sig2s=Sig2s, Ps=Ps, M=M, M1=M1, tol=10^(-5))) + } + result.vec[1]<-(1-pnorm(result.vec[1]))*2 # the first element is replaced with the optimal alpha level. + }else{ + ## at the GWAS significance + c.GWAS<-qnorm(1-alp.GWAS/2) + result.vec<-c(alp.GWAS, mu.func(N=N, c.alp.half=c.GWAS, Ps=Ps, Sig2s=Sig2s, M=M, M1=M1)) + } + # AUC + AUC<-pnorm(result.vec[2]/sqrt(2)) + + xi.k<-log(k.fold)/sqrt(result.vec[2]) + PPI<-1-pnorm(xi.k+sqrt(result.vec[2])/2) + PCI<-1-pnorm(xi.k-sqrt(result.vec[2])/2) + names(PPI)<-names(PCI)<-paste(""at "", k.fold, ""-fold or higher"", sep="""") + + list(alpha=result.vec[1], AUC=AUC, PPI=PPI, PCI=PCI) +} +","R" +"Genesis","yandorazhang/GENESIS","R/h2transfer.R",".R","1042","26","#' Illustration of h2transfer +#' +#' This function allows to transfer the heritability in log-odds-ratio (frailty) scale to other scale (observed and liability scale). +#' @param h2.log heritability in log-odds-ratio (frailty) scale. +#' @param se.h2.log standard error of heritability in log-odds-ratio scale. +#' @param population.prevalence population prevalence. +#' @param sample.prevalence sample prevalence. 
+#' @keywords +#' @export +#' @examples h2transfer(h2.log, se.h2.log, population.prevalence, sample.prevalence) + +h2transfer <- function(h2.log, se.h2.log, population.prevalence, sample.prevalence){ + + P <- sample.prevalence + K <- population.prevalence + z <- dnorm(qnorm(1-K)) + + h2.observed <- h2.log*P*(1-P) + se.h2.observed <- se.h2.log*P*(1-P) + + h2.liability <- h2.observed*(K^2)*((1-K)^2)/((z^2)*P*(1-P)) + se.h2.liability <- se.h2.observed*(K^2)*((1-K)^2)/((z^2)*P*(1-P)) + + return(list(h2.observed=h2.observed, se.h2.observed=se.h2.observed, + h2.liability=h2.liability,se.h2.liability=se.h2.liability)) +}","R" +"Genesis","yandorazhang/GENESIS","R/error_iter1.R",".R","283","8","#' The errors generated based on 1000 Genome reference panel that follow N(0, R), where R is the LD correlation matrix. +#' +#' @format A vector with 1070777 errors corresponding to 1070777 SNPs +#' @references +#' @source \url{} +#' @name error_iter1 +#' @usage data(""error_iter1"") +NULL","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow2MB_cutoff0.1.R",".R","452","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.1 and window size 2MB. +#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). +#' @source \url{} +#' @name LDwindow2MB_cutoff0.1 +#' @usage data(""LDwindow2MB_cutoff0.1"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","R/LDwindow2MB_cutoff0.05.R",".R","455","9","#' The dataset contains 1070777 rows and 6 columns. Each row represent a SNP. It contains the LD score which is calcluated based on 1000 Genome reference data set with LD-square threshold 0.05 and window size 2MB. 
+#' +#' @format A dataframe named ""dataLD"" with 1070777 rows and 6 columns (""SNPname"" , ""Nstar"", ""TaggingSNPs"",""LD.score.correct"", ""CHR"" ,""BP""). +#' @source \url{} +#' @name LDwindow2MB_cutoff0.05 +#' @usage data(""LDwindow2MB_cutoff0.05"") +NULL + +","R" +"Genesis","yandorazhang/GENESIS","examples/examples.md",".md","4764","124","Examples +=== + +## Load summary GWAS data - Height (allen2010) +The summary GWAS dataframe has 3 columns: (SNPname, z-statistic, sample size). + +```{r height} +library(GENESIS) +data(heightGWAS) +# heightGWAS has been pre-processed according to function preprocessing(). +``` + + +## Fit the 2-component model +#### 1. Fitting to the model + +Note the startingpic value can be specifided at a list of values, and then the one with the largest log-likelihood is selected as the final model. + +```{r 2-component model} +fit2 <- genesis(heightGWAS, filter=F, modelcomponents=2, cores=2, LDcutoff=0.1, LDwindow=1, c0=10, startingpic=0.005) +fit2$estimates + +est <- fit2$estimates$`Parameter (pic, sigmasq, a) estimates` # the model parameter estimates +v <- fit2$estimates$`Covariance matrix of parameter estimates` # the covariance matrix of model parameter estimtaes + +# the est and v should have below values +est <- c(4.906797e-03, 5.733005e-05, 1.861491e-06) +v <- matrix(c(2.870170e-07, -2.426575e-09, -6.077209e-11, -2.426575e-09, 2.491765e-11, 4.715283e-13, -6.077209e-11, 4.715283e-13, 1.720162e-14),3,3) +``` + +#### 2. Get the density plot for the susceptibility SNPs +```{r density plot} +x_seq <- seq(-0.02,0.02,length.out = 1000); +y_seq <- apply(matrix(x_seq,ncol=1),1,function(t) dmixssnp(t,est)) +plot(x_seq, y_seq,type=""l"",xlab=""Joint effect size"", ylab=""Probability Density"") +``` + +#### 3. Make future projections with specified sample size +```{r future projections} +projection(est,v,n=253288, CI=TRUE) +``` + +#### 4. 
Calculate number of SNPs falling in an interval +```{r number of SNPs in an interval} +numInterval(0.005,Inf,est) +``` + +#### 5. Predict genomic control factor in future GWAS with specified sample size +```{r prediction} +futuregc(est,n=253288,nsim=1) +``` + + +#### 6. Calculate AUC of polygenic risk prediction model at given sample size +```{r function} +# PRS is calculated with SNPs included at optimum p-value threshold + +polyriskpredict(N=253288, Ps=c(0.5,0.5), Sig2s=c(est[3],est[3]), M=1070777, M1=1070777*est[1], type=""optimum"", k.fold=3:5) + +# PRS is calculated with SNPs included at genome-wide significance level +polyriskpredict(N=253288, Ps=c(0.5,0.5), Sig2s=c(est[3],est[3]), M=1070777, M1=1070777*est[1], type=""GWAS"",alp.GWAS=5e-8, k.fold=3:5) +``` + + +## Fit the 3-component model +#### 1. Fitting to the model + +Note the startingpic value can be specifided at a list of values, and then the one with the largest log-likelihood is selected as the final model. + +```{r 3-component model} + +# starting value of 3-component model comes from 2-component model estimates. 
+starting <- rep(0,5) +starting[1] <- est[1] +starting[2] <- 1/9 +starting[3] <- est[2]*5 +starting[4] <- starting[3]/10 +starting[5] <- est[3] + +fit3 <- genesis(heightGWAS, filter=F, modelcomponents=3, cores=24, LDcutoff=0.1, LDwindow=1, c0=10,starting=starting) +fit3$estimates + +est <- fit3$estimates$`Parameter (pic, p1, sigmasq1, sigmasq2, a) estimates` # the model parameter estimates +v <- fit3$estimates$`Covariance matrix of parameter estimates` # the covariance matrix of model parameter estimtaes + +# est and v should have below values +est <- c(8.899809e-03, 9.476025e-02, 1.458650e-04, 2.227118e-05, 1.567643e-06) +v <- matrix(c(1.327856e-06, -1.131049e-05, 6.912489e-10, -2.901301e-09, -9.388865e-11, -8.568269e-06, 3.380985e-04, -1.543479e-07, 1.771036e-09,8.989668e-10, -1.846216e-09, -1.353542e-07, 1.517545e-10, 1.166563e-11, 1.901686e-14, -3.068113e-09, 9.520417e-09, 5.633801e-12, 9.427492e-12, 1.220433e-13, -9.111521e-11, 1.063848e-09, -1.526281e-13, 1.042123e-13, 1.406410e-14), 5,5) +``` + +#### 2. Get the density plot for the susceptibility SNPs +```{r density plot} +x_seq = seq(-0.02,0.02,length.out = 1000); +y_seq = apply(matrix(x_seq,ncol=1),1,function(t) dmixssnp(t,est)) +plot(x_seq, y_seq,type=""l"",xlab=""Joint effect size"", ylab=""Probability Density"") +``` + +#### 3. Make future projections with specified sample size +```{r future projections} +projection(est,v, n=253288, CI=TRUE); +``` + +#### 4. Calculate number of SNPs falling in an interval +```{r number of SNPs in an interval} +numInterval(0.005,Inf,est) +``` + +#### 5. Predict genomic control factor in future GWAS with specified sample size +```{r prediction} +futuregc(est,n=253288,nsim=1) +``` + + +#### 6. 
Calculate AUC of polygenic risk prediction model at given sample size +```{r function} +# PRS is calculated with SNPs included at optimum p-value threshold + +polyriskpredict(N=253288, Ps=c(est[2],1-est[2]), Sig2s=c(est[3],est[4]), M=1070777, M1=1070777*est[1], type=""optimum"", k.fold=3:5) + + +# PRS is calculated with SNPs included at genome-wide significance level + +polyriskpredict(N=253288, Ps=c(est[2],1-est[2]), Sig2s=c(est[3],est[4]), M=1070777, M1=1070777*est[1], type=""GWAS"",alp.GWAS=5e-8, k.fold=3:5) +```","Markdown" +"Genesis","genesis-release-r-ccs/genesis","fortdep.py",".py","3768","144","#!/usr/bin/env python +# -*- encoding: utf-8 -*- + +## @package fortdep +# @brief fortran dependency inspector +# @author Motoshi Kamiya +# +# (c) Copyright 2016-2017 RIKEN. All rights reserved. + +# Simple fortran dependency checker for GENESIS +# +# usage: fortdep.py [list of files (*.fpp)] + +from __future__ import print_function +import os +import re +import sys +import getopt +import glob + +class FortranFile: + + # class variables + re_fcomment = re.compile( ""(^[^!]*)!(.*$)"" ) + re_module = re.compile( ""^(.*;+|\s*)\s*module\s*([^\s,]*)\s*"", re.I ) + re_use = re.compile( ""^(.*;+\s*|\s*)use\s+([^\s,]*)\s*"", re.I ) + mod_ext = "".mod"" + + def __init__( self, fname = """", ext = "".o"" ): + self.setFilename( fname, ext ) + self.filename = fname + self.modules = [] + self.depmods = [] + + def setFilename( self, fname, ext = "".o"" ): + if len(fname) == 0: + self.filename = """" + self.objname = """" + return + self.filename = fname + self.objname = re.sub( r'\.[a-zA-Z0-9]+$', ext, self.filename ) + + def parse( self ): + myf = open( self.filename, 'r' ) + for line in myf: + ma = FortranFile.re_fcomment.search( line ) + if ( ma ): # replace string + line = ma.group(1) + mb = FortranFile.re_module.search( line ) + if ( mb ): + modname = mb.group(2).lower() + self.appendModuleNames( modname ) + ma = FortranFile.re_use.search( line ) + if ( ma ): + modname = 
ma.group(2).lower() + self.appendDependMods( modname ) + myf.close() + self.makeUniqList() + + def appendModuleNames( self, modname ): + self.modules.append( modname + FortranFile.mod_ext ) + + def appendDependMods( self, modname ): + self.depmods.append( modname + FortranFile.mod_ext ) + + def makeUniqList( self ): + self.modules = list(set(self.modules)) + self.depmods = list(set(self.depmods)) + + def getMyModuleFilenames( self ): + return self.modules + + def getDepModuleFilenames( self ): + return self.depmods + + def showModuleList( self, output ): + for m in self.modules: + print( m, file=output ) + + def showDepModList( self, output ): + for m in self.depmods: + print( m, file=output ) + + def recipe( self, mods_avail = [], static_deps = """" ): + depmods = [] + for m in self.depmods: + if m.lower() in mods_avail or not mods_avail: + depmods.append(m) + ret = """" + ret += self.objname + "": "" + self.filename + "" "" + "" "".join(depmods) + "" "" + static_deps + if len(self.modules) > 0: + ret += ""\n"" + ret += "" "".join(self.modules) + "": "" + self.filename + "" "" + self.objname + return ret + +def usage( ret = 0 ): + print( ""fortdep: fortran dependency inspector"", file = sys.stderr ) + print( ""usage: fortdep [options] files > [output]"", file = sys.stderr ) + print( ""options:"", file = sys.stderr ) + print( "" -s [static dependencies for obj]"", file = sys.stderr ) + + sys.exit(ret) + +if __name__ == ""__main__"": + static_deps = """" + + # for future extension + try: + opts, args = getopt.getopt( sys.argv[1:], ""hs:e:f:"" ) + except getopt.GetoptError: + print( ""Error, failed to parse options"", file = sys.stderr ) + + if len(args) == 0: + usage() + + # parse opts + for o,a in opts: + if o in ( ""-s"" ): + static_deps = a + elif o in ( ""-h"" ): + usage() + + files = [] + mods_in_this_dir = [] + # build a list of modules + for f in args: + ff = FortranFile( f ) + ff.parse() + files.append( ff ) + mods = ff.getMyModuleFilenames() + for m in 
mods: + mods_in_this_dir.append( m ) + + # unique + mods_in_this_dir = list(set(mods_in_this_dir)) + + ## for debug + #for ff in files: + # ff.showModuleList( sys.stderr ) + # ff.showDepModList( sys.stderr ) + + for ff in files: + print( ff.recipe( mods_in_this_dir, static_deps ) ) +","Python" +"Genesis","genesis-release-r-ccs/genesis","doc/conf.py",".py","16165","468","# -*- coding: utf-8 -*- +# +# GENESIS documentation build configuration file, created by +# sphinx-quickstart on Wed Feb 22 10:21:04 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ['sphinx.ext.imgmath', 'sphinxcontrib.bibtex'] +bibtex_bibfiles = ['refs.bib'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. 
+project = u'GENESIS' +copyright = u'2014-2025' +author = u'RIKEN' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = u'2.1.6' +# The full version, including alpha/beta/rc tags. +release = u'2.1.6' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set ""language"" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +#pygments_style = 'sphinx' +pygments_style = 'bw' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + +# Put numbers to figures +numfig = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named ""default.css"" will overwrite the builtin ""default.css"". +html_static_path = ['_static'] + + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. 
+htmlhelp_basename = 'GENESISdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + 'papersize': 'a4paper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + 'pointsize': '11pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + 'preamble': r''' +\renewcommand{\releasename}{Version} +\let\OrigVerbatim\OriginalVerbatim +\renewcommand{\OriginalVerbatim}[1][1]{\OrigVerbatim[#1,frame=single]} +\pagestyle{normal} +\thispagestyle{normal} +\pagenumbering{arabic} +\usepackage{braket} +''', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', + + # remove blank page + 'classoptions': ',oneside', + 'babel': '\\usepackage[english]{babel}', + 'date': '2017/02/21', + + 'tableofcontents': r''' +%%%%%%% +\begin{center} +{\Huge GENESIS 2.1.6} +\end{center} + +\vspace{2ex} + +\begin{quote} + Project Leader: Yuji Sugita (RIKEN) + + Current main developers: + Jaewoon Jung (RIKEN), + Shingo Ito (RIKEN), + Chigusa Kobayashi (RIKEN), + Takaharu Mori (RIKEN/Tokyo University of Science), + Hiraku Oshima (RIKEN/University of Hyogo), + Cheng Tan (RIKEN), + Diego Ugarte (RIKEN), + Kiyoshi Yagi (RIKEN/University of Tsukuba) + + Current developers/contributors + Motoshi Kamiya (RIKEN/IMS), + Kento Kasahara (RIKEN/Osaka University), + Yasuhiro Matsunaga (RIKEN/Saitama University), + Daisuke Matsuoka (RIKEN/RIST), + Osamu Miyashita (RIKEN), + Suyong Re (RIKEN/NIBIOHN), + Ai Shinobu (RIKEN/Osaka University), + Florence Tama (RIKEN/Nagoya University), + Shoji Takada (Kyoto University), + Isseki Yu (RIKEN/Maebashi Institute of Technology), + + Other developers/contributors for older versions: + Tadashi Ando (RIKEN/Tokyo University of Science), + Michael Feig (Michigan State University), + Raimondas Galvelis (RIKEN), + Ryuhei Harada (RIKEN/University of Tsukuba), + Takashi Imai (RIKEN), + 
Yasuaki Komuro (RIKEN), + Yasuhito Karino (RIKEN), + Naoyuki Miyashita (RIKEN/Kindai University), + Wataru Nishima (RIKEN), + Yosuke Sumiya (RIKEN/Yamaguchi University), + Donatas Surblys (RIKEN/Tohoku University), + Koichi Tamura (RIKEN), + Kenta Yamada (RIKEN), + Takao Yoda (Nagahama Institute of Bio-Science and Technology) + + Acknowledgments: + Norio Takase (Isogo Soft), + Yasumasa Joti (RIKEN SPring8), + Akira Naruse (NVIDIA), + Yukihiko Hirano (NVIDIA Japan), + Hikaru Inoue (Fujitsu Ltd.), + Tomoyuki Noda (Fujitsu Ltd.), + Kiyotaka Sakamoto (Fujitsu Ltd.), + Yoshinobu Akinaga (VINAS), + Yoshitake Sakae (RIST), + Nobuhiko Kato (ASTOM R\&D), + Toru Shiozaki (QSimulate), + Klaas Gunst (QSimulate), + Hideyo Yoshida (JSOL Corporation), + Kenta Chaki (JSOL Corporation) + + Copyright \copyright 2014-2025 RIKEN. All Rights Reserved +\end{quote} + +\vspace{3ex} + +%%%%%%% +{\LARGE GENESIS website} + +\begin{quote} + \url{https://mdgenesis.org/} +\end{quote} + +\vspace{3ex} + +%%%%%%% +{\LARGE Citation Information} + +\begin{enumerate} + \item When using GENESIS 2.x please cite the following references: + + \begin{itemize} + \item J. Jung, K. Yagi, C. Tan, H. Oshima, T. Mori, I. Yu, Y. Matsunaga, C. Kobayashi, S. Ito, D. Ugarte La Torre, Y. Sugita, J. Phys. Chem. B 128, 25, 6028-6048 (2024). + \end{itemize} + + \item When using GENESIS 1.x please cite the following references: + \begin{itemize} + \item C. Kobayashi, J. Jung, Y. Matsunaga, T. Mori, T. Ando, K. Tamura, M. Kamiya, and Y. Sugita, ""GENESIS 1.1: A hybrid-parallel molecular dynamics simulator with enhanced sampling algorithms on multiple computational platforms"", J. Comput. Chem. 38, 2193-2206 (2017). + + \item J. Jung, T. Mori, C. Kobayashi, Y. Matsunaga, T. Yoda, M. Feig, and Y. Sugita, ""GENESIS: A hybrid-parallel and multi-scale molecular dynamics simulator with enhanced sampling algorithms for biomolecular and cellular simulations"", WIREs Computational Molecular Science 5, 310-323 (2015). 
+ \end{itemize} + + \item Additionally, if you have used other specific features of GENESIS, + please make sure to properly cite the corresponding papers. + + \item We ask users to make their best effort to cite the relevant papers in the main text. While it is acceptable to cite some of the papers in the supporting information or supplementary materials due to space limitations, the name of the software, GENESIS, and at least one of the citations should appear in the main text. + + \end{enumerate} + + +\vspace{3ex} + +%%%%%%% +{\LARGE Copyright Notices} + +\begin{quote} + Copyright \copyright 2014-2025 RIKEN. + + GENESIS is free software; you can redistribute it and/or + modify it provided that the following conditions are met: + + GENESIS is released under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 3 of the License, or (at your option) any later version. + + GENESIS is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with GENESIS -- see the file COPYING and COPYING.LESSER. + If not, see \url{https://www.gnu.org/licenses/}. + +\end{quote} + +It should be mentioned this package contains the following softwares +for convenience. Please note that these are not covered by the license +under which a copy of GENESIS is licensed to you, while neither +composition nor distribution of any derivative work of GENESIS with +these software violates the terms of each license, provided that it meets +every condition of the respective licenses. + +\vspace{1ex} + +{\large SIMD-oriented Fast Mersenne Twister (SFMT)} + +SFMT is a new variant of Mersenne Twister (MT) introduced by Mutsuo Saito and Makoto Matsumoto in 2006. 
The algorithm was reported at MCQMC 2006. +The routine is distributed under the New BSD License. + +\begin{quote} + Copyright \copyright 2006,2007 Mutsuo Saito, Makoto Matsumoto and Hiroshima University. + Copyright \copyright 2012 Mutsuo Saito, Makoto Matsumoto, Hiroshima University and The University of Tokyo. + All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the names of Hiroshima University, The University of + Tokyo nor the names of its contributors may be used to endorse + or promote products derived from this software without specific + prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ""AS IS"" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+\end{quote} + +%\vspace{2ex} +% +%{\large Mersenne Twister: A random number generator} +% +%A Mersenne Twister random number generator was originally written in C +%by Makoto Matsumoto and Takuji Nishimura, and later translated into +%Fortran by Hiroshi Takano and Richard Woloshyn. This routine is +%distributed under the GNU General Public License version 2. +% +%\begin{quote} +% Copyright \copyright 1997 Makoto Matsumoto and Takuji Nishimura. +% +% Copyright \copyright 1999 Hiroshi Takano. +% +% Copyright \copyright 1999 Richard Woloshyn. +% +% This library is free software; you can redistribute it and/or +% modify it under the terms of the GNU Library General Public +% License as published by the Free Software Foundation; either +% version 2 of the License, or (at your option) any later +% version. +% +% This library is distributed in the hope that it will be useful, +% but WITHOUT ANY WARRANTY; without even the implied warranty of +% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +% See the GNU Library General Public License for more details. +% You should have received a copy of the GNU Library General +% Public License along with this library; if not, write to the +% Free Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA +% 02111-1307 USA +%\end{quote} + +\vspace{2ex} + +{\large FFTE: A Fast Fourier Transform Package} + +FFTE (\url{http://www.ffte.jp/}) is written by Daisuke Takahashi (Tsukuba +University). + +\begin{quote} + Copyright \copyright 2000-2004, 2008-2011 Daisuke Takahashi (Tsukuba + University). + + You may use, copy, modify this code for any purpose (include + commercial use) and without fee. You may distribute this ORIGINAL + package. +\end{quote} + +\vspace{2ex} + +{\large Complementary error function: erfc04} + +A Complementary error function routine (erfc04) is written by SunSoft, +a Sun Microsystems, Inc. business. + +\begin{quote} + Copyright \copyright 1993 Sun Microsystems, Inc. 
+ + Developed at SunSoft, a Sun Microsystems, Inc. business. + Permission to use, copy, modify, and distribute this + software is freely granted, provided that this notice + is preserved (see math\_libs.fpp). +\end{quote} + +\vspace{2ex} + +{\large L-BFGS-B (version 3.0)} + +L-BFGS-B (\url{http://users.iems.northwestern.edu/~nocedal/lbfgsb.html}) +is written by C. Zhu, R. Byrd, J. Nocedal and J. L. Morales. + +\begin{quote} + This software is freely available, but we expect that all publications + describing work using this software, or all commercial products using + it, quote at least one of the references given below. This software is + released under the ""New BSD License"" (aka ""Modified BSD License"" or + ""3-clause license""). + + R. H. Byrd, P. Lu and J. Nocedal. A Limited Memory Algorithm for Bound Constrained Optimization, (1995), SIAM Journal on Scientific and Statistical Computing, 16, 5, pp. 1190-1208. + + C. Zhu, R. H. Byrd and J. Nocedal. L-BFGS-B: Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (1997), ACM Transactions on Mathematical Software, Vol 23, Num. 4, pp. 550-560. + + J.L. Morales and J. Nocedal. L-BFGS-B: Remark on Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (2011), ACM Transactions on Mathematical Software, Vol 38, Num. 1, Article No. 7. + +\end{quote} + +\vspace{2ex} + +{\large JSON-Fortran (version 8.2.5)} + +JSON-Fortran (\url{https://github.com/jacobwilliams/json-fortran}) +developed by J. Williams, +is a user-friendly, thread-safe, and object-oriented API for reading and +writing JSON files, written in modern Fortran + +\begin{quote} + The JSON-Fortran source code and related files and documentation are distributed + under a permissive free software license (BSD-style). See the LICENSE file + (src/lib/json-fortran/LICENSE) for more details. +\end{quote} + +\vspace{3ex} + +\tableofcontents +''', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +#latex_documents = [ +# (master_doc, 'GENESIS.tex', u'GENESIS Documentation', +# u'RIKEN', 'manual'), +#] +latex_documents = [ + (master_doc, 'GENESIS.tex', '', + u'RIKEN', 'manual'), +] + +latex_toplevel_sectioning = 'chapter' +latex_show_urls = 'inline' +latex_show_pagerrefs = False + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'genesis', u'GENESIS Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'GENESIS', u'GENESIS Documentation', + author, 'GENESIS', 'One line description of project.', + 'Miscellaneous'), +] + + + +","Python" +"Genesis","genesis-release-r-ccs/genesis","src/lib/ffte_param.h",".h","1342","39","!C +!C FFTE: A FAST FOURIER TRANSFORM PACKAGE +!C +!C (C) COPYRIGHT SOFTWARE, 2000-2004, ALL RIGHTS RESERVED +!C BY +!C DAISUKE TAKAHASHI +!C GRADUATE SCHOOL OF SYSTEMS AND INFORMATION ENGINEERING +!C UNIVERSITY OF TSUKUBA +!C 1-1-1 TENNODAI, TSUKUBA, IBARAKI 305-8573, JAPAN +!C E-MAIL: daisuke@cs.tsukuba.ac.jp +!C +!C +!C HEADER FILE FOR PARAMETERS +!C +!C FORTRAN77 SOURCE PROGRAM +!C +!C WRITTEN BY DAISUKE TAKAHASHI +!C +!C The maximum supported number of processors is 65536. + PARAMETER (MAXNPU=65536) +!C The maximum supported 2-D transform length is 65536. + PARAMETER (NDA2=65536) +!C The maximum supported 3-D transform length is 4096. + PARAMETER (NDA3=4096) + PARAMETER (NDA4=256) +!C The parameter NBLK is a blocking parameter. 
+ PARAMETER (NBLK=16) +!C PARAMETER (NBLK=8) (for PentiumIII and Athlon) +!C PARAMETER (NBLK=16) (for Pentium4, Athlon XP, Opteron, Itanium +!C and Itanium2) +!C The parameter NP is a padding parameter to avoid cache conflicts in +!C the FFT routines. + PARAMETER (NP=8) +!C PARAMETER (NP=2) (for PentiumIII) +!C PARAMETER (NP=4) (for Athlon, Athlon XP, Opteron and Itanium) +!C PARAMETER (NP=8) (for Pentium4 and Itanium2) +!C Size of L2 cache + PARAMETER (L2SIZE=2097152) +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/fileio_data_.c",".c","1290","69"," +#include +#include ""../config.h"" + +#define MaxFile 10 + +static FILE* Files [MaxFile] = { 0 }; + +void fd_open__(char* filename, const int* read_only, int* unit_no, + int fnlen) +{ + int i; + for(i = fnlen-1; i >= 0; --i) + if (filename[i] == ' ') + filename[i] = 0; + else + break; + + for(i = 0; i < MaxFile; ++i) + if (Files[i] == 0) + { + *unit_no = i; + Files[i] = fopen(filename, (*read_only) == 1 ? ""r"" : ""w""); + break; + } + + if (i == MaxFile) + *unit_no = -1; +} + +void fd_close__(int* unit_no) +{ + if (*unit_no < 0 || *unit_no >= MaxFile) + return ; + fclose(Files[*unit_no]); + Files[*unit_no] = 0; +} + +void fd_flen__(int* unit_no, long long* flen) +{ +#ifdef KCOMP + FILE* fp = Files[*unit_no]; + fseeko64(fp, 0, SEEK_END); + *flen = ftello64(fp); + fseeko64(fp, 0, SEEK_SET); +#else + fprintf(stderr, ""fd_flen__ doesn't work on the machine\n""); + *flen = -1; +#endif +} + +void fd_read__(int* unit_no, long long *pos, unsigned char* b, long long *blen) +{ +#ifdef KCOMP + FILE* fp = Files[*unit_no]; + fseeko64(fp, (*pos)-1, SEEK_SET); + fread(b, *blen, 1, fp); +#else + fprintf(stderr, ""fd_read__ doesn't work on the machine\n""); + *blen = -1; +#endif +} + +void fd_write__(int* unit_no, unsigned char* b, long long *blen) +{ + FILE* fp = Files[*unit_no]; + fwrite(b, *blen, 1, fp); +} +","C" 
+"Genesis","genesis-release-r-ccs/genesis","src/lib/qsimulate/interface.cc",".cc","7450","205","#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +extern ""C"" { + const char *qsimulate_interface(MPI_Fint*, const int*, const char*, + const int, const int*, const double*, const double*, + double*, double*, const double*, bool*); +} + +static const char *qsimulate_interface_(MPI_Fint& fcomm, const int world_rank, const char* finput, + const int natoms, const int* const labels, const double* const coord, const double* const charges, + double* const force, double* const qmcharges, const double* const born_radii, bool& error); + +const char *qsimulate_interface(MPI_Fint* fcomm, const int* world_rank, const char* input, + const int natoms, const int * labels, const double* coord, const double* charges, + double* force, double* qmcharges, const double* born_radii, bool* error) { + + return qsimulate_interface_(*fcomm, *world_rank, input, + natoms, labels, coord, charges, + force, qmcharges, born_radii, *error); +} + +using namespace std; + +// quick look up table for elements +static const vector elem = {""Q"",""H"",""He"",""Li"",""Be"",""B"",""C"",""N"",""O"",""F"",""Ne"",""Na"",""Mg"",""Al"",""Si"",""P"",""S"",""Cl"",""Ar"",""K"",""Ca"", + ""Sc"",""Ti"",""V"",""Cr"",""Mn"",""Fe"",""Co"",""Ni"",""Cu"",""Zn"",""Ga"",""Ge"",""As"",""Se"",""Br"",""Kr"",""Rr"",""Sr"",""Y"",""Zr"",""Nb"",""Mo"",""Tc"",""Ru"",""Rh"",""Pd"",""Ag"",""Cd"",""In"",""Sn"", + ""Sb"",""Te"",""I"",""Xe"",""Cs"",""Ba"",""La"",""Ce"",""Pr"",""Nd"",""Pm"",""Sm"",""Eu"",""Gd"",""Tb"",""Dy"",""Ho"",""Er"",""Tm"",""Yb"",""Lu"",""Hf"",""Ta"",""W"",""Re"",""Os"",""Ir"",""Pt"",""Au"",""Hg"", + ""Tl"",""Pb"",""Bi"",""Po"",""At"",""Rn"",""Fr"",""Ra"",""Ac"",""Th"",""Pa"",""U"",""Np"",""Pu"",""Am"",""Cm"",""Bk"",""Cf"",""Es"",""Fm"",""Md"",""No"",""Lr"",""Rf"",""Db"",""Sg"",""Bh"",""Hs"",""Mt"",""Ds"", + 
""Rg"",""Cn"",""Uut"",""Fl"",""Uup"",""Lv"",""Uus"",""Uuo""}; + + +shared_ptr saveref; +vector output_buffer; + +static const char *qsimulate_interface_(MPI_Fint& fcomm, const int world_rank, const char* finput, + const int natoms, const int* const labels, const double* const coord, const double* const charges, + double* const force, double* const qmcharges, + const double* const born_radii, bool& error) { + + MPI_Comm comm = MPI_Comm_f2c(fcomm); + + int size, rank; + MPI_Comm_rank(comm, &rank); + MPI_Comm_size(comm, &size); + + // assuming that the input will not change in the course of the calculation + const string input(finput); + stringstream ss(input); + bagel::Json jinp; + ss >> jinp; + + if (jinp.find(""bagel"") == jinp.end() || !jinp[""bagel""].is_array() || jinp[""bagel""].size() < 2) + throw runtime_error(""key \""bagel\"" not found""); + + if (jinp[""bagel""][0].find(""title"") == jinp[""bagel""][0].end() || jinp[""bagel""][0].at(""title"") != ""molecule"") + throw runtime_error(""molecule block is not at the beginning of the input""); + + // try to find ""basis_link"" + auto basis_link = jinp[""bagel""][0].find(""basis_link""); + auto df_basis_link = jinp[""bagel""][0].find(""df_basis_link""); + const string blink = basis_link != jinp[""bagel""][0].end() ? *basis_link : """"; + const string dlink = df_basis_link != jinp[""bagel""][0].end() ? 
*df_basis_link : """"; + + bagel::Json geometry = bagel::Json::array(); + int ncharges = 0; + for (int i = 0; i != natoms; ++i) { + if (labels[i] == 0) { + ++ncharges; + continue; + } + bagel::Json atom = bagel::Json({}); + atom[""atom""] = elem.at(abs(labels[i])); + atom[""xyz""] = {coord[3*i], coord[3*i+1], coord[3*i+2]}; + + // link atoms + if (labels[i] < 0 and !blink.empty()) + atom[""basis""] = blink; + if (labels[i] < 0 and !dlink.empty()) + atom[""df_basis""] = dlink; + + geometry.push_back(atom); + } + jinp[""bagel""][0][""geometry""] = geometry; + + vector extcharges(ncharges * 4); + ncharges = 0; + for (int i = 0; i != natoms; ++i) { + if (labels[i] != 0) + continue; + extcharges[0 + 4 * ncharges] = coord[0 + 3 * i]; + extcharges[1 + 4 * ncharges] = coord[1 + 3 * i]; + extcharges[2 + 4 * ncharges] = coord[2 + 3 * i]; + extcharges[3 + 4 * ncharges] = charges[i]; + ++ncharges; + } + + if (ncharges != 0) { + jinp[""bagel""][0][""charges""] = bagel::Json(move(extcharges)); + } + + // For GBSA + if (born_radii) { + vector qm_born_radii(natoms - ncharges); + vector ext_born_radii(ncharges); + size_t cnt_i = 0; + size_t cnt_j = 0; + for (int i = 0; i != natoms; ++i) { + if (labels[i] == 0) { + ext_born_radii[cnt_i++] = born_radii[i]; + } else { + qm_born_radii[cnt_j++] = born_radii[i]; + } + } + assert(cnt_i == ncharges); + + for (auto& block : jinp[""bagel""]) { + if (block[""title""] == ""force"") { + for (auto& subblock : block[""method""]) { + if (subblock[""title""] == ""dftb"" || subblock[""title""] == ""xtb"") { + subblock[""born_rad""] = bagel::Json(move(qm_born_radii)); + subblock[""external_born_rad""] = bagel::Json(move(ext_born_radii)); + } + } + } + // For single point calculations + if (block[""title""] == ""dftb"" || block[""title""] == ""xtb"") { + block[""born_rad""] = bagel::Json(move(qm_born_radii)); + block[""external_born_rad""] = bagel::Json(move(ext_born_radii)); + } + } + } + + shared_ptr out_c; + error = false; + try { + tie(out_c, 
saveref) = bagel::run_bagel_from_json(make_shared(move(jinp)), &comm, world_rank, saveref); + } catch (const exception& e) { + cout << ""exception was thrown in QSimulate: "" << e.what() << endl; + out_c = make_shared(); + error = true; + } + shared_ptr out = const_pointer_cast(out_c); + + bool no_gradient = false; + if (!error) { + if (out->find(""energy"") == out->end()) { + cout << ""energy is not found in QSimulate"" << endl; + error = true; + } + if (out->find(""gradient"") == out->end()) { + cout << ""gradient is not found in QSimulate"" << endl; + // Not finding a gradient does not throw an error. + // This is to allow single-shot calculations with genesis + no_gradient = true; + } + } + + if (!error) { + if (no_gradient) { + double* cptr = qmcharges; + for (auto& charge : out->at(""charges"")) + *cptr++ = static_cast(charge); + } else { + // retrieving nuclear force, and optionally qm charges + double* ptr = force; + double* cptr = qmcharges; + for (auto& iatom : out->at(""gradient"")) { + for (auto& xyz : iatom.at(""xyz"")) + *ptr++ = xyz; + *cptr++ = iatom.find(""charge"") != iatom.end() ? 
static_cast(iatom.at(""charge"")) : 0.0; + } + out->erase(""gradient""); + + if (out->find(""external_gradient"") != out->end()) { + const vector& egrad = out->at(""external_gradient""); + assert(egrad.size() == ncharges * 3); + for (int i = 0; i != ncharges; ++i) { + *ptr++ = egrad[0 + 3*i]; + *ptr++ = egrad[1 + 3*i]; + *ptr++ = egrad[2 + 3*i]; + *cptr++ = 0.0; + } + out->erase(""external_gradient""); + } + } + } + + string output = out->dump(); + output_buffer.clear(); + std::copy(output.begin(), output.end() + 1, std::back_inserter(output_buffer)); + + return output_buffer.data(); + +} +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/qsimulate/interface.hpp",".hpp","803","24","#ifdef QSIMULATE + interface + function qsimulate_interface(mpicomm, myrank, input, & + natoms, atoms, coord, charges, & + force, qmcharges, & + born_radii, error) bind(C) + use constants_mod + use iso_c_binding + + type(C_ptr) :: qsimulate_interface + integer :: mpicomm, myrank + character(kind=c_char) :: input(*) + integer(c_int), value :: natoms + type(c_ptr), value :: atoms + type(c_ptr), value :: coord + type(c_ptr), value :: charges + type(c_ptr), value :: force + type(c_ptr), value :: qmcharges + type(c_ptr), value :: born_radii + logical :: error + end function qsimulate_interface + end interface +#endif +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT.c",".c","19893","639","/** + * @file dSFMT.c + * @brief double precision SIMD-oriented Fast Mersenne Twister (dSFMT) + * based on IEEE 754 format. + * + * @author Mutsuo Saito (Hiroshima University) + * @author Makoto Matsumoto (Hiroshima University) + * + * Copyright (C) 2007,2008 Mutsuo Saito, Makoto Matsumoto and Hiroshima + * University. All rights reserved. 
+ * + * The new BSD License is applied to this software, see LICENSE.txt + */ + +#ifdef HAVE_CONFIG_H +#include ""../../config.h"" +#endif + +#include +#include +#include +#include ""dSFMT-params.h"" +#include ""dSFMT-common.h"" + +#if defined(__cplusplus) +extern ""C"" { +#endif + +/** dsfmt internal state vector */ +dsfmt_t dsfmt_global_data; +/** dsfmt mexp for check */ +static const int dsfmt_mexp = DSFMT_MEXP; + +/*---------------- + STATIC FUNCTIONS + ----------------*/ +inline static uint32_t ini_func1(uint32_t x); +inline static uint32_t ini_func2(uint32_t x); +inline static void gen_rand_array_c1o2(dsfmt_t *dsfmt, w128_t *array, + int size); +inline static void gen_rand_array_c0o1(dsfmt_t *dsfmt, w128_t *array, + int size); +inline static void gen_rand_array_o0c1(dsfmt_t *dsfmt, w128_t *array, + int size); +inline static void gen_rand_array_o0o1(dsfmt_t *dsfmt, w128_t *array, + int size); +inline static int idxof(int i); +static void initial_mask(dsfmt_t *dsfmt); +static void period_certification(dsfmt_t *dsfmt); + +#if defined(HAVE_SSE2) +/** 1 in 64bit for sse2 */ +static const union X128I_T sse2_int_one = {{1, 1}}; +/** 2.0 double for sse2 */ +static const union X128D_T sse2_double_two = {{2.0, 2.0}}; +/** -1.0 double for sse2 */ +static const union X128D_T sse2_double_m_one = {{-1.0, -1.0}}; +#endif + +/** + * This function simulate a 32-bit array index overlapped to 64-bit + * array of LITTLE ENDIAN in BIG ENDIAN machine. + */ +#if defined(DSFMT_BIG_ENDIAN) +inline static int idxof(int i) { + return i ^ 1; +} +#else +inline static int idxof(int i) { + return i; +} +#endif + +#if defined(HAVE_SSE2) +/** + * This function converts the double precision floating point numbers which + * distribute uniformly in the range [1, 2) to those which distribute uniformly + * in the range [0, 1). 
+ * @param w 128bit stracture of double precision floating point numbers (I/O) + */ +inline static void convert_c0o1(w128_t *w) { + w->sd = _mm_add_pd(w->sd, sse2_double_m_one.d128); +} + +/** + * This function converts the double precision floating point numbers which + * distribute uniformly in the range [1, 2) to those which distribute uniformly + * in the range (0, 1]. + * @param w 128bit stracture of double precision floating point numbers (I/O) + */ +inline static void convert_o0c1(w128_t *w) { + w->sd = _mm_sub_pd(sse2_double_two.d128, w->sd); +} + +/** + * This function converts the double precision floating point numbers which + * distribute uniformly in the range [1, 2) to those which distribute uniformly + * in the range (0, 1). + * @param w 128bit stracture of double precision floating point numbers (I/O) + */ +inline static void convert_o0o1(w128_t *w) { + w->si = _mm_or_si128(w->si, sse2_int_one.i128); + w->sd = _mm_add_pd(w->sd, sse2_double_m_one.d128); +} +#else /* standard C and altivec */ +/** + * This function converts the double precision floating point numbers which + * distribute uniformly in the range [1, 2) to those which distribute uniformly + * in the range [0, 1). + * @param w 128bit stracture of double precision floating point numbers (I/O) + */ +inline static void convert_c0o1(w128_t *w) { + w->d[0] -= 1.0; + w->d[1] -= 1.0; +} + +/** + * This function converts the double precision floating point numbers which + * distribute uniformly in the range [1, 2) to those which distribute uniformly + * in the range (0, 1]. + * @param w 128bit stracture of double precision floating point numbers (I/O) + */ +inline static void convert_o0c1(w128_t *w) { + w->d[0] = 2.0 - w->d[0]; + w->d[1] = 2.0 - w->d[1]; +} + +/** + * This function converts the double precision floating point numbers which + * distribute uniformly in the range [1, 2) to those which distribute uniformly + * in the range (0, 1). 
+ * @param w 128bit stracture of double precision floating point numbers (I/O) + */ +inline static void convert_o0o1(w128_t *w) { + w->u[0] |= 1; + w->u[1] |= 1; + w->d[0] -= 1.0; + w->d[1] -= 1.0; +} +#endif + +/** + * This function fills the user-specified array with double precision + * floating point pseudorandom numbers of the IEEE 754 format. + * @param dsfmt dsfmt state vector. + * @param array an 128-bit array to be filled by pseudorandom numbers. + * @param size number of 128-bit pseudorandom numbers to be generated. + */ +inline static void gen_rand_array_c1o2(dsfmt_t *dsfmt, w128_t *array, + int size) { + int i, j; + w128_t lung; + + lung = dsfmt->status[DSFMT_N]; + do_recursion(&array[0], &dsfmt->status[0], &dsfmt->status[DSFMT_POS1], + &lung); + for (i = 1; i < DSFMT_N - DSFMT_POS1; i++) { + do_recursion(&array[i], &dsfmt->status[i], + &dsfmt->status[i + DSFMT_POS1], &lung); + } + for (; i < DSFMT_N; i++) { + do_recursion(&array[i], &dsfmt->status[i], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + } + for (; i < size - DSFMT_N; i++) { + do_recursion(&array[i], &array[i - DSFMT_N], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + } + for (j = 0; j < 2 * DSFMT_N - size; j++) { + dsfmt->status[j] = array[j + size - DSFMT_N]; + } + for (; i < size; i++, j++) { + do_recursion(&array[i], &array[i - DSFMT_N], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + dsfmt->status[j] = array[i]; + } + dsfmt->status[DSFMT_N] = lung; +} + +/** + * This function fills the user-specified array with double precision + * floating point pseudorandom numbers of the IEEE 754 format. + * @param dsfmt dsfmt state vector. + * @param array an 128-bit array to be filled by pseudorandom numbers. + * @param size number of 128-bit pseudorandom numbers to be generated. 
+ */ +inline static void gen_rand_array_c0o1(dsfmt_t *dsfmt, w128_t *array, + int size) { + int i, j; + w128_t lung; + + lung = dsfmt->status[DSFMT_N]; + do_recursion(&array[0], &dsfmt->status[0], &dsfmt->status[DSFMT_POS1], + &lung); + for (i = 1; i < DSFMT_N - DSFMT_POS1; i++) { + do_recursion(&array[i], &dsfmt->status[i], + &dsfmt->status[i + DSFMT_POS1], &lung); + } + for (; i < DSFMT_N; i++) { + do_recursion(&array[i], &dsfmt->status[i], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + } + for (; i < size - DSFMT_N; i++) { + do_recursion(&array[i], &array[i - DSFMT_N], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + convert_c0o1(&array[i - DSFMT_N]); + } + for (j = 0; j < 2 * DSFMT_N - size; j++) { + dsfmt->status[j] = array[j + size - DSFMT_N]; + } + for (; i < size; i++, j++) { + do_recursion(&array[i], &array[i - DSFMT_N], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + dsfmt->status[j] = array[i]; + convert_c0o1(&array[i - DSFMT_N]); + } + for (i = size - DSFMT_N; i < size; i++) { + convert_c0o1(&array[i]); + } + dsfmt->status[DSFMT_N] = lung; +} + +/** + * This function fills the user-specified array with double precision + * floating point pseudorandom numbers of the IEEE 754 format. + * @param dsfmt dsfmt state vector. + * @param array an 128-bit array to be filled by pseudorandom numbers. + * @param size number of 128-bit pseudorandom numbers to be generated. 
+ */ +inline static void gen_rand_array_o0o1(dsfmt_t *dsfmt, w128_t *array, + int size) { + int i, j; + w128_t lung; + + lung = dsfmt->status[DSFMT_N]; + do_recursion(&array[0], &dsfmt->status[0], &dsfmt->status[DSFMT_POS1], + &lung); + for (i = 1; i < DSFMT_N - DSFMT_POS1; i++) { + do_recursion(&array[i], &dsfmt->status[i], + &dsfmt->status[i + DSFMT_POS1], &lung); + } + for (; i < DSFMT_N; i++) { + do_recursion(&array[i], &dsfmt->status[i], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + } + for (; i < size - DSFMT_N; i++) { + do_recursion(&array[i], &array[i - DSFMT_N], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + convert_o0o1(&array[i - DSFMT_N]); + } + for (j = 0; j < 2 * DSFMT_N - size; j++) { + dsfmt->status[j] = array[j + size - DSFMT_N]; + } + for (; i < size; i++, j++) { + do_recursion(&array[i], &array[i - DSFMT_N], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + dsfmt->status[j] = array[i]; + convert_o0o1(&array[i - DSFMT_N]); + } + for (i = size - DSFMT_N; i < size; i++) { + convert_o0o1(&array[i]); + } + dsfmt->status[DSFMT_N] = lung; +} + +/** + * This function fills the user-specified array with double precision + * floating point pseudorandom numbers of the IEEE 754 format. + * @param dsfmt dsfmt state vector. + * @param array an 128-bit array to be filled by pseudorandom numbers. + * @param size number of 128-bit pseudorandom numbers to be generated. 
+ */ +inline static void gen_rand_array_o0c1(dsfmt_t *dsfmt, w128_t *array, + int size) { + int i, j; + w128_t lung; + + lung = dsfmt->status[DSFMT_N]; + do_recursion(&array[0], &dsfmt->status[0], &dsfmt->status[DSFMT_POS1], + &lung); + for (i = 1; i < DSFMT_N - DSFMT_POS1; i++) { + do_recursion(&array[i], &dsfmt->status[i], + &dsfmt->status[i + DSFMT_POS1], &lung); + } + for (; i < DSFMT_N; i++) { + do_recursion(&array[i], &dsfmt->status[i], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + } + for (; i < size - DSFMT_N; i++) { + do_recursion(&array[i], &array[i - DSFMT_N], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + convert_o0c1(&array[i - DSFMT_N]); + } + for (j = 0; j < 2 * DSFMT_N - size; j++) { + dsfmt->status[j] = array[j + size - DSFMT_N]; + } + for (; i < size; i++, j++) { + do_recursion(&array[i], &array[i - DSFMT_N], + &array[i + DSFMT_POS1 - DSFMT_N], &lung); + dsfmt->status[j] = array[i]; + convert_o0c1(&array[i - DSFMT_N]); + } + for (i = size - DSFMT_N; i < size; i++) { + convert_o0c1(&array[i]); + } + dsfmt->status[DSFMT_N] = lung; +} + +/** + * This function represents a function used in the initialization + * by init_by_array + * @param x 32-bit integer + * @return 32-bit integer + */ +static uint32_t ini_func1(uint32_t x) { + return (x ^ (x >> 27)) * (uint32_t)1664525UL; +} + +/** + * This function represents a function used in the initialization + * by init_by_array + * @param x 32-bit integer + * @return 32-bit integer + */ +static uint32_t ini_func2(uint32_t x) { + return (x ^ (x >> 27)) * (uint32_t)1566083941UL; +} + +/** + * This function initializes the internal state array to fit the IEEE + * 754 format. + * @param dsfmt dsfmt state vector. + */ +static void initial_mask(dsfmt_t *dsfmt) { + int i; + uint64_t *psfmt; + + psfmt = &dsfmt->status[0].u[0]; + for (i = 0; i < DSFMT_N * 2; i++) { + psfmt[i] = (psfmt[i] & DSFMT_LOW_MASK) | DSFMT_HIGH_CONST; + } +} + +/** + * This function certificate the period of 2^{SFMT_MEXP}-1. 
+ * @param dsfmt dsfmt state vector. + */ +static void period_certification(dsfmt_t *dsfmt) { + uint64_t pcv[2] = {DSFMT_PCV1, DSFMT_PCV2}; + uint64_t tmp[2]; + uint64_t inner; + int i; +#if (DSFMT_PCV2 & 1) != 1 + int j; + uint64_t work; +#endif + + tmp[0] = (dsfmt->status[DSFMT_N].u[0] ^ DSFMT_FIX1); + tmp[1] = (dsfmt->status[DSFMT_N].u[1] ^ DSFMT_FIX2); + + inner = tmp[0] & pcv[0]; + inner ^= tmp[1] & pcv[1]; + for (i = 32; i > 0; i >>= 1) { + inner ^= inner >> i; + } + inner &= 1; + /* check OK */ + if (inner == 1) { + return; + } + /* check NG, and modification */ +#if (DSFMT_PCV2 & 1) == 1 + dsfmt->status[DSFMT_N].u[1] ^= 1; +#else + for (i = 1; i >= 0; i--) { + work = 1; + for (j = 0; j < 64; j++) { + if ((work & pcv[i]) != 0) { + dsfmt->status[DSFMT_N].u[i] ^= work; + return; + } + work = work << 1; + } + } +#endif + return; +} + +/*---------------- + PUBLIC FUNCTIONS + ----------------*/ +/** + * This function returns the identification string. The string shows + * the Mersenne exponent, and all parameters of this generator. + * @return id string. + */ +const char *dsfmt_get_idstring(void) { + return DSFMT_IDSTR; +} + +/** + * This function returns the minimum size of array used for \b + * fill_array functions. + * @return minimum size of array used for fill_array functions. + */ +int dsfmt_get_min_array_size(void) { + return DSFMT_N64; +} + +/** + * This function fills the internal state array with double precision + * floating point pseudorandom numbers of the IEEE 754 format. + * @param dsfmt dsfmt state vector. 
+ */ +void dsfmt_gen_rand_all(dsfmt_t *dsfmt) { + int i; + w128_t lung; + + lung = dsfmt->status[DSFMT_N]; + do_recursion(&dsfmt->status[0], &dsfmt->status[0], + &dsfmt->status[DSFMT_POS1], &lung); + for (i = 1; i < DSFMT_N - DSFMT_POS1; i++) { + do_recursion(&dsfmt->status[i], &dsfmt->status[i], + &dsfmt->status[i + DSFMT_POS1], &lung); + } + for (; i < DSFMT_N; i++) { + do_recursion(&dsfmt->status[i], &dsfmt->status[i], + &dsfmt->status[i + DSFMT_POS1 - DSFMT_N], &lung); + } + dsfmt->status[DSFMT_N] = lung; +} + +/** + * This function generates double precision floating point + * pseudorandom numbers which distribute in the range [1, 2) to the + * specified array[] by one call. The number of pseudorandom numbers + * is specified by the argument \b size, which must be at least (SFMT_MEXP + * / 128) * 2 and a multiple of two. The function + * get_min_array_size() returns this minimum size. The generation by + * this function is much faster than the following fill_array_xxx functions. + * + * For initialization, init_gen_rand() or init_by_array() must be called + * before the first call of this function. This function can not be + * used after calling genrand_xxx functions, without initialization. + * + * @param dsfmt dsfmt state vector. + * @param array an array where pseudorandom numbers are filled + * by this function. The pointer to the array must be ""aligned"" + * (namely, must be a multiple of 16) in the SIMD version, since it + * refers to the address of a 128-bit integer. In the standard C + * version, the pointer is arbitrary. + * + * @param size the number of 64-bit pseudorandom integers to be + * generated. size must be a multiple of 2, and greater than or equal + * to (SFMT_MEXP / 128) * 2. + * + * @note \b memalign or \b posix_memalign is available to get aligned + * memory. Mac OSX doesn't have these functions, but \b malloc of OSX + * returns the pointer to the aligned memory block. 
+ */ +void dsfmt_fill_array_close1_open2(dsfmt_t *dsfmt, double array[], int size) { + assert(size % 2 == 0); + assert(size >= DSFMT_N64); + gen_rand_array_c1o2(dsfmt, (w128_t *)array, size / 2); +} + +/** + * This function generates double precision floating point + * pseudorandom numbers which distribute in the range (0, 1] to the + * specified array[] by one call. This function is the same as + * fill_array_close1_open2() except the distribution range. + * + * @param dsfmt dsfmt state vector. + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. + * see also \sa fill_array_close1_open2() + */ +void dsfmt_fill_array_open_close(dsfmt_t *dsfmt, double array[], int size) { + assert(size % 2 == 0); + assert(size >= DSFMT_N64); + gen_rand_array_o0c1(dsfmt, (w128_t *)array, size / 2); +} + +/** + * This function generates double precision floating point + * pseudorandom numbers which distribute in the range [0, 1) to the + * specified array[] by one call. This function is the same as + * fill_array_close1_open2() except the distribution range. + * + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param dsfmt dsfmt state vector. + * @param size the number of pseudorandom numbers to be generated. + * see also \sa fill_array_close1_open2() + */ +void dsfmt_fill_array_close_open(dsfmt_t *dsfmt, double array[], int size) { + assert(size % 2 == 0); + assert(size >= DSFMT_N64); + gen_rand_array_c0o1(dsfmt, (w128_t *)array, size / 2); +} + +/** + * This function generates double precision floating point + * pseudorandom numbers which distribute in the range (0, 1) to the + * specified array[] by one call. This function is the same as + * fill_array_close1_open2() except the distribution range. + * + * @param dsfmt dsfmt state vector. + * @param array an array where pseudorandom numbers are filled + * by this function. 
+ * @param size the number of pseudorandom numbers to be generated. + * see also \sa fill_array_close1_open2() + */ +void dsfmt_fill_array_open_open(dsfmt_t *dsfmt, double array[], int size) { + assert(size % 2 == 0); + assert(size >= DSFMT_N64); + gen_rand_array_o0o1(dsfmt, (w128_t *)array, size / 2); +} + +#if defined(__INTEL_COMPILER) +# pragma warning(disable:981) +#endif +/** + * This function initializes the internal state array with a 32-bit + * integer seed. + * @param dsfmt dsfmt state vector. + * @param seed a 32-bit integer used as the seed. + * @param mexp caller's mersenne expornent + */ +void dsfmt_chk_init_gen_rand(dsfmt_t *dsfmt, uint32_t seed, int mexp) { + int i; + uint32_t *psfmt; + + /* make sure caller program is compiled with the same MEXP */ + if (mexp != dsfmt_mexp) { + fprintf(stderr, ""DSFMT_MEXP doesn't match with dSFMT.c\n""); + exit(1); + } + psfmt = &dsfmt->status[0].u32[0]; + psfmt[idxof(0)] = seed; + for (i = 1; i < (DSFMT_N + 1) * 4; i++) { + psfmt[idxof(i)] = 1812433253UL + * (psfmt[idxof(i - 1)] ^ (psfmt[idxof(i - 1)] >> 30)) + i; + } + initial_mask(dsfmt); + period_certification(dsfmt); + dsfmt->idx = DSFMT_N64; +} + +/** + * This function initializes the internal state array, + * with an array of 32-bit integers used as the seeds + * @param dsfmt dsfmt state vector. + * @param init_key the array of 32-bit integers, used as a seed. + * @param key_length the length of init_key. 
+ * @param mexp caller's mersenne expornent + */ +void dsfmt_chk_init_by_array(dsfmt_t *dsfmt, uint32_t init_key[], + int key_length, int mexp) { + int i, j, count; + uint32_t r; + uint32_t *psfmt32; + int lag; + int mid; + int size = (DSFMT_N + 1) * 4; /* pulmonary */ + + /* make sure caller program is compiled with the same MEXP */ + if (mexp != dsfmt_mexp) { + fprintf(stderr, ""DSFMT_MEXP doesn't match with dSFMT.c\n""); + exit(1); + } + if (size >= 623) { + lag = 11; + } else if (size >= 68) { + lag = 7; + } else if (size >= 39) { + lag = 5; + } else { + lag = 3; + } + mid = (size - lag) / 2; + + psfmt32 = &dsfmt->status[0].u32[0]; + memset(dsfmt->status, 0x8b, sizeof(dsfmt->status)); + if (key_length + 1 > size) { + count = key_length + 1; + } else { + count = size; + } + r = ini_func1(psfmt32[idxof(0)] ^ psfmt32[idxof(mid % size)] + ^ psfmt32[idxof((size - 1) % size)]); + psfmt32[idxof(mid % size)] += r; + r += key_length; + psfmt32[idxof((mid + lag) % size)] += r; + psfmt32[idxof(0)] = r; + count--; + for (i = 1, j = 0; (j < count) && (j < key_length); j++) { + r = ini_func1(psfmt32[idxof(i)] + ^ psfmt32[idxof((i + mid) % size)] + ^ psfmt32[idxof((i + size - 1) % size)]); + psfmt32[idxof((i + mid) % size)] += r; + r += init_key[j] + i; + psfmt32[idxof((i + mid + lag) % size)] += r; + psfmt32[idxof(i)] = r; + i = (i + 1) % size; + } + for (; j < count; j++) { + r = ini_func1(psfmt32[idxof(i)] + ^ psfmt32[idxof((i + mid) % size)] + ^ psfmt32[idxof((i + size - 1) % size)]); + psfmt32[idxof((i + mid) % size)] += r; + r += i; + psfmt32[idxof((i + mid + lag) % size)] += r; + psfmt32[idxof(i)] = r; + i = (i + 1) % size; + } + for (j = 0; j < size; j++) { + r = ini_func2(psfmt32[idxof(i)] + + psfmt32[idxof((i + mid) % size)] + + psfmt32[idxof((i + size - 1) % size)]); + psfmt32[idxof((i + mid) % size)] ^= r; + r -= i; + psfmt32[idxof((i + mid + lag) % size)] ^= r; + psfmt32[idxof(i)] = r; + i = (i + 1) % size; + } + initial_mask(dsfmt); + 
period_certification(dsfmt); + dsfmt->idx = DSFMT_N64; +} +#if defined(__INTEL_COMPILER) +# pragma warning(default:981) +#endif + +#if defined(__cplusplus) +} +#endif +","C" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT.h",".h","22124","637","#pragma once +/** + * @file dSFMT.h + * + * @brief double precision SIMD oriented Fast Mersenne Twister(dSFMT) + * pseudorandom number generator based on IEEE 754 format. + * + * @author Mutsuo Saito (Hiroshima University) + * @author Makoto Matsumoto (Hiroshima University) + * + * Copyright (C) 2007, 2008 Mutsuo Saito, Makoto Matsumoto and + * Hiroshima University. All rights reserved. + * Copyright (C) 2012 Mutsuo Saito, Makoto Matsumoto, + * Hiroshima University and The University of Tokyo. + * All rights reserved. + * + * The new BSD License is applied to this software. + * see LICENSE.txt + * + * @note We assume that your system has inttypes.h. If your system + * doesn't have inttypes.h, you have to typedef uint32_t and uint64_t, + * and you have to define PRIu64 and PRIx64 in this file as follows: + * @verbatim + typedef unsigned int uint32_t + typedef unsigned long long uint64_t + #define PRIu64 ""llu"" + #define PRIx64 ""llx"" +@endverbatim + * uint32_t must be exactly 32-bit unsigned integer type (no more, no + * less), and uint64_t must be exactly 64-bit unsigned integer type. + * PRIu64 and PRIx64 are used for printf function to print 64-bit + * unsigned int and 64-bit unsigned int in hexadecimal format. + */ + +#ifndef DSFMT_H +#define DSFMT_H +#if defined(__cplusplus) +extern ""C"" { +#endif + +#include +#include + +#if !defined(DSFMT_MEXP) +#ifdef __GNUC__ + #warning ""DSFMT_MEXP is not defined. I assume DSFMT_MEXP is 19937."" +#endif + #define DSFMT_MEXP 19937 +#endif +/*----------------- + BASIC DEFINITIONS + -----------------*/ +/* Mersenne Exponent. The period of the sequence + * is a multiple of 2^DSFMT_MEXP-1. 
+ * #define DSFMT_MEXP 19937 */ +/** DSFMT generator has an internal state array of 128-bit integers, + * and N is its size. */ +#define DSFMT_N ((DSFMT_MEXP - 128) / 104 + 1) +/** N32 is the size of internal state array when regarded as an array + * of 32-bit integers.*/ +#define DSFMT_N32 (DSFMT_N * 4) +/** N64 is the size of internal state array when regarded as an array + * of 64-bit integers.*/ +#define DSFMT_N64 (DSFMT_N * 2) + +#if !defined(DSFMT_BIG_ENDIAN) +# if defined(__BYTE_ORDER) && defined(__BIG_ENDIAN) +# if __BYTE_ORDER == __BIG_ENDIAN +# define DSFMT_BIG_ENDIAN 1 +# endif +# elif defined(_BYTE_ORDER) && defined(_BIG_ENDIAN) +# if _BYTE_ORDER == _BIG_ENDIAN +# define DSFMT_BIG_ENDIAN 1 +# endif +# elif defined(__BYTE_ORDER__) && defined(__BIG_ENDIAN__) +# if __BYTE_ORDER__ == __BIG_ENDIAN__ +# define DSFMT_BIG_ENDIAN 1 +# endif +# elif defined(BYTE_ORDER) && defined(BIG_ENDIAN) +# if BYTE_ORDER == BIG_ENDIAN +# define DSFMT_BIG_ENDIAN 1 +# endif +# elif defined(__BIG_ENDIAN) || defined(_BIG_ENDIAN) \ + || defined(__BIG_ENDIAN__) || defined(BIG_ENDIAN) +# define DSFMT_BIG_ENDIAN 1 +# endif +#endif + +#if defined(DSFMT_BIG_ENDIAN) && defined(__amd64) +# undef DSFMT_BIG_ENDIAN +#endif + +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) +# include +#elif defined(_MSC_VER) || defined(__BORLANDC__) +# if !defined(DSFMT_UINT32_DEFINED) && !defined(SFMT_UINT32_DEFINED) +typedef unsigned int uint32_t; +typedef unsigned __int64 uint64_t; +# ifndef UINT64_C +# define UINT64_C(v) (v ## ui64) +# endif +# define DSFMT_UINT32_DEFINED +# if !defined(inline) && !defined(__cplusplus) +# define inline __inline +# endif +# endif +#else +# include +# if !defined(inline) && !defined(__cplusplus) +# if defined(__GNUC__) +# define inline __inline__ +# else +# define inline +# endif +# endif +#endif + +#ifndef PRIu64 +# if defined(_MSC_VER) || defined(__BORLANDC__) +# define PRIu64 ""I64u"" +# define PRIx64 ""I64x"" +# else +# define PRIu64 ""llu"" +# define 
PRIx64 ""llx"" +# endif +#endif + +#ifndef UINT64_C +# define UINT64_C(v) (v ## ULL) +#endif + +/*------------------------------------------ + 128-bit SIMD like data type for standard C + ------------------------------------------*/ +#if defined(HAVE_ALTIVEC) +# if !defined(__APPLE__) +# include +# endif +/** 128-bit data structure */ +union W128_T { + vector unsigned int s; + uint64_t u[2]; + uint32_t u32[4]; + double d[2]; +}; + +#elif defined(HAVE_SSE2) +# include + +/** 128-bit data structure */ +union W128_T { + __m128i si; + __m128d sd; + uint64_t u[2]; + uint32_t u32[4]; + double d[2]; +}; +#else /* standard C */ +/** 128-bit data structure */ +union W128_T { + uint64_t u[2]; + uint32_t u32[4]; + double d[2]; +}; +#endif + +/** 128-bit data type */ +typedef union W128_T w128_t; + +/** the 128-bit internal state array */ +struct DSFMT_T { + w128_t status[DSFMT_N + 1]; + int idx; +}; +typedef struct DSFMT_T dsfmt_t; + +/** dsfmt internal state vector */ +extern dsfmt_t dsfmt_global_data; +/** dsfmt mexp for check */ +extern const int dsfmt_global_mexp; + +void dsfmt_gen_rand_all(dsfmt_t *dsfmt); +void dsfmt_fill_array_open_close(dsfmt_t *dsfmt, double array[], int size); +void dsfmt_fill_array_close_open(dsfmt_t *dsfmt, double array[], int size); +void dsfmt_fill_array_open_open(dsfmt_t *dsfmt, double array[], int size); +void dsfmt_fill_array_close1_open2(dsfmt_t *dsfmt, double array[], int size); +void dsfmt_chk_init_gen_rand(dsfmt_t *dsfmt, uint32_t seed, int mexp); +void dsfmt_chk_init_by_array(dsfmt_t *dsfmt, uint32_t init_key[], + int key_length, int mexp); +const char *dsfmt_get_idstring(void); +int dsfmt_get_min_array_size(void); + +#if defined(__GNUC__) +# define DSFMT_PRE_INLINE inline static +# define DSFMT_PST_INLINE __attribute__((always_inline)) +#elif defined(_MSC_VER) && _MSC_VER >= 1200 +# define DSFMT_PRE_INLINE __forceinline static +# define DSFMT_PST_INLINE +#else +# define DSFMT_PRE_INLINE inline static +# define DSFMT_PST_INLINE +#endif 
+DSFMT_PRE_INLINE uint32_t dsfmt_genrand_uint32(dsfmt_t *dsfmt) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double dsfmt_genrand_close1_open2(dsfmt_t *dsfmt) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double dsfmt_genrand_close_open(dsfmt_t *dsfmt) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double dsfmt_genrand_open_close(dsfmt_t *dsfmt) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double dsfmt_genrand_open_open(dsfmt_t *dsfmt) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE uint32_t dsfmt_gv_genrand_uint32(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double dsfmt_gv_genrand_close1_open2(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double dsfmt_gv_genrand_close_open(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double dsfmt_gv_genrand_open_close(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double dsfmt_gv_genrand_open_open(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void dsfmt_gv_fill_array_open_close(double array[], int size) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void dsfmt_gv_fill_array_close_open(double array[], int size) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void dsfmt_gv_fill_array_open_open(double array[], int size) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void dsfmt_gv_fill_array_close1_open2(double array[], int size) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void dsfmt_gv_init_gen_rand(uint32_t seed) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void dsfmt_gv_init_by_array(uint32_t init_key[], + int key_length) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void dsfmt_init_gen_rand(dsfmt_t *dsfmt, uint32_t seed) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void dsfmt_init_by_array(dsfmt_t *dsfmt, uint32_t init_key[], + int key_length) DSFMT_PST_INLINE; + +/** + * This function generates and returns unsigned 32-bit integer. + * This is slower than SFMT, only for convenience usage. + * dsfmt_init_gen_rand() or dsfmt_init_by_array() must be called + * before this function. 
+ * @param dsfmt dsfmt internal state date + * @return double precision floating point pseudorandom number + */ +inline static uint32_t dsfmt_genrand_uint32(dsfmt_t *dsfmt) { + uint32_t r; + uint64_t *psfmt64 = &dsfmt->status[0].u[0]; + + if (dsfmt->idx >= DSFMT_N64) { + dsfmt_gen_rand_all(dsfmt); + dsfmt->idx = 0; + } + r = psfmt64[dsfmt->idx++] & 0xffffffffU; + return r; +} + +/** + * This function generates and returns double precision pseudorandom + * number which distributes uniformly in the range [1, 2). This is + * the primitive and faster than generating numbers in other ranges. + * dsfmt_init_gen_rand() or dsfmt_init_by_array() must be called + * before this function. + * @param dsfmt dsfmt internal state date + * @return double precision floating point pseudorandom number + */ +inline static double dsfmt_genrand_close1_open2(dsfmt_t *dsfmt) { + double r; + double *psfmt64 = &dsfmt->status[0].d[0]; + + if (dsfmt->idx >= DSFMT_N64) { + dsfmt_gen_rand_all(dsfmt); + dsfmt->idx = 0; + } + r = psfmt64[dsfmt->idx++]; + return r; +} + +/** + * This function generates and returns unsigned 32-bit integer. + * This is slower than SFMT, only for convenience usage. + * dsfmt_gv_init_gen_rand() or dsfmt_gv_init_by_array() must be called + * before this function. This function uses \b global variables. + * @return double precision floating point pseudorandom number + */ +inline static uint32_t dsfmt_gv_genrand_uint32(void) { + return dsfmt_genrand_uint32(&dsfmt_global_data); +} + +/** + * This function generates and returns double precision pseudorandom + * number which distributes uniformly in the range [1, 2). + * dsfmt_gv_init_gen_rand() or dsfmt_gv_init_by_array() must be called + * before this function. This function uses \b global variables. 
+ * @return double precision floating point pseudorandom number + */ +inline static double dsfmt_gv_genrand_close1_open2(void) { + return dsfmt_genrand_close1_open2(&dsfmt_global_data); +} + +/** + * This function generates and returns double precision pseudorandom + * number which distributes uniformly in the range [0, 1). + * dsfmt_init_gen_rand() or dsfmt_init_by_array() must be called + * before this function. + * @param dsfmt dsfmt internal state date + * @return double precision floating point pseudorandom number + */ +inline static double dsfmt_genrand_close_open(dsfmt_t *dsfmt) { + return dsfmt_genrand_close1_open2(dsfmt) - 1.0; +} + +/** + * This function generates and returns double precision pseudorandom + * number which distributes uniformly in the range [0, 1). + * dsfmt_gv_init_gen_rand() or dsfmt_gv_init_by_array() must be called + * before this function. This function uses \b global variables. + * @return double precision floating point pseudorandom number + */ +inline static double dsfmt_gv_genrand_close_open(void) { + return dsfmt_gv_genrand_close1_open2() - 1.0; +} + +/** + * This function generates and returns double precision pseudorandom + * number which distributes uniformly in the range (0, 1]. + * dsfmt_init_gen_rand() or dsfmt_init_by_array() must be called + * before this function. + * @param dsfmt dsfmt internal state date + * @return double precision floating point pseudorandom number + */ +inline static double dsfmt_genrand_open_close(dsfmt_t *dsfmt) { + return 2.0 - dsfmt_genrand_close1_open2(dsfmt); +} + +/** + * This function generates and returns double precision pseudorandom + * number which distributes uniformly in the range (0, 1]. + * dsfmt_gv_init_gen_rand() or dsfmt_gv_init_by_array() must be called + * before this function. This function uses \b global variables. 
+ * @return double precision floating point pseudorandom number + */ +inline static double dsfmt_gv_genrand_open_close(void) { + return 2.0 - dsfmt_gv_genrand_close1_open2(); +} + +/** + * This function generates and returns double precision pseudorandom + * number which distributes uniformly in the range (0, 1). + * dsfmt_init_gen_rand() or dsfmt_init_by_array() must be called + * before this function. + * @param dsfmt dsfmt internal state date + * @return double precision floating point pseudorandom number + */ +inline static double dsfmt_genrand_open_open(dsfmt_t *dsfmt) { + double *dsfmt64 = &dsfmt->status[0].d[0]; + union { + double d; + uint64_t u; + } r; + + if (dsfmt->idx >= DSFMT_N64) { + dsfmt_gen_rand_all(dsfmt); + dsfmt->idx = 0; + } + r.d = dsfmt64[dsfmt->idx++]; + r.u |= 1; + return r.d - 1.0; +} + +/** + * This function generates and returns double precision pseudorandom + * number which distributes uniformly in the range (0, 1). + * dsfmt_gv_init_gen_rand() or dsfmt_gv_init_by_array() must be called + * before this function. This function uses \b global variables. + * @return double precision floating point pseudorandom number + */ +inline static double dsfmt_gv_genrand_open_open(void) { + return dsfmt_genrand_open_open(&dsfmt_global_data); +} + +/** + * This function generates double precision floating point + * pseudorandom numbers which distribute in the range [1, 2) to the + * specified array[] by one call. This function is the same as + * dsfmt_fill_array_close1_open2() except that this function uses + * \b global variables. + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. 
+ * see also \sa dsfmt_fill_array_close1_open2() + */ +inline static void dsfmt_gv_fill_array_close1_open2(double array[], int size) { + dsfmt_fill_array_close1_open2(&dsfmt_global_data, array, size); +} + +/** + * This function generates double precision floating point + * pseudorandom numbers which distribute in the range (0, 1] to the + * specified array[] by one call. This function is the same as + * dsfmt_gv_fill_array_close1_open2() except the distribution range. + * This function uses \b global variables. + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. + * see also \sa dsfmt_fill_array_close1_open2() and \sa + * dsfmt_gv_fill_array_close1_open2() + */ +inline static void dsfmt_gv_fill_array_open_close(double array[], int size) { + dsfmt_fill_array_open_close(&dsfmt_global_data, array, size); +} + +/** + * This function generates double precision floating point + * pseudorandom numbers which distribute in the range [0, 1) to the + * specified array[] by one call. This function is the same as + * dsfmt_gv_fill_array_close1_open2() except the distribution range. + * This function uses \b global variables. + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. + * see also \sa dsfmt_fill_array_close1_open2() \sa + * dsfmt_gv_fill_array_close1_open2() + */ +inline static void dsfmt_gv_fill_array_close_open(double array[], int size) { + dsfmt_fill_array_close_open(&dsfmt_global_data, array, size); +} + +/** + * This function generates double precision floating point + * pseudorandom numbers which distribute in the range (0, 1) to the + * specified array[] by one call. This function is the same as + * dsfmt_gv_fill_array_close1_open2() except the distribution range. + * This function uses \b global variables. 
+ * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. + * see also \sa dsfmt_fill_array_close1_open2() \sa + * dsfmt_gv_fill_array_close1_open2() + */ +inline static void dsfmt_gv_fill_array_open_open(double array[], int size) { + dsfmt_fill_array_open_open(&dsfmt_global_data, array, size); +} + +/** + * This function initializes the internal state array with a 32-bit + * integer seed. + * @param dsfmt dsfmt state vector. + * @param seed a 32-bit integer used as the seed. + */ +inline static void dsfmt_init_gen_rand(dsfmt_t *dsfmt, uint32_t seed) { + dsfmt_chk_init_gen_rand(dsfmt, seed, DSFMT_MEXP); +} + +/** + * This function initializes the internal state array with a 32-bit + * integer seed. This function uses \b global variables. + * @param seed a 32-bit integer used as the seed. + * see also \sa dsfmt_init_gen_rand() + */ +inline static void dsfmt_gv_init_gen_rand(uint32_t seed) { + dsfmt_init_gen_rand(&dsfmt_global_data, seed); +} + +/** + * This function initializes the internal state array, + * with an array of 32-bit integers used as the seeds. + * @param dsfmt dsfmt state vector + * @param init_key the array of 32-bit integers, used as a seed. + * @param key_length the length of init_key. + */ +inline static void dsfmt_init_by_array(dsfmt_t *dsfmt, uint32_t init_key[], + int key_length) { + dsfmt_chk_init_by_array(dsfmt, init_key, key_length, DSFMT_MEXP); +} + +/** + * This function initializes the internal state array, + * with an array of 32-bit integers used as the seeds. + * This function uses \b global variables. + * @param init_key the array of 32-bit integers, used as a seed. + * @param key_length the length of init_key. 
+ * see also \sa dsfmt_init_by_array() + */ +inline static void dsfmt_gv_init_by_array(uint32_t init_key[], int key_length) { + dsfmt_init_by_array(&dsfmt_global_data, init_key, key_length); +} + +#if !defined(DSFMT_DO_NOT_USE_OLD_NAMES) +DSFMT_PRE_INLINE const char *get_idstring(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE int get_min_array_size(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void init_gen_rand(uint32_t seed) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void init_by_array(uint32_t init_key[], int key_length) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double genrand_close1_open2(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double genrand_close_open(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double genrand_open_close(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE double genrand_open_open(void) DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void fill_array_open_close(double array[], int size) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void fill_array_close_open(double array[], int size) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void fill_array_open_open(double array[], int size) + DSFMT_PST_INLINE; +DSFMT_PRE_INLINE void fill_array_close1_open2(double array[], int size) + DSFMT_PST_INLINE; + +/** + * This function is just the same as dsfmt_get_idstring(). + * @return id string. + * see also \sa dsfmt_get_idstring() + */ +inline static const char *get_idstring(void) { + return dsfmt_get_idstring(); +} + +/** + * This function is just the same as dsfmt_get_min_array_size(). + * @return minimum size of array used for fill_array functions. + * see also \sa dsfmt_get_min_array_size() + */ +inline static int get_min_array_size(void) { + return dsfmt_get_min_array_size(); +} + +/** + * This function is just the same as dsfmt_gv_init_gen_rand(). + * @param seed a 32-bit integer used as the seed. + * see also \sa dsfmt_gv_init_gen_rand(), \sa dsfmt_init_gen_rand(). 
+ */ +inline static void init_gen_rand(uint32_t seed) { + dsfmt_gv_init_gen_rand(seed); +} + +/** + * This function is just the same as dsfmt_gv_init_by_array(). + * @param init_key the array of 32-bit integers, used as a seed. + * @param key_length the length of init_key. + * see also \sa dsfmt_gv_init_by_array(), \sa dsfmt_init_by_array(). + */ +inline static void init_by_array(uint32_t init_key[], int key_length) { + dsfmt_gv_init_by_array(init_key, key_length); +} + +/** + * This function is just the same as dsfmt_gv_genrand_close1_open2(). + * @return double precision floating point number. + * see also \sa dsfmt_genrand_close1_open2() \sa + * dsfmt_gv_genrand_close1_open2() + */ +inline static double genrand_close1_open2(void) { + return dsfmt_gv_genrand_close1_open2(); +} + +/** + * This function is just the same as dsfmt_gv_genrand_close_open(). + * @return double precision floating point number. + * see also \sa dsfmt_genrand_close_open() \sa + * dsfmt_gv_genrand_close_open() + */ +inline static double genrand_close_open(void) { + return dsfmt_gv_genrand_close_open(); +} + +/** + * This function is just the same as dsfmt_gv_genrand_open_close(). + * @return double precision floating point number. + * see also \sa dsfmt_genrand_open_close() \sa + * dsfmt_gv_genrand_open_close() + */ +inline static double genrand_open_close(void) { + return dsfmt_gv_genrand_open_close(); +} + +/** + * This function is just the same as dsfmt_gv_genrand_open_open(). + * @return double precision floating point number. + * see also \sa dsfmt_genrand_open_open() \sa + * dsfmt_gv_genrand_open_open() + */ +inline static double genrand_open_open(void) { + return dsfmt_gv_genrand_open_open(); +} + +/** + * This function is juset the same as dsfmt_gv_fill_array_open_close(). + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. 
+ * see also \sa dsfmt_gv_fill_array_open_close(), \sa + * dsfmt_fill_array_close1_open2(), \sa + * dsfmt_gv_fill_array_close1_open2() + */ +inline static void fill_array_open_close(double array[], int size) { + dsfmt_gv_fill_array_open_close(array, size); +} + +/** + * This function is juset the same as dsfmt_gv_fill_array_close_open(). + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. + * see also \sa dsfmt_gv_fill_array_close_open(), \sa + * dsfmt_fill_array_close1_open2(), \sa + * dsfmt_gv_fill_array_close1_open2() + */ +inline static void fill_array_close_open(double array[], int size) { + dsfmt_gv_fill_array_close_open(array, size); +} + +/** + * This function is juset the same as dsfmt_gv_fill_array_open_open(). + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. + * see also \sa dsfmt_gv_fill_array_open_open(), \sa + * dsfmt_fill_array_close1_open2(), \sa + * dsfmt_gv_fill_array_close1_open2() + */ +inline static void fill_array_open_open(double array[], int size) { + dsfmt_gv_fill_array_open_open(array, size); +} + +/** + * This function is juset the same as dsfmt_gv_fill_array_close1_open2(). + * @param array an array where pseudorandom numbers are filled + * by this function. + * @param size the number of pseudorandom numbers to be generated. 
+ * see also \sa dsfmt_fill_array_close1_open2(), \sa + * dsfmt_gv_fill_array_close1_open2() + */ +inline static void fill_array_close1_open2(double array[], int size) { + dsfmt_gv_fill_array_close1_open2(array, size); +} +#endif /* DSFMT_DO_NOT_USE_OLD_NAMES */ + +#if defined(__cplusplus) +} +#endif + +#endif /* DSFMT_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params44497.h",".h","1468","41","#ifndef DSFMT_PARAMS44497_H +#define DSFMT_PARAMS44497_H + +/* #define DSFMT_N 427 */ +/* #define DSFMT_MAXDEGREE 44536 */ +#define DSFMT_POS1 304 +#define DSFMT_SL1 19 +#define DSFMT_MSK1 UINT64_C(0x000ff6dfffffffef) +#define DSFMT_MSK2 UINT64_C(0x0007ffdddeefff6f) +#define DSFMT_MSK32_1 0x000ff6dfU +#define DSFMT_MSK32_2 0xffffffefU +#define DSFMT_MSK32_3 0x0007ffddU +#define DSFMT_MSK32_4 0xdeefff6fU +#define DSFMT_FIX1 UINT64_C(0x75d910f235f6e10e) +#define DSFMT_FIX2 UINT64_C(0x7b32158aedc8e969) +#define DSFMT_PCV1 UINT64_C(0x4c3356b2a0000000) +#define DSFMT_PCV2 UINT64_C(0x0000000000000001) +#define DSFMT_IDSTR ""dSFMT2-44497:304-19:ff6dfffffffef-7ffdddeefff6f"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3) + #define ALTI_SL1_PERM \ + (vector unsigned char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) 
*/ + #define ALTI_SL1 {3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3} + #define ALTI_SL1_PERM \ + {2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS44497_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params4253.h",".h","1460","41","#ifndef DSFMT_PARAMS4253_H +#define DSFMT_PARAMS4253_H + +/* #define DSFMT_N 40 */ +/* #define DSFMT_MAXDEGREE 4288 */ +#define DSFMT_POS1 19 +#define DSFMT_SL1 19 +#define DSFMT_MSK1 UINT64_C(0x0007b7fffef5feff) +#define DSFMT_MSK2 UINT64_C(0x000ffdffeffefbfc) +#define DSFMT_MSK32_1 0x0007b7ffU +#define DSFMT_MSK32_2 0xfef5feffU +#define DSFMT_MSK32_3 0x000ffdffU +#define DSFMT_MSK32_4 0xeffefbfcU +#define DSFMT_FIX1 UINT64_C(0x80901b5fd7a11c65) +#define DSFMT_FIX2 UINT64_C(0x5a63ff0e7cb0ba74) +#define DSFMT_PCV1 UINT64_C(0x1ad277be12000000) +#define DSFMT_PCV2 UINT64_C(0x0000000000000001) +#define DSFMT_IDSTR ""dSFMT2-4253:19-19:7b7fffef5feff-ffdffeffefbfc"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3) + #define ALTI_SL1_PERM \ + (vector unsigned char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) 
*/ + #define ALTI_SL1 {3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3} + #define ALTI_SL1_PERM \ + {2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS4253_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params216091.h",".h","1476","41","#ifndef DSFMT_PARAMS216091_H +#define DSFMT_PARAMS216091_H + +/* #define DSFMT_N 2077 */ +/* #define DSFMT_MAXDEGREE 216136 */ +#define DSFMT_POS1 1890 +#define DSFMT_SL1 23 +#define DSFMT_MSK1 UINT64_C(0x000bf7df7fefcfff) +#define DSFMT_MSK2 UINT64_C(0x000e7ffffef737ff) +#define DSFMT_MSK32_1 0x000bf7dfU +#define DSFMT_MSK32_2 0x7fefcfffU +#define DSFMT_MSK32_3 0x000e7fffU +#define DSFMT_MSK32_4 0xfef737ffU +#define DSFMT_FIX1 UINT64_C(0xd7f95a04764c27d7) +#define DSFMT_FIX2 UINT64_C(0x6a483861810bebc2) +#define DSFMT_PCV1 UINT64_C(0x3af0a8f3d5600000) +#define DSFMT_PCV2 UINT64_C(0x0000000000000001) +#define DSFMT_IDSTR ""dSFMT2-216091:1890-23:bf7df7fefcfff-e7ffffef737ff"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7) + #define ALTI_SL1_PERM \ + (vector unsigned char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xff800000U,0xffffffffU,0xff800000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) 
*/ + #define ALTI_SL1 {7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7} + #define ALTI_SL1_PERM \ + {2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xff800000U,0xffffffffU,0xff800000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS216091_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-common.h",".h","3482","116","#pragma once +/** + * @file dSFMT-common.h + * + * @brief SIMD oriented Fast Mersenne Twister(SFMT) pseudorandom + * number generator with jump function. This file includes common functions + * used in random number generation and jump. + * + * @author Mutsuo Saito (Hiroshima University) + * @author Makoto Matsumoto (The University of Tokyo) + * + * Copyright (C) 2006, 2007 Mutsuo Saito, Makoto Matsumoto and Hiroshima + * University. + * Copyright (C) 2012 Mutsuo Saito, Makoto Matsumoto, Hiroshima + * University and The University of Tokyo. + * All rights reserved. 
+ * + * The 3-clause BSD License is applied to this software, see + * LICENSE.txt + */ +#ifndef DSFMT_COMMON_H +#define DSFMT_COMMON_H + +#include ""dSFMT.h"" + +#if defined(HAVE_SSE2) +# include +union X128I_T { + uint64_t u[2]; + __m128i i128; +}; +union X128D_T { + double d[2]; + __m128d d128; +}; +/** mask data for sse2 */ +static const union X128I_T sse2_param_mask = {{DSFMT_MSK1, DSFMT_MSK2}}; +#endif + +#if defined(HAVE_ALTIVEC) +inline static void do_recursion(w128_t *r, w128_t *a, w128_t * b, + w128_t *lung) { + const vector unsigned char sl1 = ALTI_SL1; + const vector unsigned char sl1_perm = ALTI_SL1_PERM; + const vector unsigned int sl1_msk = ALTI_SL1_MSK; + const vector unsigned char sr1 = ALTI_SR; + const vector unsigned char sr1_perm = ALTI_SR_PERM; + const vector unsigned int sr1_msk = ALTI_SR_MSK; + const vector unsigned char perm = ALTI_PERM; + const vector unsigned int msk1 = ALTI_MSK; + vector unsigned int w, x, y, z; + + z = a->s; + w = lung->s; + x = vec_perm(w, (vector unsigned int)perm, perm); + y = vec_perm(z, (vector unsigned int)sl1_perm, sl1_perm); + y = vec_sll(y, sl1); + y = vec_and(y, sl1_msk); + w = vec_xor(x, b->s); + w = vec_xor(w, y); + x = vec_perm(w, (vector unsigned int)sr1_perm, sr1_perm); + x = vec_srl(x, sr1); + x = vec_and(x, sr1_msk); + y = vec_and(w, msk1); + z = vec_xor(z, y); + r->s = vec_xor(z, x); + lung->s = w; +} +#elif defined(HAVE_SSE2) +/** + * This function represents the recursion formula. 
+ * @param r output 128-bit + * @param a a 128-bit part of the internal state array + * @param b a 128-bit part of the internal state array + * @param d a 128-bit part of the internal state array (I/O) + */ +inline static void do_recursion(w128_t *r, w128_t *a, w128_t *b, w128_t *u) { + __m128i v, w, x, y, z; + + x = a->si; + z = _mm_slli_epi64(x, DSFMT_SL1); + y = _mm_shuffle_epi32(u->si, SSE2_SHUFF); + z = _mm_xor_si128(z, b->si); + y = _mm_xor_si128(y, z); + + v = _mm_srli_epi64(y, DSFMT_SR); + w = _mm_and_si128(y, sse2_param_mask.i128); + v = _mm_xor_si128(v, x); + v = _mm_xor_si128(v, w); + r->si = v; + u->si = y; +} +#else +/** + * This function represents the recursion formula. + * @param r output 128-bit + * @param a a 128-bit part of the internal state array + * @param b a 128-bit part of the internal state array + * @param lung a 128-bit part of the internal state array (I/O) + */ +inline static void do_recursion(w128_t *r, w128_t *a, w128_t * b, + w128_t *lung) { + uint64_t t0, t1, L0, L1; + + t0 = a->u[0]; + t1 = a->u[1]; + L0 = lung->u[0]; + L1 = lung->u[1]; + lung->u[0] = (t0 << DSFMT_SL1) ^ (L1 >> 32) ^ (L1 << 32) ^ b->u[0]; + lung->u[1] = (t1 << DSFMT_SL1) ^ (L0 >> 32) ^ (L0 << 32) ^ b->u[1]; + r->u[0] = (lung->u[0] >> DSFMT_SR) ^ (lung->u[0] & DSFMT_MSK1) ^ t0; + r->u[1] = (lung->u[1] >> DSFMT_SR) ^ (lung->u[1] & DSFMT_MSK2) ^ t1; +} +#endif +#endif +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params19937.h",".h","1468","41","#ifndef DSFMT_PARAMS19937_H +#define DSFMT_PARAMS19937_H + +/* #define DSFMT_N 191 */ +/* #define DSFMT_MAXDEGREE 19992 */ +#define DSFMT_POS1 117 +#define DSFMT_SL1 19 +#define DSFMT_MSK1 UINT64_C(0x000ffafffffffb3f) +#define DSFMT_MSK2 UINT64_C(0x000ffdfffc90fffd) +#define DSFMT_MSK32_1 0x000ffaffU +#define DSFMT_MSK32_2 0xfffffb3fU +#define DSFMT_MSK32_3 0x000ffdffU +#define DSFMT_MSK32_4 0xfc90fffdU +#define DSFMT_FIX1 UINT64_C(0x90014964b32f4329) +#define DSFMT_FIX2 
UINT64_C(0x3b8d12ac548a7c7a) +#define DSFMT_PCV1 UINT64_C(0x3d84e1ac0dc82880) +#define DSFMT_PCV2 UINT64_C(0x0000000000000001) +#define DSFMT_IDSTR ""dSFMT2-19937:117-19:ffafffffffb3f-ffdfffc90fffd"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3) + #define ALTI_SL1_PERM \ + (vector unsigned char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) */ + #define ALTI_SL1 {3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3} + #define ALTI_SL1_PERM \ + {2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS19937_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params11213.h",".h","1466","41","#ifndef DSFMT_PARAMS11213_H +#define DSFMT_PARAMS11213_H + +/* #define DSFMT_N 107 */ +/* #define DSFMT_MAXDEGREE 11256 */ +#define DSFMT_POS1 37 +#define DSFMT_SL1 19 +#define DSFMT_MSK1 UINT64_C(0x000ffffffdf7fffd) +#define DSFMT_MSK2 UINT64_C(0x000dfffffff6bfff) +#define DSFMT_MSK32_1 0x000fffffU +#define DSFMT_MSK32_2 0xfdf7fffdU +#define DSFMT_MSK32_3 0x000dffffU +#define DSFMT_MSK32_4 0xfff6bfffU +#define DSFMT_FIX1 UINT64_C(0xd0ef7b7c75b06793) +#define DSFMT_FIX2 UINT64_C(0x9c50ff4caae0a641) +#define DSFMT_PCV1 UINT64_C(0x8234c51207c80000) +#define DSFMT_PCV2 UINT64_C(0x0000000000000001) +#define DSFMT_IDSTR ""dSFMT2-11213:37-19:ffffffdf7fffd-dfffffff6bfff"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3) + #define ALTI_SL1_PERM \ + (vector unsigned 
char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) */ + #define ALTI_SL1 {3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3} + #define ALTI_SL1_PERM \ + {2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS11213_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT_if.c",".c","510","31"," +#ifdef HAVE_CONFIG_H +#include ""../../config.h"" +#endif + +#include ""./dSFMT.h"" + +void +get_size_of_dsfmt_t_(int* size) +{ + *size = sizeof(dsfmt_t); +} + +void +dsfmt_init_gen_rand_(void* dsfmt, int* seed) +{ + dsfmt_init_gen_rand((dsfmt_t*)dsfmt, (uint32_t)*seed); +} + +void +dsfmt_genrand_close1_open2_(void* dsfmt, double* value) +{ + *value = dsfmt_genrand_close1_open2((dsfmt_t*)dsfmt); +} + +void +dsfmt_genrand_close0_open1_(void* dsfmt, double* value) +{ + *value = (dsfmt_genrand_close1_open2((dsfmt_t*)dsfmt) - 1.0); +} +","C" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params521.h",".h","1452","41","#ifndef DSFMT_PARAMS521_H +#define DSFMT_PARAMS521_H + +/* #define DSFMT_N 4 */ +/* #define DSFMT_MAXDEGREE 544 */ +#define DSFMT_POS1 3 +#define DSFMT_SL1 25 +#define DSFMT_MSK1 UINT64_C(0x000fbfefff77efff) +#define DSFMT_MSK2 UINT64_C(0x000ffeebfbdfbfdf) +#define DSFMT_MSK32_1 0x000fbfefU +#define DSFMT_MSK32_2 0xff77efffU +#define DSFMT_MSK32_3 0x000ffeebU +#define DSFMT_MSK32_4 0xfbdfbfdfU +#define DSFMT_FIX1 UINT64_C(0xcfb393d661638469) +#define DSFMT_FIX2 UINT64_C(0xc166867883ae2adb) +#define DSFMT_PCV1 UINT64_C(0xccaa588000000000) +#define DSFMT_PCV2 UINT64_C(0x0000000000000001) +#define DSFMT_IDSTR 
""dSFMT2-521:3-25:fbfefff77efff-ffeebfbdfbfdf"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1) + #define ALTI_SL1_PERM \ + (vector unsigned char)(3,4,5,6,7,29,29,29,11,12,13,14,15,0,1,2) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xfe000000U,0xffffffffU,0xfe000000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) */ + #define ALTI_SL1 {1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1} + #define ALTI_SL1_PERM \ + {3,4,5,6,7,29,29,29,11,12,13,14,15,0,1,2} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xfe000000U,0xffffffffU,0xfe000000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS521_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params1279.h",".h","1458","41","#ifndef DSFMT_PARAMS1279_H +#define DSFMT_PARAMS1279_H + +/* #define DSFMT_N 12 */ +/* #define DSFMT_MAXDEGREE 1376 */ +#define DSFMT_POS1 9 +#define DSFMT_SL1 19 +#define DSFMT_MSK1 UINT64_C(0x000efff7ffddffee) +#define DSFMT_MSK2 UINT64_C(0x000fbffffff77fff) +#define DSFMT_MSK32_1 0x000efff7U +#define DSFMT_MSK32_2 0xffddffeeU +#define DSFMT_MSK32_3 0x000fbfffU +#define DSFMT_MSK32_4 0xfff77fffU +#define DSFMT_FIX1 UINT64_C(0xb66627623d1a31be) +#define DSFMT_FIX2 UINT64_C(0x04b6c51147b6109b) +#define DSFMT_PCV1 UINT64_C(0x7049f2da382a6aeb) +#define DSFMT_PCV2 UINT64_C(0xde4ca84a40000001) +#define DSFMT_IDSTR ""dSFMT2-1279:9-19:efff7ffddffee-fbffffff77fff"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3) + #define ALTI_SL1_PERM \ + (vector unsigned char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U) + #define ALTI_MSK (vector unsigned 
int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) */ + #define ALTI_SL1 {3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3} + #define ALTI_SL1_PERM \ + {2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS1279_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params.h",".h","2437","88","#ifndef DSFMT_PARAMS_H +#define DSFMT_PARAMS_H + +#include ""dSFMT.h"" + +/*---------------------- + the parameters of DSFMT + following definitions are in dSFMT-paramsXXXX.h file. + ----------------------*/ +/** the pick up position of the array. +#define DSFMT_POS1 122 +*/ + +/** the parameter of shift left as four 32-bit registers. +#define DSFMT_SL1 18 + */ + +/** the parameter of shift right as four 32-bit registers. +#define DSFMT_SR1 12 +*/ + +/** A bitmask, used in the recursion. These parameters are introduced + * to break symmetry of SIMD. +#define DSFMT_MSK1 (uint64_t)0xdfffffefULL +#define DSFMT_MSK2 (uint64_t)0xddfecb7fULL +*/ + +/** These definitions are part of a 128-bit period certification vector. 
+#define DSFMT_PCV1 UINT64_C(0x00000001) +#define DSFMT_PCV2 UINT64_C(0x00000000) +*/ + +#define DSFMT_LOW_MASK UINT64_C(0x000FFFFFFFFFFFFF) +#define DSFMT_HIGH_CONST UINT64_C(0x3FF0000000000000) +#define DSFMT_SR 12 + +/* for sse2 */ +#if defined(HAVE_SSE2) + #define SSE2_SHUFF 0x1b +#elif defined(HAVE_ALTIVEC) + #if defined(__APPLE__) /* For OSX */ + #define ALTI_SR (vector unsigned char)(4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4) + #define ALTI_SR_PERM \ + (vector unsigned char)(15,0,1,2,3,4,5,6,15,8,9,10,11,12,13,14) + #define ALTI_SR_MSK \ + (vector unsigned int)(0x000fffffU,0xffffffffU,0x000fffffU,0xffffffffU) + #define ALTI_PERM \ + (vector unsigned char)(12,13,14,15,8,9,10,11,4,5,6,7,0,1,2,3) + #else + #define ALTI_SR {4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4} + #define ALTI_SR_PERM {15,0,1,2,3,4,5,6,15,8,9,10,11,12,13,14} + #define ALTI_SR_MSK {0x000fffffU,0xffffffffU,0x000fffffU,0xffffffffU} + #define ALTI_PERM {12,13,14,15,8,9,10,11,4,5,6,7,0,1,2,3} + #endif +#endif + +#if DSFMT_MEXP == 521 + #include ""dSFMT-params521.h"" +#elif DSFMT_MEXP == 1279 + #include ""dSFMT-params1279.h"" +#elif DSFMT_MEXP == 2203 + #include ""dSFMT-params2203.h"" +#elif DSFMT_MEXP == 4253 + #include ""dSFMT-params4253.h"" +#elif DSFMT_MEXP == 11213 + #include ""dSFMT-params11213.h"" +#elif DSFMT_MEXP == 19937 + #include ""dSFMT-params19937.h"" +#elif DSFMT_MEXP == 44497 + #include ""dSFMT-params44497.h"" +#elif DSFMT_MEXP == 86243 + #include ""dSFMT-params86243.h"" +#elif DSFMT_MEXP == 132049 + #include ""dSFMT-params132049.h"" +#elif DSFMT_MEXP == 216091 + #include ""dSFMT-params216091.h"" +#else +#ifdef __GNUC__ + #error ""DSFMT_MEXP is not valid."" + #undef DSFMT_MEXP +#else + #undef DSFMT_MEXP +#endif + +#endif + +#endif /* DSFMT_PARAMS_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params2203.h",".h","1458","41","#ifndef DSFMT_PARAMS2203_H +#define DSFMT_PARAMS2203_H + +/* #define DSFMT_N 20 */ +/* #define DSFMT_MAXDEGREE 2208 */ +#define DSFMT_POS1 7 
+#define DSFMT_SL1 19 +#define DSFMT_MSK1 UINT64_C(0x000fdffff5edbfff) +#define DSFMT_MSK2 UINT64_C(0x000f77fffffffbfe) +#define DSFMT_MSK32_1 0x000fdfffU +#define DSFMT_MSK32_2 0xf5edbfffU +#define DSFMT_MSK32_3 0x000f77ffU +#define DSFMT_MSK32_4 0xfffffbfeU +#define DSFMT_FIX1 UINT64_C(0xb14e907a39338485) +#define DSFMT_FIX2 UINT64_C(0xf98f0735c637ef90) +#define DSFMT_PCV1 UINT64_C(0x8000000000000000) +#define DSFMT_PCV2 UINT64_C(0x0000000000000001) +#define DSFMT_IDSTR ""dSFMT2-2203:7-19:fdffff5edbfff-f77fffffffbfe"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3) + #define ALTI_SL1_PERM \ + (vector unsigned char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) */ + #define ALTI_SL1 {3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3} + #define ALTI_SL1_PERM \ + {2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xfff80000U,0xffffffffU,0xfff80000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS2203_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params86243.h",".h","1466","41","#ifndef DSFMT_PARAMS86243_H +#define DSFMT_PARAMS86243_H + +/* #define DSFMT_N 829 */ +/* #define DSFMT_MAXDEGREE 86344 */ +#define DSFMT_POS1 231 +#define DSFMT_SL1 13 +#define DSFMT_MSK1 UINT64_C(0x000ffedff6ffffdf) +#define DSFMT_MSK2 UINT64_C(0x000ffff7fdffff7e) +#define DSFMT_MSK32_1 0x000ffedfU +#define DSFMT_MSK32_2 0xf6ffffdfU +#define DSFMT_MSK32_3 0x000ffff7U +#define DSFMT_MSK32_4 0xfdffff7eU +#define DSFMT_FIX1 UINT64_C(0x1d553e776b975e68) +#define DSFMT_FIX2 UINT64_C(0x648faadf1416bf91) +#define DSFMT_PCV1 UINT64_C(0x5f2cd03e2758a373) 
+#define DSFMT_PCV2 UINT64_C(0xc0b7eb8410000001) +#define DSFMT_IDSTR ""dSFMT2-86243:231-13:ffedff6ffffdf-ffff7fdffff7e"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5) + #define ALTI_SL1_PERM \ + (vector unsigned char)(1,2,3,4,5,6,7,31,9,10,11,12,13,14,15,0) + #define ALTI_SL1_MSK \ + (vector unsigned int)(0xffffffffU,0xffffe000U,0xffffffffU,0xffffe000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) */ + #define ALTI_SL1 {5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5} + #define ALTI_SL1_PERM \ + {1,2,3,4,5,6,7,31,9,10,11,12,13,14,15,0} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xffffe000U,0xffffffffU,0xffffe000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS86243_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/dSFMT/dSFMT-params132049.h",".h","1474","41","#ifndef DSFMT_PARAMS132049_H +#define DSFMT_PARAMS132049_H + +/* #define DSFMT_N 1269 */ +/* #define DSFMT_MAXDEGREE 132104 */ +#define DSFMT_POS1 371 +#define DSFMT_SL1 23 +#define DSFMT_MSK1 UINT64_C(0x000fb9f4eff4bf77) +#define DSFMT_MSK2 UINT64_C(0x000fffffbfefff37) +#define DSFMT_MSK32_1 0x000fb9f4U +#define DSFMT_MSK32_2 0xeff4bf77U +#define DSFMT_MSK32_3 0x000fffffU +#define DSFMT_MSK32_4 0xbfefff37U +#define DSFMT_FIX1 UINT64_C(0x4ce24c0e4e234f3b) +#define DSFMT_FIX2 UINT64_C(0x62612409b5665c2d) +#define DSFMT_PCV1 UINT64_C(0x181232889145d000) +#define DSFMT_PCV2 UINT64_C(0x0000000000000001) +#define DSFMT_IDSTR ""dSFMT2-132049:371-23:fb9f4eff4bf77-fffffbfefff37"" + + +/* PARAMETERS FOR ALTIVEC */ +#if defined(__APPLE__) /* For OSX */ + #define ALTI_SL1 (vector unsigned char)(7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7) + #define ALTI_SL1_PERM \ + (vector unsigned char)(2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1) + #define ALTI_SL1_MSK \ + (vector unsigned 
int)(0xffffffffU,0xff800000U,0xffffffffU,0xff800000U) + #define ALTI_MSK (vector unsigned int)(DSFMT_MSK32_1, \ + DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4) +#else /* For OTHER OSs(Linux?) */ + #define ALTI_SL1 {7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7} + #define ALTI_SL1_PERM \ + {2,3,4,5,6,7,30,30,10,11,12,13,14,15,0,1} + #define ALTI_SL1_MSK \ + {0xffffffffU,0xff800000U,0xffffffffU,0xff800000U} + #define ALTI_MSK \ + {DSFMT_MSK32_1, DSFMT_MSK32_2, DSFMT_MSK32_3, DSFMT_MSK32_4} +#endif + +#endif /* DSFMT_PARAMS132049_H */ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","src/lib/Lbfgsb.3.0/lbfgsb.f",".f","131684","3990","c +c L-BFGS-B is released under the “New BSD License” (aka “Modified BSD License” +c or “3-clause license”) +c Please read attached file License.txt +c +c=========== L-BFGS-B (version 3.0. April 25, 2011 =================== +c +c This is a modified version of L-BFGS-B. Minor changes in the updated +c code appear preceded by a line comment as follows +c +c c-jlm-jn +c +c Major changes are described in the accompanying paper: +c +c Jorge Nocedal and Jose Luis Morales, Remark on ""Algorithm 778: +c L-BFGS-B: Fortran Subroutines for Large-Scale Bound Constrained +c Optimization"" (2011). To appear in ACM Transactions on +c Mathematical Software, +c +c The paper describes an improvement and a correction to Algorithm 778. +c It is shown that the performance of the algorithm can be improved +c significantly by making a relatively simple modication to the subspace +c minimization phase. The correction concerns an error caused by the use +c of routine dpmeps to estimate machine precision. +c +c The total work space **wa** required by the new version is +c +c 2*m*n + 11m*m + 5*n + 8*m +c +c the old version required +c +c 2*m*n + 12m*m + 4*n + 12*m +c +c +c J. Nocedal Department of Electrical Engineering and +c Computer Science. +c Northwestern University. Evanston, IL. 
USA +c +c +c J.L Morales Departamento de Matematicas, +c Instituto Tecnologico Autonomo de Mexico +c Mexico D.F. Mexico. +c +c March 2011 +c +c============================================================================= + subroutine setulb(n, m, x, l, u, nbd, f, g, factr, pgtol, wa, iwa, + + task, iprint, csave, lsave, isave, dsave, +!YA_Bgn + + rank) +!YA_End + + character*60 task, csave + logical lsave(4) + integer n, m, iprint, + + nbd(n), iwa(3*n), isave(44) +!YA_Bgn + integer rank +!YA_End + double precision f, factr, pgtol, x(n), l(n), u(n), g(n), +c +c-jlm-jn + + wa(2*m*n + 5*n + 11*m*m + 8*m), dsave(29) + +c ************ +c +c Subroutine setulb +c +c This subroutine partitions the working arrays wa and iwa, and +c then uses the limited memory BFGS method to solve the bound +c constrained optimization problem by calling mainlb. +c (The direct method will be used in the subspace minimization.) +c +c n is an integer variable. +c On entry n is the dimension of the problem. +c On exit n is unchanged. +c +c m is an integer variable. +c On entry m is the maximum number of variable metric corrections +c used to define the limited memory matrix. +c On exit m is unchanged. +c +c x is a double precision array of dimension n. +c On entry x is an approximation to the solution. +c On exit x is the current approximation. +c +c l is a double precision array of dimension n. +c On entry l is the lower bound on x. +c On exit l is unchanged. +c +c u is a double precision array of dimension n. +c On entry u is the upper bound on x. +c On exit u is unchanged. +c +c nbd is an integer array of dimension n. +c On entry nbd represents the type of bounds imposed on the +c variables, and must be specified as follows: +c nbd(i)=0 if x(i) is unbounded, +c 1 if x(i) has only a lower bound, +c 2 if x(i) has both lower and upper bounds, and +c 3 if x(i) has only an upper bound. +c On exit nbd is unchanged. +c +c f is a double precision variable. +c On first entry f is unspecified. 
+c On final exit f is the value of the function at x. +c +c g is a double precision array of dimension n. +c On first entry g is unspecified. +c On final exit g is the value of the gradient at x. +c +c factr is a double precision variable. +c On entry factr >= 0 is specified by the user. The iteration +c will stop when +c +c (f^k - f^{k+1})/max{|f^k|,|f^{k+1}|,1} <= factr*epsmch +c +c where epsmch is the machine precision, which is automatically +c generated by the code. Typical values for factr: 1.d+12 for +c low accuracy; 1.d+7 for moderate accuracy; 1.d+1 for extremely +c high accuracy. +c On exit factr is unchanged. +c +c pgtol is a double precision variable. +c On entry pgtol >= 0 is specified by the user. The iteration +c will stop when +c +c max{|proj g_i | i = 1, ..., n} <= pgtol +c +c where pg_i is the ith component of the projected gradient. +c On exit pgtol is unchanged. +c +c wa is a double precision working array of length +c (2mmax + 5)nmax + 12mmax^2 + 12mmax. +c +c iwa is an integer working array of length 3nmax. +c +c task is a working string of characters of length 60 indicating +c the current job when entering and quitting this subroutine. +c +c iprint is an integer variable that must be set by the user. +c It controls the frequency and type of output generated: +c iprint<0 no output is generated; +c iprint=0 print only one line at the last iteration; +c 0100 print details of every iteration including x and g; +c When iprint > 0, the file iterate.dat will be created to +c summarize the iteration. +c +c csave is a working string of characters of length 60. +c +c lsave is a logical working array of dimension 4. +c On exit with 'task' = NEW_X, the following information is +c available: +c If lsave(1) = .true. then the initial X has been replaced by +c its projection in the feasible set; +c If lsave(2) = .true. then the problem is constrained; +c If lsave(3) = .true. 
then each variable has upper and lower +c bounds; +c +c isave is an integer working array of dimension 44. +c On exit with 'task' = NEW_X, the following information is +c available: +c isave(22) = the total number of intervals explored in the +c search of Cauchy points; +c isave(26) = the total number of skipped BFGS updates before +c the current iteration; +c isave(30) = the number of current iteration; +c isave(31) = the total number of BFGS updates prior the current +c iteration; +c isave(33) = the number of intervals explored in the search of +c Cauchy point in the current iteration; +c isave(34) = the total number of function and gradient +c evaluations; +c isave(36) = the number of function value or gradient +c evaluations in the current iteration; +c if isave(37) = 0 then the subspace argmin is within the box; +c if isave(37) = 1 then the subspace argmin is beyond the box; +c isave(38) = the number of free variables in the current +c iteration; +c isave(39) = the number of active constraints in the current +c iteration; +c n + 1 - isave(40) = the number of variables leaving the set of +c active constraints in the current iteration; +c isave(41) = the number of variables entering the set of active +c constraints in the current iteration. +c +c dsave is a double precision working array of dimension 29. 
+c On exit with 'task' = NEW_X, the following information is +c available: +c dsave(1) = current 'theta' in the BFGS matrix; +c dsave(2) = f(x) in the previous iteration; +c dsave(3) = factr*epsmch; +c dsave(4) = 2-norm of the line search direction vector; +c dsave(5) = the machine precision epsmch generated by the code; +c dsave(7) = the accumulated time spent on searching for +c Cauchy points; +c dsave(8) = the accumulated time spent on +c subspace minimization; +c dsave(9) = the accumulated time spent on line search; +c dsave(11) = the slope of the line search function at +c the current point of line search; +c dsave(12) = the maximum relative step length imposed in +c line search; +c dsave(13) = the infinity norm of the projected gradient; +c dsave(14) = the relative step length in the line search; +c dsave(15) = the slope of the line search function at +c the starting point of the line search; +c dsave(16) = the square of the 2-norm of the line search +c direction vector. +c +c Subprograms called: +c +c L-BFGS-B Library ... mainlb. +c +c +c References: +c +c [1] R. H. Byrd, P. Lu, J. Nocedal and C. Zhu, ``A limited +c memory algorithm for bound constrained optimization'', +c SIAM J. Scientific Computing 16 (1995), no. 5, pp. 1190--1208. +c +c [2] C. Zhu, R.H. Byrd, P. Lu, J. Nocedal, ``L-BFGS-B: a +c limited memory FORTRAN code for solving bound constrained +c optimization problems'', Tech. Report, NAM-11, EECS Department, +c Northwestern University, 1994. +c +c (Postscript files of these papers are available via anonymous +c ftp to eecs.nwu.edu in the directory pub/lbfgs/lbfgs_bcm.) +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ +c-jlm-jn + integer lws,lr,lz,lt,ld,lxp,lwa, + + lwy,lsy,lss,lwt,lwn,lsnd + + if (task .eq. 
'START') then + isave(1) = m*n + isave(2) = m**2 + isave(3) = 4*m**2 + isave(4) = 1 ! ws m*n + isave(5) = isave(4) + isave(1) ! wy m*n + isave(6) = isave(5) + isave(1) ! wsy m**2 + isave(7) = isave(6) + isave(2) ! wss m**2 + isave(8) = isave(7) + isave(2) ! wt m**2 + isave(9) = isave(8) + isave(2) ! wn 4*m**2 + isave(10) = isave(9) + isave(3) ! wsnd 4*m**2 + isave(11) = isave(10) + isave(3) ! wz n + isave(12) = isave(11) + n ! wr n + isave(13) = isave(12) + n ! wd n + isave(14) = isave(13) + n ! wt n + isave(15) = isave(14) + n ! wxp n + isave(16) = isave(15) + n ! wa 8*m + endif + lws = isave(4) + lwy = isave(5) + lsy = isave(6) + lss = isave(7) + lwt = isave(8) + lwn = isave(9) + lsnd = isave(10) + lz = isave(11) + lr = isave(12) + ld = isave(13) + lt = isave(14) + lxp = isave(15) + lwa = isave(16) + + call mainlb(n,m,x,l,u,nbd,f,g,factr,pgtol, + + wa(lws),wa(lwy),wa(lsy),wa(lss), wa(lwt), + + wa(lwn),wa(lsnd),wa(lz),wa(lr),wa(ld),wa(lt),wa(lxp), + + wa(lwa), + + iwa(1),iwa(n+1),iwa(2*n+1),task,iprint, + + csave,lsave,isave(22),dsave, +!YA_Bgn + + rank) +!YA_End + + return + + end + +c======================= The end of setulb ============================= + + subroutine mainlb(n, m, x, l, u, nbd, f, g, factr, pgtol, ws, wy, + + sy, ss, wt, wn, snd, z, r, d, t, xp, wa, + + index, iwhere, indx2, task, + + iprint, csave, lsave, isave, dsave, +!YA_Bgn + + rank) +!YA_End + implicit none + character*60 task, csave + logical lsave(4) + integer n, m, iprint, nbd(n), index(n), + + iwhere(n), indx2(n), isave(23) +!YA_Bgn + integer rank + character*12 a +!YA_End + double precision f, factr, pgtol, + + x(n), l(n), u(n), g(n), z(n), r(n), d(n), t(n), +c-jlm-jn + + xp(n), + + wa(8*m), + + ws(n, m), wy(n, m), sy(m, m), ss(m, m), + + wt(m, m), wn(2*m, 2*m), snd(2*m, 2*m), dsave(29) + +c ************ +c +c Subroutine mainlb +c +c This subroutine solves bound constrained optimization problems by +c using the compact formula of the limited memory BFGS updates. 
+c +c n is an integer variable. +c On entry n is the number of variables. +c On exit n is unchanged. +c +c m is an integer variable. +c On entry m is the maximum number of variable metric +c corrections allowed in the limited memory matrix. +c On exit m is unchanged. +c +c x is a double precision array of dimension n. +c On entry x is an approximation to the solution. +c On exit x is the current approximation. +c +c l is a double precision array of dimension n. +c On entry l is the lower bound of x. +c On exit l is unchanged. +c +c u is a double precision array of dimension n. +c On entry u is the upper bound of x. +c On exit u is unchanged. +c +c nbd is an integer array of dimension n. +c On entry nbd represents the type of bounds imposed on the +c variables, and must be specified as follows: +c nbd(i)=0 if x(i) is unbounded, +c 1 if x(i) has only a lower bound, +c 2 if x(i) has both lower and upper bounds, +c 3 if x(i) has only an upper bound. +c On exit nbd is unchanged. +c +c f is a double precision variable. +c On first entry f is unspecified. +c On final exit f is the value of the function at x. +c +c g is a double precision array of dimension n. +c On first entry g is unspecified. +c On final exit g is the value of the gradient at x. +c +c factr is a double precision variable. +c On entry factr >= 0 is specified by the user. The iteration +c will stop when +c +c (f^k - f^{k+1})/max{|f^k|,|f^{k+1}|,1} <= factr*epsmch +c +c where epsmch is the machine precision, which is automatically +c generated by the code. +c On exit factr is unchanged. +c +c pgtol is a double precision variable. +c On entry pgtol >= 0 is specified by the user. The iteration +c will stop when +c +c max{|proj g_i | i = 1, ..., n} <= pgtol +c +c where pg_i is the ith component of the projected gradient. +c On exit pgtol is unchanged. 
+c +c ws, wy, sy, and wt are double precision working arrays used to +c store the following information defining the limited memory +c BFGS matrix: +c ws, of dimension n x m, stores S, the matrix of s-vectors; +c wy, of dimension n x m, stores Y, the matrix of y-vectors; +c sy, of dimension m x m, stores S'Y; +c ss, of dimension m x m, stores S'S; +c yy, of dimension m x m, stores Y'Y; +c wt, of dimension m x m, stores the Cholesky factorization +c of (theta*S'S+LD^(-1)L'); see eq. +c (2.26) in [3]. +c +c wn is a double precision working array of dimension 2m x 2m +c used to store the LEL^T factorization of the indefinite matrix +c K = [-D -Y'ZZ'Y/theta L_a'-R_z' ] +c [L_a -R_z theta*S'AA'S ] +c +c where E = [-I 0] +c [ 0 I] +c +c snd is a double precision working array of dimension 2m x 2m +c used to store the lower triangular part of +c N = [Y' ZZ'Y L_a'+R_z'] +c [L_a +R_z S'AA'S ] +c +c z(n),r(n),d(n),t(n), xp(n),wa(8*m) are double precision working arrays. +c z is used at different times to store the Cauchy point and +c the Newton point. +c xp is used to safeguard the projected Newton direction +c +c sg(m),sgo(m),yg(m),ygo(m) are double precision working arrays. +c +c index is an integer working array of dimension n. +c In subroutine freev, index is used to store the free and fixed +c variables at the Generalized Cauchy Point (GCP). +c +c iwhere is an integer working array of dimension n used to record +c the status of the vector x for GCP computation. +c iwhere(i)=0 or -3 if x(i) is free and has bounds, +c 1 if x(i) is fixed at l(i), and l(i) .ne. u(i) +c 2 if x(i) is fixed at u(i), and u(i) .ne. l(i) +c 3 if x(i) is always fixed, i.e., u(i)=x(i)=l(i) +c -1 if x(i) is always free, i.e., no bounds on it. +c +c indx2 is an integer working array of dimension n. +c Within subroutine cauchy, indx2 corresponds to the array iorder. 
+c In subroutine freev, a list of variables entering and leaving +c the free set is stored in indx2, and it is passed on to +c subroutine formk with this information. +c +c task is a working string of characters of length 60 indicating +c the current job when entering and leaving this subroutine. +c +c iprint is an INTEGER variable that must be set by the user. +c It controls the frequency and type of output generated: +c iprint<0 no output is generated; +c iprint=0 print only one line at the last iteration; +c 0100 print details of every iteration including x and g; +c When iprint > 0, the file iterate.dat will be created to +c summarize the iteration. +c +c csave is a working string of characters of length 60. +c +c lsave is a logical working array of dimension 4. +c +c isave is an integer working array of dimension 23. +c +c dsave is a double precision working array of dimension 29. +c +c +c Subprograms called +c +c L-BFGS-B Library ... cauchy, subsm, lnsrlb, formk, +c +c errclb, prn1lb, prn2lb, prn3lb, active, projgr, +c +c freev, cmprlb, matupd, formt. +c +c Minpack2 Library ... timer +c +c Linpack Library ... dcopy, ddot. +c +c +c References: +c +c [1] R. H. Byrd, P. Lu, J. Nocedal and C. Zhu, ``A limited +c memory algorithm for bound constrained optimization'', +c SIAM J. Scientific Computing 16 (1995), no. 5, pp. 1190--1208. +c +c [2] C. Zhu, R.H. Byrd, P. Lu, J. Nocedal, ``L-BFGS-B: FORTRAN +c Subroutines for Large Scale Bound Constrained Optimization'' +c Tech. Report, NAM-11, EECS Department, Northwestern University, +c 1994. +c +c [3] R. Byrd, J. Nocedal and R. Schnabel ""Representations of +c Quasi-Newton Matrices and their use in Limited Memory Methods'', +c Mathematical Programming 63 (1994), no. 4, pp. 129-156. +c +c (Postscript files of these papers are available via anonymous +c ftp to eecs.nwu.edu in the directory pub/lbfgs/lbfgs_bcm.) +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. 
+c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + logical prjctd,cnstnd,boxed,updatd,wrk + character*3 word + integer i,k,nintol,itfile,iback,nskip, + + head,col,iter,itail,iupdat, + + nseg,nfgv,info,ifun, + + iword,nfree,nact,ileave,nenter + double precision theta,fold,ddot,dr,rr,tol, + + xstep,sbgnrm,ddum,dnorm,dtd,epsmch, + + cpu1,cpu2,cachyt,sbtime,lnscht,time1,time2, + + gd,gdold,stp,stpmx,time + double precision one,zero + parameter (one=1.0d0,zero=0.0d0) + + cpu2 = zero + time2 = zero + + if (task .eq. 'START') then + + epsmch = epsilon(one) + +C call timer(time1) + +c Initialize counters and scalars when task='START'. + +c for the limited memory BFGS matrices: + col = 0 + head = 1 + theta = one + iupdat = 0 + updatd = .false. + iback = 0 + itail = 0 + iword = 0 + nact = 0 + ileave = 0 + nenter = 0 + fold = zero + dnorm = zero + gd = zero + stpmx = zero + sbgnrm = zero + stp = zero + gdold = zero + dtd = zero + +c for operation counts: + iter = 0 + nfgv = 0 + nseg = 0 + nintol = 0 + nskip = 0 + nfree = n + ifun = 0 +c for stopping tolerance: + tol = factr*epsmch + +c for measuring running time: + cpu1 = zero + cachyt = zero + sbtime = zero + lnscht = zero + time1 = zero + +c 'word' records the status of subspace solutions. + word = '---' + +c 'info' records the termination information. + info = 0 + +c_YA itfile = 8 +c_YA_Bgn + itfile = 98 +c_YA_End + if (iprint .ge. 1) then +c open a summary file 'iterate.dat' +c_YA open (8, file = 'iterate.dat', status = 'unknown') +c_YA_Bgn +c open (98, file = 'iterate.dat', status = 'unknown') + write(*, '(""Rank: "",i0)') rank + write(a,*) rank + open (98, file = 'iterate.dat'//trim(adjustl(a)), + + status = 'unknown') +c_YA_End + endif + +c Check the input arguments for errors. + + call errclb(n,m,factr,l,u,nbd,task,info,k) + if (task(1:5) .eq. 
'ERROR') then + call prn3lb(n,x,f,task,iprint,info,itfile, + + iter,nfgv,nintol,nskip,nact,sbgnrm, + + zero,nseg,word,iback,stp,xstep,k, + + cachyt,sbtime,lnscht) + return + endif + + call prn1lb(n,m,l,u,x,iprint,itfile,epsmch) + +c Initialize iwhere & project x onto the feasible set. + + call active(n,l,u,nbd,x,iwhere,iprint,prjctd,cnstnd,boxed) + +c The end of the initialization. + + else +c restore local variables. + + prjctd = lsave(1) + cnstnd = lsave(2) + boxed = lsave(3) + updatd = lsave(4) + + nintol = isave(1) + itfile = isave(3) + iback = isave(4) + nskip = isave(5) + head = isave(6) + col = isave(7) + itail = isave(8) + iter = isave(9) + iupdat = isave(10) + nseg = isave(12) + nfgv = isave(13) + info = isave(14) + ifun = isave(15) + iword = isave(16) + nfree = isave(17) + nact = isave(18) + ileave = isave(19) + nenter = isave(20) + + theta = dsave(1) + fold = dsave(2) + tol = dsave(3) + dnorm = dsave(4) + epsmch = dsave(5) + cpu1 = dsave(6) + cachyt = dsave(7) + sbtime = dsave(8) + lnscht = dsave(9) + time1 = dsave(10) + gd = dsave(11) + stpmx = dsave(12) + sbgnrm = dsave(13) + stp = dsave(14) + gdold = dsave(15) + dtd = dsave(16) + +c After returning from the driver go to the point where execution +c is to resume. + + if (task(1:5) .eq. 'FG_LN') goto 666 + if (task(1:5) .eq. 'NEW_X') goto 777 + if (task(1:5) .eq. 'FG_ST') goto 111 + if (task(1:4) .eq. 'STOP') then + if (task(7:9) .eq. 'CPU') then +c restore the previous iterate. + call dcopy(n,t,1,x,1) + call dcopy(n,r,1,g,1) + f = fold + endif + goto 999 + endif + endif + +c Compute f0 and g0. + + task = 'FG_START' +c return to the driver to calculate f and g; reenter at 111. + goto 1000 + 111 continue + nfgv = 1 + +c Compute the infinity norm of the (-) projected gradient. + + call projgr(n,l,u,nbd,x,g,sbgnrm) + + if (iprint .ge. 1) then + write (6,1002) iter,f,sbgnrm + write (itfile,1003) iter,nfgv,sbgnrm,f + endif + if (sbgnrm .le. pgtol) then +c terminate the algorithm. 
+ task = 'CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL' + goto 999 + endif + +c ----------------- the beginning of the loop -------------------------- + + 222 continue + if (iprint .ge. 99) write (6,1001) iter + 1 + iword = -1 +c + if (.not. cnstnd .and. col .gt. 0) then +c skip the search for GCP. + call dcopy(n,x,1,z,1) + wrk = updatd + nseg = 0 + goto 333 + endif + +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +c +c Compute the Generalized Cauchy Point (GCP). +c +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc + +C call timer(cpu1) + call cauchy(n,x,l,u,nbd,g,indx2,iwhere,t,d,z, + + m,wy,ws,sy,wt,theta,col,head, + + wa(1),wa(2*m+1),wa(4*m+1),wa(6*m+1),nseg, + + iprint, sbgnrm, info, epsmch) + if (info .ne. 0) then +c singular triangular system detected; refresh the lbfgs memory. + if(iprint .ge. 1) write (6, 1005) + info = 0 + col = 0 + head = 1 + theta = one + iupdat = 0 + updatd = .false. +C call timer(cpu2) + cachyt = cachyt + cpu2 - cpu1 + goto 222 + endif +C call timer(cpu2) + cachyt = cachyt + cpu2 - cpu1 + nintol = nintol + nseg + +c Count the entering and leaving variables for iter > 0; +c find the index set of free and active variables at the GCP. + + call freev(n,nfree,index,nenter,ileave,indx2, + + iwhere,wrk,updatd,cnstnd,iprint,iter) + nact = n - nfree + + 333 continue + +c If there are no free variables or B=theta*I, then +c skip the subspace minimization. + + if (nfree .eq. 0 .or. col .eq. 0) goto 555 + +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +c +c Subspace minimization. +c +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc + +C call timer(cpu1) + +c Form the LEL^T factorization of the indefinite +c matrix K = [-D -Y'ZZ'Y/theta L_a'-R_z' ] +c [L_a -R_z theta*S'AA'S ] +c where E = [-I 0] +c [ 0 I] + + if (wrk) call formk(n,nfree,index,nenter,ileave,indx2,iupdat, + + updatd,wn,snd,m,ws,wy,sy,theta,col,head,info) + if (info .ne. 
0) then +c nonpositive definiteness in Cholesky factorization; +c refresh the lbfgs memory and restart the iteration. + if(iprint .ge. 1) write (6, 1006) + info = 0 + col = 0 + head = 1 + theta = one + iupdat = 0 + updatd = .false. +C call timer(cpu2) + sbtime = sbtime + cpu2 - cpu1 + goto 222 + endif + +c compute r=-Z'B(xcp-xk)-Z'g (using wa(2m+1)=W'(xcp-x) +c from 'cauchy'). + call cmprlb(n,m,x,g,ws,wy,sy,wt,z,r,wa,index, + + theta,col,head,nfree,cnstnd,info) + if (info .ne. 0) goto 444 + +c-jlm-jn call the direct method. + + call subsm( n, m, nfree, index, l, u, nbd, z, r, xp, ws, wy, + + theta, x, g, col, head, iword, wa, wn, iprint, info) + 444 continue + if (info .ne. 0) then +c singular triangular system detected; +c refresh the lbfgs memory and restart the iteration. + if(iprint .ge. 1) write (6, 1005) + info = 0 + col = 0 + head = 1 + theta = one + iupdat = 0 + updatd = .false. +C call timer(cpu2) + sbtime = sbtime + cpu2 - cpu1 + goto 222 + endif + +C call timer(cpu2) + sbtime = sbtime + cpu2 - cpu1 + 555 continue + +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +c +c Line search and optimality tests. +c +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc + +c Generate the search direction d:=z-x. + + do 40 i = 1, n + d(i) = z(i) - x(i) + 40 continue +C call timer(cpu1) + 666 continue + call lnsrlb(n,l,u,nbd,x,f,fold,gd,gdold,g,d,r,t,z,stp,dnorm, + + dtd,xstep,stpmx,iter,ifun,iback,nfgv,info,task, + + boxed,cnstnd,csave,isave(22),dsave(17)) + if (info .ne. 0 .or. iback .ge. 20) then +c restore the previous iterate. + call dcopy(n,t,1,x,1) + call dcopy(n,r,1,g,1) + f = fold + if (col .eq. 0) then +c abnormal termination. + if (info .eq. 0) then + info = -9 +c restore the actual number of f and g evaluations etc. + nfgv = nfgv - 1 + ifun = ifun - 1 + iback = iback - 1 + endif + task = 'ABNORMAL_TERMINATION_IN_LNSRCH' + iter = iter + 1 + goto 999 + else +c refresh the lbfgs memory and restart the iteration. 
+ if(iprint .ge. 1) write (6, 1008) + if (info .eq. 0) nfgv = nfgv - 1 + info = 0 + col = 0 + head = 1 + theta = one + iupdat = 0 + updatd = .false. + task = 'RESTART_FROM_LNSRCH' +C call timer(cpu2) + lnscht = lnscht + cpu2 - cpu1 + goto 222 + endif + else if (task(1:5) .eq. 'FG_LN') then +c return to the driver for calculating f and g; reenter at 666. + goto 1000 + else +c calculate and print out the quantities related to the new X. +C call timer(cpu2) + lnscht = lnscht + cpu2 - cpu1 + iter = iter + 1 + +c Compute the infinity norm of the projected (-)gradient. + + call projgr(n,l,u,nbd,x,g,sbgnrm) + +c Print iteration information. + + call prn2lb(n,x,f,g,iprint,itfile,iter,nfgv,nact, + + sbgnrm,nseg,word,iword,iback,stp,xstep) + goto 1000 + endif + 777 continue + +c Test for termination. + + if (sbgnrm .le. pgtol) then +c terminate the algorithm. + task = 'CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL' + goto 999 + endif + + ddum = max(abs(fold), abs(f), one) +c@ky if ((fold - f) .le. tol*ddum) then +c@ky change from (fold - f) to |fold - f| + if (abs(fold - f) .le. tol*ddum) then +c terminate the algorithm. + task = 'CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH' + if (iback .ge. 10) info = -5 +c i.e., to issue a warning if iback>10 in the line search. + goto 999 + endif + +c Compute d=newx-oldx, r=newg-oldg, rr=y'y and dr=y's. + + do 42 i = 1, n + r(i) = g(i) - r(i) + 42 continue + rr = ddot(n,r,1,r,1) + if (stp .eq. one) then + dr = gd - gdold + ddum = -gdold + else + dr = (gd - gdold)*stp + call dscal(n,stp,d,1) + ddum = -gdold*stp + endif + + if (dr .le. epsmch*ddum) then +c skip the L-BFGS update. + nskip = nskip + 1 + updatd = .false. + if (iprint .ge. 1) write (6,1004) dr, ddum + goto 888 + endif + +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +c +c Update the L-BFGS matrix. +c +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc + + updatd = .true. 
+ iupdat = iupdat + 1 + +c Update matrices WS and WY and form the middle matrix in B. + + call matupd(n,m,ws,wy,sy,ss,d,r,itail, + + iupdat,col,head,theta,rr,dr,stp,dtd) + +c Form the upper half of the pds T = theta*SS + L*D^(-1)*L'; +c Store T in the upper triangular of the array wt; +c Cholesky factorize T to J*J' with +c J' stored in the upper triangular of wt. + + call formt(m,wt,sy,ss,col,theta,info) + + if (info .ne. 0) then +c nonpositive definiteness in Cholesky factorization; +c refresh the lbfgs memory and restart the iteration. + if(iprint .ge. 1) write (6, 1007) + info = 0 + col = 0 + head = 1 + theta = one + iupdat = 0 + updatd = .false. + goto 222 + endif + +c Now the inverse of the middle matrix in B is + +c [ D^(1/2) O ] [ -D^(1/2) D^(-1/2)*L' ] +c [ -L*D^(-1/2) J ] [ 0 J' ] + + 888 continue + +c -------------------- the end of the loop ----------------------------- + + goto 222 + 999 continue +C call timer(time2) + time = time2 - time1 + call prn3lb(n,x,f,task,iprint,info,itfile, + + iter,nfgv,nintol,nskip,nact,sbgnrm, + + time,nseg,word,iback,stp,xstep,k, + + cachyt,sbtime,lnscht) + 1000 continue + +c Save local variables. 
+ + lsave(1) = prjctd + lsave(2) = cnstnd + lsave(3) = boxed + lsave(4) = updatd + + isave(1) = nintol + isave(3) = itfile + isave(4) = iback + isave(5) = nskip + isave(6) = head + isave(7) = col + isave(8) = itail + isave(9) = iter + isave(10) = iupdat + isave(12) = nseg + isave(13) = nfgv + isave(14) = info + isave(15) = ifun + isave(16) = iword + isave(17) = nfree + isave(18) = nact + isave(19) = ileave + isave(20) = nenter + + dsave(1) = theta + dsave(2) = fold + dsave(3) = tol + dsave(4) = dnorm + dsave(5) = epsmch + dsave(6) = cpu1 + dsave(7) = cachyt + dsave(8) = sbtime + dsave(9) = lnscht + dsave(10) = time1 + dsave(11) = gd + dsave(12) = stpmx + dsave(13) = sbgnrm + dsave(14) = stp + dsave(15) = gdold + dsave(16) = dtd + + 1001 format (//,'ITERATION ',i5) + 1002 format + + (/,'At iterate',i5,4x,'f= ',1p,d12.5,4x,'|proj g|= ',1p,d12.5) + 1003 format (2(1x,i4),5x,'-',5x,'-',3x,'-',5x,'-',5x,'-',8x,'-',3x, + + 1p,2(1x,d10.3)) + 1004 format (' ys=',1p,e10.3,' -gs=',1p,e10.3,' BFGS update SKIPPED') + 1005 format (/, + +' Singular triangular system detected;',/, + +' refresh the lbfgs memory and restart the iteration.') + 1006 format (/, + +' Nonpositive definiteness in Cholesky factorization in formk;',/, + +' refresh the lbfgs memory and restart the iteration.') + 1007 format (/, + +' Nonpositive definiteness in Cholesky factorization in formt;',/, + +' refresh the lbfgs memory and restart the iteration.') + 1008 format (/, + +' Bad direction in the line search;',/, + +' refresh the lbfgs memory and restart the iteration.') + + return + + end + +c======================= The end of mainlb ============================= + + subroutine active(n, l, u, nbd, x, iwhere, iprint, + + prjctd, cnstnd, boxed) + + logical prjctd, cnstnd, boxed + integer n, iprint, nbd(n), iwhere(n) + double precision x(n), l(n), u(n) + +c ************ +c +c Subroutine active +c +c This subroutine initializes iwhere and projects the initial x to +c the feasible set if necessary. 
+c +c iwhere is an integer array of dimension n. +c On entry iwhere is unspecified. +c On exit iwhere(i)=-1 if x(i) has no bounds +c 3 if l(i)=u(i) +c 0 otherwise. +c In cauchy, iwhere is given finer gradations. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer nbdd,i + double precision zero + parameter (zero=0.0d0) + +c Initialize nbdd, prjctd, cnstnd and boxed. + + nbdd = 0 + prjctd = .false. + cnstnd = .false. + boxed = .true. + +c Project the initial x to the easible set if necessary. + + do 10 i = 1, n + if (nbd(i) .gt. 0) then + if (nbd(i) .le. 2 .and. x(i) .le. l(i)) then + if (x(i) .lt. l(i)) then + prjctd = .true. + x(i) = l(i) + endif + nbdd = nbdd + 1 + else if (nbd(i) .ge. 2 .and. x(i) .ge. u(i)) then + if (x(i) .gt. u(i)) then + prjctd = .true. + x(i) = u(i) + endif + nbdd = nbdd + 1 + endif + endif + 10 continue + +c Initialize iwhere and assign values to cnstnd and boxed. + + do 20 i = 1, n + if (nbd(i) .ne. 2) boxed = .false. + if (nbd(i) .eq. 0) then +c this variable is always free + iwhere(i) = -1 + +c otherwise set x(i)=mid(x(i), u(i), l(i)). + else + cnstnd = .true. + if (nbd(i) .eq. 2 .and. u(i) - l(i) .le. zero) then +c this variable is always fixed + iwhere(i) = 3 + else + iwhere(i) = 0 + endif + endif + 20 continue + + if (iprint .ge. 0) then + if (prjctd) write (6,*) + + 'The initial X is infeasible. Restart with its projection.' + if (.not. cnstnd) + + write (6,*) 'This problem is unconstrained.' + endif + + if (iprint .gt. 
0) write (6,1001) nbdd + + 1001 format (/,'At X0 ',i9,' variables are exactly at the bounds') + + return + + end + +c======================= The end of active ============================= + + subroutine bmv(m, sy, wt, col, v, p, info) + + integer m, col, info + double precision sy(m, m), wt(m, m), v(2*col), p(2*col) + +c ************ +c +c Subroutine bmv +c +c This subroutine computes the product of the 2m x 2m middle matrix +c in the compact L-BFGS formula of B and a 2m vector v; +c it returns the product in p. +c +c m is an integer variable. +c On entry m is the maximum number of variable metric corrections +c used to define the limited memory matrix. +c On exit m is unchanged. +c +c sy is a double precision array of dimension m x m. +c On entry sy specifies the matrix S'Y. +c On exit sy is unchanged. +c +c wt is a double precision array of dimension m x m. +c On entry wt specifies the upper triangular matrix J' which is +c the Cholesky factor of (thetaS'S+LD^(-1)L'). +c On exit wt is unchanged. +c +c col is an integer variable. +c On entry col specifies the number of s-vectors (or y-vectors) +c stored in the compact L-BFGS formula. +c On exit col is unchanged. +c +c v is a double precision array of dimension 2col. +c On entry v specifies vector v. +c On exit v is unchanged. +c +c p is a double precision array of dimension 2col. +c On entry p is unspecified. +c On exit p is the product Mv. +c +c info is an integer variable. +c On entry info is unspecified. +c On exit info = 0 for normal return, +c = nonzero for abnormal return when the system +c to be solved by dtrsl is singular. +c +c Subprograms called: +c +c Linpack ... dtrsl. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer i,k,i2 + double precision sum + + if (col .eq. 
0) return + +c PART I: solve [ D^(1/2) O ] [ p1 ] = [ v1 ] +c [ -L*D^(-1/2) J ] [ p2 ] [ v2 ]. + +c solve Jp2=v2+LD^(-1)v1. + p(col + 1) = v(col + 1) + do 20 i = 2, col + i2 = col + i + sum = 0.0d0 + do 10 k = 1, i - 1 + sum = sum + sy(i,k)*v(k)/sy(k,k) + 10 continue + p(i2) = v(i2) + sum + 20 continue +c Solve the triangular system +c call dtrsl(wt,m,col,p(col+1),11,info) + call dtrtrs('U', 'T', 'N', col, 1, wt, m, p(col+1), col, info) + if (info .ne. 0) return + +c solve D^(1/2)p1=v1. + do 30 i = 1, col + p(i) = v(i)/sqrt(sy(i,i)) + 30 continue + +c PART II: solve [ -D^(1/2) D^(-1/2)*L' ] [ p1 ] = [ p1 ] +c [ 0 J' ] [ p2 ] [ p2 ]. + +c solve J^Tp2=p2. +c call dtrsl(wt,m,col,p(col+1),01,info) + call dtrtrs('U', 'N', 'N', col, 1, wt, m, p(col+1), col, info) + if (info .ne. 0) return + +c compute p1=-D^(-1/2)(p1-D^(-1/2)L'p2) +c =-D^(-1/2)p1+D^(-1)L'p2. + do 40 i = 1, col + p(i) = -p(i)/sqrt(sy(i,i)) + 40 continue + do 60 i = 1, col + sum = 0.d0 + do 50 k = i + 1, col + sum = sum + sy(k,i)*p(col+k)/sy(i,i) + 50 continue + p(i) = p(i) + sum + 60 continue + + return + + end + +c======================== The end of bmv =============================== + + subroutine cauchy(n, x, l, u, nbd, g, iorder, iwhere, t, d, xcp, + + m, wy, ws, sy, wt, theta, col, head, p, c, wbp, + + v, nseg, iprint, sbgnrm, info, epsmch) + implicit none + integer n, m, head, col, nseg, iprint, info, + + nbd(n), iorder(n), iwhere(n) + double precision theta, epsmch, + + x(n), l(n), u(n), g(n), t(n), d(n), xcp(n), + + wy(n, col), ws(n, col), sy(m, m), + + wt(m, m), p(2*m), c(2*m), wbp(2*m), v(2*m) + +c ************ +c +c Subroutine cauchy +c +c For given x, l, u, g (with sbgnrm > 0), and a limited memory +c BFGS matrix B defined in terms of matrices WY, WS, WT, and +c scalars head, col, and theta, this subroutine computes the +c generalized Cauchy point (GCP), defined as the first local +c minimizer of the quadratic +c +c Q(x + s) = g's + 1/2 s'Bs +c +c along the projected gradient direction 
P(x-tg,l,u). +c The routine returns the GCP in xcp. +c +c n is an integer variable. +c On entry n is the dimension of the problem. +c On exit n is unchanged. +c +c x is a double precision array of dimension n. +c On entry x is the starting point for the GCP computation. +c On exit x is unchanged. +c +c l is a double precision array of dimension n. +c On entry l is the lower bound of x. +c On exit l is unchanged. +c +c u is a double precision array of dimension n. +c On entry u is the upper bound of x. +c On exit u is unchanged. +c +c nbd is an integer array of dimension n. +c On entry nbd represents the type of bounds imposed on the +c variables, and must be specified as follows: +c nbd(i)=0 if x(i) is unbounded, +c 1 if x(i) has only a lower bound, +c 2 if x(i) has both lower and upper bounds, and +c 3 if x(i) has only an upper bound. +c On exit nbd is unchanged. +c +c g is a double precision array of dimension n. +c On entry g is the gradient of f(x). g must be a nonzero vector. +c On exit g is unchanged. +c +c iorder is an integer working array of dimension n. +c iorder will be used to store the breakpoints in the piecewise +c linear path and free variables encountered. On exit, +c iorder(1),...,iorder(nleft) are indices of breakpoints +c which have not been encountered; +c iorder(nleft+1),...,iorder(nbreak) are indices of +c encountered breakpoints; and +c iorder(nfree),...,iorder(n) are indices of variables which +c have no bound constraits along the search direction. +c +c iwhere is an integer array of dimension n. +c On entry iwhere indicates only the permanently fixed (iwhere=3) +c or free (iwhere= -1) components of x. +c On exit iwhere records the status of the current x variables. +c iwhere(i)=-3 if x(i) is free and has bounds, but is not moved +c 0 if x(i) is free and has bounds, and is moved +c 1 if x(i) is fixed at l(i), and l(i) .ne. u(i) +c 2 if x(i) is fixed at u(i), and u(i) .ne. 
l(i) +c 3 if x(i) is always fixed, i.e., u(i)=x(i)=l(i) +c -1 if x(i) is always free, i.e., it has no bounds. +c +c t is a double precision working array of dimension n. +c t will be used to store the break points. +c +c d is a double precision array of dimension n used to store +c the Cauchy direction P(x-tg)-x. +c +c xcp is a double precision array of dimension n used to return the +c GCP on exit. +c +c m is an integer variable. +c On entry m is the maximum number of variable metric corrections +c used to define the limited memory matrix. +c On exit m is unchanged. +c +c ws, wy, sy, and wt are double precision arrays. +c On entry they store information that defines the +c limited memory BFGS matrix: +c ws(n,m) stores S, a set of s-vectors; +c wy(n,m) stores Y, a set of y-vectors; +c sy(m,m) stores S'Y; +c wt(m,m) stores the +c Cholesky factorization of (theta*S'S+LD^(-1)L'). +c On exit these arrays are unchanged. +c +c theta is a double precision variable. +c On entry theta is the scaling factor specifying B_0 = theta I. +c On exit theta is unchanged. +c +c col is an integer variable. +c On entry col is the actual number of variable metric +c corrections stored so far. +c On exit col is unchanged. +c +c head is an integer variable. +c On entry head is the location of the first s-vector (or y-vector) +c in S (or Y). +c On exit col is unchanged. +c +c p is a double precision working array of dimension 2m. +c p will be used to store the vector p = W^(T)d. +c +c c is a double precision working array of dimension 2m. +c c will be used to store the vector c = W^(T)(xcp-x). +c +c wbp is a double precision working array of dimension 2m. +c wbp will be used to store the row of W corresponding +c to a breakpoint. +c +c v is a double precision working array of dimension 2m. +c +c nseg is an integer variable. +c On exit nseg records the number of quadratic segments explored +c in searching for the GCP. +c +c sg and yg are double precision arrays of dimension m. 
+c On entry sg and yg store S'g and Y'g correspondingly. +c On exit they are unchanged. +c +c iprint is an INTEGER variable that must be set by the user. +c It controls the frequency and type of output generated: +c iprint<0 no output is generated; +c iprint=0 print only one line at the last iteration; +c 0100 print details of every iteration including x and g; +c When iprint > 0, the file iterate.dat will be created to +c summarize the iteration. +c +c sbgnrm is a double precision variable. +c On entry sbgnrm is the norm of the projected gradient at x. +c On exit sbgnrm is unchanged. +c +c info is an integer variable. +c On entry info is 0. +c On exit info = 0 for normal return, +c = nonzero for abnormal return when the the system +c used in routine bmv is singular. +c +c Subprograms called: +c +c L-BFGS-B Library ... hpsolb, bmv. +c +c Linpack ... dscal dcopy, daxpy. +c +c +c References: +c +c [1] R. H. Byrd, P. Lu, J. Nocedal and C. Zhu, ``A limited +c memory algorithm for bound constrained optimization'', +c SIAM J. Scientific Computing 16 (1995), no. 5, pp. 1190--1208. +c +c [2] C. Zhu, R.H. Byrd, P. Lu, J. Nocedal, ``L-BFGS-B: FORTRAN +c Subroutines for Large Scale Bound Constrained Optimization'' +c Tech. Report, NAM-11, EECS Department, Northwestern University, +c 1994. +c +c (Postscript files of these papers are available via anonymous +c ftp to eecs.nwu.edu in the directory pub/lbfgs/lbfgs_bcm.) +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. 
+c +c +c ************ + + logical xlower,xupper,bnded + integer i,j,col2,nfree,nbreak,pointr, + + ibp,nleft,ibkmin,iter + double precision f1,f2,dt,dtm,tsum,dibp,zibp,dibp2,bkmin, + + tu,tl,wmc,wmp,wmw,ddot,tj,tj0,neggi,sbgnrm, + + f2_org + double precision one,zero + parameter (one=1.0d0,zero=0.0d0) + +c Check the status of the variables, reset iwhere(i) if necessary; +c compute the Cauchy direction d and the breakpoints t; initialize +c the derivative f1 and the vector p = W'd (for theta = 1). + + if (sbgnrm .le. zero) then + if (iprint .ge. 0) write (6,*) 'Subgnorm = 0. GCP = X.' + call dcopy(n,x,1,xcp,1) + return + endif + bnded = .true. + nfree = n + 1 + nbreak = 0 + ibkmin = 0 + bkmin = zero + col2 = 2*col + f1 = zero + if (iprint .ge. 99) write (6,3010) + +c We set p to zero and build it up as we determine d. + + do 20 i = 1, col2 + p(i) = zero + 20 continue + +c In the following loop we determine for each variable its bound +c status and its breakpoint, and update p accordingly. +c Smallest breakpoint is identified. + + do 50 i = 1, n + neggi = -g(i) + if (iwhere(i) .ne. 3 .and. iwhere(i) .ne. -1) then +c if x(i) is not a constant and has bounds, +c compute the difference between x(i) and its bounds. + if (nbd(i) .le. 2) tl = x(i) - l(i) + if (nbd(i) .ge. 2) tu = u(i) - x(i) + +c If a variable is close enough to a bound +c we treat it as at bound. + xlower = nbd(i) .le. 2 .and. tl .le. zero + xupper = nbd(i) .ge. 2 .and. tu .le. zero + +c reset iwhere(i). + iwhere(i) = 0 + if (xlower) then + if (neggi .le. zero) iwhere(i) = 1 + else if (xupper) then + if (neggi .ge. zero) iwhere(i) = 2 + else + if (abs(neggi) .le. zero) iwhere(i) = -3 + endif + endif + pointr = head + if (iwhere(i) .ne. 0 .and. iwhere(i) .ne. -1) then + d(i) = zero + else + d(i) = neggi + f1 = f1 - neggi*neggi +c calculate p := p - W'e_i* (g_i). 
+ do 40 j = 1, col + p(j) = p(j) + wy(i,pointr)* neggi + p(col + j) = p(col + j) + ws(i,pointr)*neggi + pointr = mod(pointr,m) + 1 + 40 continue + if (nbd(i) .le. 2 .and. nbd(i) .ne. 0 + + .and. neggi .lt. zero) then +c x(i) + d(i) is bounded; compute t(i). + nbreak = nbreak + 1 + iorder(nbreak) = i + t(nbreak) = tl/(-neggi) + if (nbreak .eq. 1 .or. t(nbreak) .lt. bkmin) then + bkmin = t(nbreak) + ibkmin = nbreak + endif + else if (nbd(i) .ge. 2 .and. neggi .gt. zero) then +c x(i) + d(i) is bounded; compute t(i). + nbreak = nbreak + 1 + iorder(nbreak) = i + t(nbreak) = tu/neggi + if (nbreak .eq. 1 .or. t(nbreak) .lt. bkmin) then + bkmin = t(nbreak) + ibkmin = nbreak + endif + else +c x(i) + d(i) is not bounded. + nfree = nfree - 1 + iorder(nfree) = i + if (abs(neggi) .gt. zero) bnded = .false. + endif + endif + 50 continue + +c The indices of the nonzero components of d are now stored +c in iorder(1),...,iorder(nbreak) and iorder(nfree),...,iorder(n). +c The smallest of the nbreak breakpoints is in t(ibkmin)=bkmin. + + if (theta .ne. one) then +c complete the initialization of p for theta not= one. + call dscal(col,theta,p(col+1),1) + endif + +c Initialize GCP xcp = x. + + call dcopy(n,x,1,xcp,1) + + if (nbreak .eq. 0 .and. nfree .eq. n + 1) then +c is a zero vector, return with the initial xcp as GCP. + if (iprint .gt. 100) write (6,1010) (xcp(i), i = 1, n) + return + endif + +c Initialize c = W'(xcp - x) = 0. + + do 60 j = 1, col2 + c(j) = zero + 60 continue + +c Initialize derivative f2. + + f2 = -theta*f1 + f2_org = f2 + if (col .gt. 0) then + call bmv(m,sy,wt,col,p,v,info) + if (info .ne. 0) return + f2 = f2 - ddot(col2,v,1,p,1) + endif + dtm = -f1/f2 + tsum = zero + nseg = 1 + if (iprint .ge. 99) + + write (6,*) 'There are ',nbreak,' breakpoints ' + +c If there are no breakpoints, locate the GCP and return. + + if (nbreak .eq. 
0) goto 888 + + nleft = nbreak + iter = 1 + + + tj = zero + +c------------------- the beginning of the loop ------------------------- + + 777 continue + +c Find the next smallest breakpoint; +c compute dt = t(nleft) - t(nleft + 1). + + tj0 = tj + if (iter .eq. 1) then +c Since we already have the smallest breakpoint we need not do +c heapsort yet. Often only one breakpoint is used and the +c cost of heapsort is avoided. + tj = bkmin + ibp = iorder(ibkmin) + else + if (iter .eq. 2) then +c Replace the already used smallest breakpoint with the +c breakpoint numbered nbreak > nlast, before heapsort call. + if (ibkmin .ne. nbreak) then + t(ibkmin) = t(nbreak) + iorder(ibkmin) = iorder(nbreak) + endif +c Update heap structure of breakpoints +c (if iter=2, initialize heap). + endif + call hpsolb(nleft,t,iorder,iter-2) + tj = t(nleft) + ibp = iorder(nleft) + endif + + dt = tj - tj0 + + if (dt .ne. zero .and. iprint .ge. 100) then + write (6,4011) nseg,f1,f2 + write (6,5010) dt + write (6,6010) dtm + endif + +c If a minimizer is within this interval, locate the GCP and return. + + if (dtm .lt. dt) goto 888 + +c Otherwise fix one variable and +c reset the corresponding component of d to zero. + + tsum = tsum + dt + nleft = nleft - 1 + iter = iter + 1 + dibp = d(ibp) + d(ibp) = zero + if (dibp .gt. zero) then + zibp = u(ibp) - x(ibp) + xcp(ibp) = u(ibp) + iwhere(ibp) = 2 + else + zibp = l(ibp) - x(ibp) + xcp(ibp) = l(ibp) + iwhere(ibp) = 1 + endif + if (iprint .ge. 100) write (6,*) 'Variable ',ibp,' is fixed.' + if (nleft .eq. 0 .and. nbreak .eq. n) then +c all n variables are fixed, +c return with xcp as GCP. + dtm = dt + goto 999 + endif + +c Update the derivative information. + + nseg = nseg + 1 + dibp2 = dibp**2 + +c Update f1 and f2. + +c temporarily set f1 and f2 for col=0. + f1 = f1 + dt*f2 + dibp2 - theta*dibp*zibp + f2 = f2 - theta*dibp2 + + if (col .gt. 0) then +c update c = c + dt*p. 
+ call daxpy(col2,dt,p,1,c,1) + +c choose wbp, +c the row of W corresponding to the breakpoint encountered. + pointr = head + do 70 j = 1,col + wbp(j) = wy(ibp,pointr) + wbp(col + j) = theta*ws(ibp,pointr) + pointr = mod(pointr,m) + 1 + 70 continue + +c compute (wbp)Mc, (wbp)Mp, and (wbp)M(wbp)'. + call bmv(m,sy,wt,col,wbp,v,info) + if (info .ne. 0) return + wmc = ddot(col2,c,1,v,1) + wmp = ddot(col2,p,1,v,1) + wmw = ddot(col2,wbp,1,v,1) + +c update p = p - dibp*wbp. + call daxpy(col2,-dibp,wbp,1,p,1) + +c complete updating f1 and f2 while col > 0. + f1 = f1 + dibp*wmc + f2 = f2 + 2.0d0*dibp*wmp - dibp2*wmw + endif + + f2 = max(epsmch*f2_org,f2) + if (nleft .gt. 0) then + dtm = -f1/f2 + goto 777 +c to repeat the loop for unsearched intervals. + else if(bnded) then + f1 = zero + f2 = zero + dtm = zero + else + dtm = -f1/f2 + endif + +c------------------- the end of the loop ------------------------------- + + 888 continue + if (iprint .ge. 99) then + write (6,*) + write (6,*) 'GCP found in this segment' + write (6,4010) nseg,f1,f2 + write (6,6010) dtm + endif + if (dtm .le. zero) dtm = zero + tsum = tsum + dtm + +c Move free variables (i.e., the ones w/o breakpoints) and +c the variables whose breakpoints haven't been reached. + + call daxpy(n,tsum,d,1,xcp,1) + + 999 continue + +c Update c = c + dtm*p = W'(x^c - x) +c which will be used in computing r = Z'(B(x^c - x) + g). + + if (col .gt. 0) call daxpy(col2,dtm,p,1,c,1) + if (iprint .gt. 100) write (6,1010) (xcp(i),i = 1,n) + if (iprint .ge. 
99) write (6,2010) + + 1010 format ('Cauchy X = ',/,(4x,1p,6(1x,d11.4))) + 2010 format (/,'---------------- exit CAUCHY----------------------',/) + 3010 format (/,'---------------- CAUCHY entered-------------------') + 4010 format ('Piece ',i3,' --f1, f2 at start point ',1p,2(1x,d11.4)) + 4011 format (/,'Piece ',i3,' --f1, f2 at start point ', + + 1p,2(1x,d11.4)) + 5010 format ('Distance to the next break point = ',1p,d11.4) + 6010 format ('Distance to the stationary point = ',1p,d11.4) + + return + + end + +c====================== The end of cauchy ============================== + + subroutine cmprlb(n, m, x, g, ws, wy, sy, wt, z, r, wa, index, + + theta, col, head, nfree, cnstnd, info) + + logical cnstnd + integer n, m, col, head, nfree, info, index(n) + double precision theta, + + x(n), g(n), z(n), r(n), wa(4*m), + + ws(n, m), wy(n, m), sy(m, m), wt(m, m) + +c ************ +c +c Subroutine cmprlb +c +c This subroutine computes r=-Z'B(xcp-xk)-Z'g by using +c wa(2m+1)=W'(xcp-x) from subroutine cauchy. +c +c Subprograms called: +c +c L-BFGS-B Library ... bmv. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer i,j,k,pointr + double precision a1,a2 + + if (.not. cnstnd .and. col .gt. 0) then + do 26 i = 1, n + r(i) = -g(i) + 26 continue + else + do 30 i = 1, nfree + k = index(i) + r(i) = -theta*(z(k) - x(k)) - g(k) + 30 continue + call bmv(m,sy,wt,col,wa(2*m+1),wa(1),info) + if (info .ne. 
0) then + info = -8 + return + endif + pointr = head + do 34 j = 1, col + a1 = wa(j) + a2 = theta*wa(col + j) + do 32 i = 1, nfree + k = index(i) + r(i) = r(i) + wy(k,pointr)*a1 + ws(k,pointr)*a2 + 32 continue + pointr = mod(pointr,m) + 1 + 34 continue + endif + + return + + end + +c======================= The end of cmprlb ============================= + + subroutine errclb(n, m, factr, l, u, nbd, task, info, k) + + character*60 task + integer n, m, info, k, nbd(n) + double precision factr, l(n), u(n) + +c ************ +c +c Subroutine errclb +c +c This subroutine checks the validity of the input data. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer i + double precision one,zero + parameter (one=1.0d0,zero=0.0d0) + +c Check the input arguments for errors. + + if (n .le. 0) task = 'ERROR: N .LE. 0' + if (m .le. 0) task = 'ERROR: M .LE. 0' + if (factr .lt. zero) task = 'ERROR: FACTR .LT. 0' + +c Check the validity of the arrays nbd(i), u(i), and l(i). + + do 10 i = 1, n + if (nbd(i) .lt. 0 .or. nbd(i) .gt. 3) then +c return + task = 'ERROR: INVALID NBD' + info = -6 + k = i + endif + if (nbd(i) .eq. 2) then + if (l(i) .gt. 
u(i)) then +c return + task = 'ERROR: NO FEASIBLE SOLUTION' + info = -7 + k = i + endif + endif + 10 continue + + return + + end + +c======================= The end of errclb ============================= + + subroutine formk(n, nsub, ind, nenter, ileave, indx2, iupdat, + + updatd, wn, wn1, m, ws, wy, sy, theta, col, + + head, info) + + integer n, nsub, m, col, head, nenter, ileave, iupdat, + + info, ind(n), indx2(n) + double precision theta, wn(2*m, 2*m), wn1(2*m, 2*m), + + ws(n, m), wy(n, m), sy(m, m) + logical updatd + +c ************ +c +c Subroutine formk +c +c This subroutine forms the LEL^T factorization of the indefinite +c +c matrix K = [-D -Y'ZZ'Y/theta L_a'-R_z' ] +c [L_a -R_z theta*S'AA'S ] +c where E = [-I 0] +c [ 0 I] +c The matrix K can be shown to be equal to the matrix M^[-1]N +c occurring in section 5.1 of [1], as well as to the matrix +c Mbar^[-1] Nbar in section 5.3. +c +c n is an integer variable. +c On entry n is the dimension of the problem. +c On exit n is unchanged. +c +c nsub is an integer variable +c On entry nsub is the number of subspace variables in free set. +c On exit nsub is not changed. +c +c ind is an integer array of dimension nsub. +c On entry ind specifies the indices of subspace variables. +c On exit ind is unchanged. +c +c nenter is an integer variable. +c On entry nenter is the number of variables entering the +c free set. +c On exit nenter is unchanged. +c +c ileave is an integer variable. +c On entry indx2(ileave),...,indx2(n) are the variables leaving +c the free set. +c On exit ileave is unchanged. +c +c indx2 is an integer array of dimension n. +c On entry indx2(1),...,indx2(nenter) are the variables entering +c the free set, while indx2(ileave),...,indx2(n) are the +c variables leaving the free set. +c On exit indx2 is unchanged. +c +c iupdat is an integer variable. +c On entry iupdat is the total number of BFGS updates made so far. +c On exit iupdat is unchanged. +c +c updatd is a logical variable. 
+c On entry 'updatd' is true if the L-BFGS matrix is updatd. +c On exit 'updatd' is unchanged. +c +c wn is a double precision array of dimension 2m x 2m. +c On entry wn is unspecified. +c On exit the upper triangle of wn stores the LEL^T factorization +c of the 2*col x 2*col indefinite matrix +c [-D -Y'ZZ'Y/theta L_a'-R_z' ] +c [L_a -R_z theta*S'AA'S ] +c +c wn1 is a double precision array of dimension 2m x 2m. +c On entry wn1 stores the lower triangular part of +c [Y' ZZ'Y L_a'+R_z'] +c [L_a+R_z S'AA'S ] +c in the previous iteration. +c On exit wn1 stores the corresponding updated matrices. +c The purpose of wn1 is just to store these inner products +c so they can be easily updated and inserted into wn. +c +c m is an integer variable. +c On entry m is the maximum number of variable metric corrections +c used to define the limited memory matrix. +c On exit m is unchanged. +c +c ws, wy, sy, and wtyy are double precision arrays; +c theta is a double precision variable; +c col is an integer variable; +c head is an integer variable. +c On entry they store the information defining the +c limited memory BFGS matrix: +c ws(n,m) stores S, a set of s-vectors; +c wy(n,m) stores Y, a set of y-vectors; +c sy(m,m) stores S'Y; +c wtyy(m,m) stores the Cholesky factorization +c of (theta*S'S+LD^(-1)L') +c theta is the scaling factor specifying B_0 = theta I; +c col is the number of variable metric corrections stored; +c head is the location of the 1st s- (or y-) vector in S (or Y). +c On exit they are unchanged. +c +c info is an integer variable. +c On entry info is unspecified. +c On exit info = 0 for normal return; +c = -1 when the 1st Cholesky factorization failed; +c = -2 when the 2st Cholesky factorization failed. +c +c Subprograms called: +c +c Linpack ... dcopy, dpofa, dtrsl. +c +c +c References: +c [1] R. H. Byrd, P. Lu, J. Nocedal and C. Zhu, ``A limited +c memory algorithm for bound constrained optimization'', +c SIAM J. Scientific Computing 16 (1995), no. 5, pp. 
1190--1208. +c +c [2] C. Zhu, R.H. Byrd, P. Lu, J. Nocedal, ``L-BFGS-B: a +c limited memory FORTRAN code for solving bound constrained +c optimization problems'', Tech. Report, NAM-11, EECS Department, +c Northwestern University, 1994. +c +c (Postscript files of these papers are available via anonymous +c ftp to eecs.nwu.edu in the directory pub/lbfgs/lbfgs_bcm.) +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer m2,ipntr,jpntr,iy,is,jy,js,is1,js1,k1,i,k, + + col2,pbegin,pend,dbegin,dend,upcl + double precision ddot,temp1,temp2,temp3,temp4 + double precision one,zero + parameter (one=1.0d0,zero=0.0d0) + +c Form the lower triangular part of +c WN1 = [Y' ZZ'Y L_a'+R_z'] +c [L_a+R_z S'AA'S ] +c where L_a is the strictly lower triangular part of S'AA'Y +c R_z is the upper triangular part of S'ZZ'Y. + + if (updatd) then + if (iupdat .gt. m) then +c shift old part of WN1. + do 10 jy = 1, m - 1 + js = m + jy + call dcopy(m-jy,wn1(jy+1,jy+1),1,wn1(jy,jy),1) + call dcopy(m-jy,wn1(js+1,js+1),1,wn1(js,js),1) + call dcopy(m-1,wn1(m+2,jy+1),1,wn1(m+1,jy),1) + 10 continue + endif + +c put new rows in blocks (1,1), (2,1) and (2,2). + pbegin = 1 + pend = nsub + dbegin = nsub + 1 + dend = n + iy = col + is = m + col + ipntr = head + col - 1 + if (ipntr .gt. 
m) ipntr = ipntr - m + jpntr = head + do 20 jy = 1, col + js = m + jy + temp1 = zero + temp2 = zero + temp3 = zero +c compute element jy of row 'col' of Y'ZZ'Y + do 15 k = pbegin, pend + k1 = ind(k) + temp1 = temp1 + wy(k1,ipntr)*wy(k1,jpntr) + 15 continue +c compute elements jy of row 'col' of L_a and S'AA'S + do 16 k = dbegin, dend + k1 = ind(k) + temp2 = temp2 + ws(k1,ipntr)*ws(k1,jpntr) + temp3 = temp3 + ws(k1,ipntr)*wy(k1,jpntr) + 16 continue + wn1(iy,jy) = temp1 + wn1(is,js) = temp2 + wn1(is,jy) = temp3 + jpntr = mod(jpntr,m) + 1 + 20 continue + +c put new column in block (2,1). + jy = col + jpntr = head + col - 1 + if (jpntr .gt. m) jpntr = jpntr - m + ipntr = head + do 30 i = 1, col + is = m + i + temp3 = zero +c compute element i of column 'col' of R_z + do 25 k = pbegin, pend + k1 = ind(k) + temp3 = temp3 + ws(k1,ipntr)*wy(k1,jpntr) + 25 continue + ipntr = mod(ipntr,m) + 1 + wn1(is,jy) = temp3 + 30 continue + upcl = col - 1 + else + upcl = col + endif + +c modify the old parts in blocks (1,1) and (2,2) due to changes +c in the set of free variables. + ipntr = head + do 45 iy = 1, upcl + is = m + iy + jpntr = head + do 40 jy = 1, iy + js = m + jy + temp1 = zero + temp2 = zero + temp3 = zero + temp4 = zero + do 35 k = 1, nenter + k1 = indx2(k) + temp1 = temp1 + wy(k1,ipntr)*wy(k1,jpntr) + temp2 = temp2 + ws(k1,ipntr)*ws(k1,jpntr) + 35 continue + do 36 k = ileave, n + k1 = indx2(k) + temp3 = temp3 + wy(k1,ipntr)*wy(k1,jpntr) + temp4 = temp4 + ws(k1,ipntr)*ws(k1,jpntr) + 36 continue + wn1(iy,jy) = wn1(iy,jy) + temp1 - temp3 + wn1(is,js) = wn1(is,js) - temp2 + temp4 + jpntr = mod(jpntr,m) + 1 + 40 continue + ipntr = mod(ipntr,m) + 1 + 45 continue + +c modify the old parts in block (2,1). 
+ ipntr = head + do 60 is = m + 1, m + upcl + jpntr = head + do 55 jy = 1, upcl + temp1 = zero + temp3 = zero + do 50 k = 1, nenter + k1 = indx2(k) + temp1 = temp1 + ws(k1,ipntr)*wy(k1,jpntr) + 50 continue + do 51 k = ileave, n + k1 = indx2(k) + temp3 = temp3 + ws(k1,ipntr)*wy(k1,jpntr) + 51 continue + if (is .le. jy + m) then + wn1(is,jy) = wn1(is,jy) + temp1 - temp3 + else + wn1(is,jy) = wn1(is,jy) - temp1 + temp3 + endif + jpntr = mod(jpntr,m) + 1 + 55 continue + ipntr = mod(ipntr,m) + 1 + 60 continue + +c Form the upper triangle of WN = [D+Y' ZZ'Y/theta -L_a'+R_z' ] +c [-L_a +R_z S'AA'S*theta] + + m2 = 2*m + do 70 iy = 1, col + is = col + iy + is1 = m + iy + do 65 jy = 1, iy + js = col + jy + js1 = m + jy + wn(jy,iy) = wn1(iy,jy)/theta + wn(js,is) = wn1(is1,js1)*theta + 65 continue + do 66 jy = 1, iy - 1 + wn(jy,is) = -wn1(is1,jy) + 66 continue + do 67 jy = iy, col + wn(jy,is) = wn1(is1,jy) + 67 continue + wn(iy,iy) = wn(iy,iy) + sy(iy,iy) + 70 continue + +c Form the upper triangle of WN= [ LL' L^-1(-L_a'+R_z')] +c [(-L_a +R_z)L'^-1 S'AA'S*theta ] + +c first Cholesky factor (1,1) block of wn to get LL' +c with L' stored in the upper triangle of wn. +c call dpofa(wn,m2,col,info) + call dpotrf('U', col, wn, m2, info) + if (info .ne. 0) then + info = -1 + return + endif +c then form L^-1(-L_a'+R_z') in the (1,2) block. + col2 = 2*col + do 71 js = col+1 ,col2 +c call dtrsl(wn,m2,col,wn(1,js),11,info) + call dtrtrs('U', 'T', 'N', col, 1, wn, m2, wn(1,js), col, info) + 71 continue + +c Form S'AA'S*theta + (L^-1(-L_a'+R_z'))'L^-1(-L_a'+R_z') in the +c upper triangle of (2,2) block of wn. + + + do 72 is = col+1, col2 + do 74 js = is, col2 + wn(is,js) = wn(is,js) + ddot(col,wn(1,is),1,wn(1,js),1) + 74 continue + 72 continue + +c Cholesky factorization of (2,2) block of wn. + +c call dpofa(wn(col+1,col+1),m2,col,info) + call dpotrf('U', col, wn(col+1,col+1), m2, info) + if (info .ne. 
0) then + info = -2 + return + endif + + return + + end + +c======================= The end of formk ============================== + + subroutine formt(m, wt, sy, ss, col, theta, info) + + integer m, col, info + double precision theta, wt(m, m), sy(m, m), ss(m, m) + +c ************ +c +c Subroutine formt +c +c This subroutine forms the upper half of the pos. def. and symm. +c T = theta*SS + L*D^(-1)*L', stores T in the upper triangle +c of the array wt, and performs the Cholesky factorization of T +c to produce J*J', with J' stored in the upper triangle of wt. +c +c Subprograms called: +c +c Linpack ... dpofa. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer i,j,k,k1 + double precision ddum + double precision zero + parameter (zero=0.0d0) + + +c Form the upper half of T = theta*SS + L*D^(-1)*L', +c store T in the upper triangle of the array wt. + + do 52 j = 1, col + wt(1,j) = theta*ss(1,j) + 52 continue + do 55 i = 2, col + do 54 j = i, col + k1 = min(i,j) - 1 + ddum = zero + do 53 k = 1, k1 + ddum = ddum + sy(i,k)*sy(j,k)/sy(k,k) + 53 continue + wt(i,j) = ddum + theta*ss(i,j) + 54 continue + 55 continue + +c Cholesky factorize T to J*J' with +c J' stored in the upper triangle of wt. + +c call dpofa(wt,m,col,info) + call dpotrf('U', col, wt, m, info) + if (info .ne. 
0) then + info = -3 + endif + + return + + end + +c======================= The end of formt ============================== + + subroutine freev(n, nfree, index, nenter, ileave, indx2, + + iwhere, wrk, updatd, cnstnd, iprint, iter) + + integer n, nfree, nenter, ileave, iprint, iter, + + index(n), indx2(n), iwhere(n) + logical wrk, updatd, cnstnd + +c ************ +c +c Subroutine freev +c +c This subroutine counts the entering and leaving variables when +c iter > 0, and finds the index set of free and active variables +c at the GCP. +c +c cnstnd is a logical variable indicating whether bounds are present +c +c index is an integer array of dimension n +c for i=1,...,nfree, index(i) are the indices of free variables +c for i=nfree+1,...,n, index(i) are the indices of bound variables +c On entry after the first iteration, index gives +c the free variables at the previous iteration. +c On exit it gives the free variables based on the determination +c in cauchy using the array iwhere. +c +c indx2 is an integer array of dimension n +c On entry indx2 is unspecified. +c On exit with iter>0, indx2 indicates which variables +c have changed status since the previous iteration. +c For i= 1,...,nenter, indx2(i) have changed from bound to free. +c For i= ileave+1,...,n, indx2(i) have changed from free to bound. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer iact,i,k + + nenter = 0 + ileave = n + 1 + if (iter .gt. 0 .and. cnstnd) then +c count the entering and leaving variables. + do 20 i = 1, nfree + k = index(i) + +c write(6,*) ' k = index(i) ', k +c write(6,*) ' index = ', i + + if (iwhere(k) .gt. 0) then + ileave = ileave - 1 + indx2(ileave) = k + if (iprint .ge. 
100) write (6,*) + + 'Variable ',k,' leaves the set of free variables' + endif + 20 continue + do 22 i = 1 + nfree, n + k = index(i) + if (iwhere(k) .le. 0) then + nenter = nenter + 1 + indx2(nenter) = k + if (iprint .ge. 100) write (6,*) + + 'Variable ',k,' enters the set of free variables' + endif + 22 continue + if (iprint .ge. 99) write (6,*) + + n+1-ileave,' variables leave; ',nenter,' variables enter' + endif + wrk = (ileave .lt. n+1) .or. (nenter .gt. 0) .or. updatd + +c Find the index set of free and active variables at the GCP. + + nfree = 0 + iact = n + 1 + do 24 i = 1, n + if (iwhere(i) .le. 0) then + nfree = nfree + 1 + index(nfree) = i + else + iact = iact - 1 + index(iact) = i + endif + 24 continue + if (iprint .ge. 99) write (6,*) + + nfree,' variables are free at GCP ',iter + 1 + + return + + end + +c======================= The end of freev ============================== + + subroutine hpsolb(n, t, iorder, iheap) + integer iheap, n, iorder(n) + double precision t(n) + +c ************ +c +c Subroutine hpsolb +c +c This subroutine sorts out the least element of t, and puts the +c remaining elements of t in a heap. +c +c n is an integer variable. +c On entry n is the dimension of the arrays t and iorder. +c On exit n is unchanged. +c +c t is a double precision array of dimension n. +c On entry t stores the elements to be sorted, +c On exit t(n) stores the least elements of t, and t(1) to t(n-1) +c stores the remaining elements in the form of a heap. +c +c iorder is an integer array of dimension n. +c On entry iorder(i) is the index of t(i). +c On exit iorder(i) is still the index of t(i), but iorder may be +c permuted in accordance with t. +c +c iheap is an integer variable specifying the task. +c On entry iheap should be set as follows: +c iheap .eq. 0 if t(1) to t(n) is not in the form of a heap, +c iheap .ne. 0 if otherwise. +c On exit iheap is unchanged. +c +c +c References: +c Algorithm 232 of CACM (J. W. J. Williams): HEAPSORT. 
+c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c ************ + + integer i,j,k,indxin,indxou + double precision ddum,out + + if (iheap .eq. 0) then + +c Rearrange the elements t(1) to t(n) to form a heap. + + do 20 k = 2, n + ddum = t(k) + indxin = iorder(k) + +c Add ddum to the heap. + i = k + 10 continue + if (i.gt.1) then + j = i/2 + if (ddum .lt. t(j)) then + t(i) = t(j) + iorder(i) = iorder(j) + i = j + goto 10 + endif + endif + t(i) = ddum + iorder(i) = indxin + 20 continue + endif + +c Assign to 'out' the value of t(1), the least member of the heap, +c and rearrange the remaining members to form a heap as +c elements 1 to n-1 of t. + + if (n .gt. 1) then + i = 1 + out = t(1) + indxou = iorder(1) + ddum = t(n) + indxin = iorder(n) + +c Restore the heap + 30 continue + j = i+i + if (j .le. n-1) then + if (t(j+1) .lt. t(j)) j = j+1 + if (t(j) .lt. ddum ) then + t(i) = t(j) + iorder(i) = iorder(j) + i = j + goto 30 + endif + endif + t(i) = ddum + iorder(i) = indxin + +c Put the least member in t(n). + + t(n) = out + iorder(n) = indxou + endif + + return + + end + +c====================== The end of hpsolb ============================== + + subroutine lnsrlb(n, l, u, nbd, x, f, fold, gd, gdold, g, d, r, t, + + z, stp, dnorm, dtd, xstep, stpmx, iter, ifun, + + iback, nfgv, info, task, boxed, cnstnd, csave, + + isave, dsave) + + character*60 task, csave + logical boxed, cnstnd + integer n, iter, ifun, iback, nfgv, info, + + nbd(n), isave(2) + double precision f, fold, gd, gdold, stp, dnorm, dtd, xstep, + + stpmx, x(n), l(n), u(n), g(n), d(n), r(n), t(n), + + z(n), dsave(13) +c ********** +c +c Subroutine lnsrlb +c +c This subroutine calls subroutine dcsrch from the Minpack2 library +c to perform the line search. 
Subroutine dscrch is safeguarded so +c that all trial points lie within the feasible region. +c +c Subprograms called: +c +c Minpack2 Library ... dcsrch. +c +c Linpack ... dtrsl, ddot. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ********** + + integer i + double precision ddot,a1,a2 + double precision one,zero,big + parameter (one=1.0d0,zero=0.0d0,big=1.0d+10) + double precision ftol,gtol,xtol + parameter (ftol=1.0d-3,gtol=0.9d0,xtol=0.1d0) + + if (task(1:5) .eq. 'FG_LN') goto 556 + + dtd = ddot(n,d,1,d,1) + dnorm = sqrt(dtd) + +c Determine the maximum step length. + + stpmx = big + if (cnstnd) then + if (iter .eq. 0) then + stpmx = one + else + do 43 i = 1, n + a1 = d(i) + if (nbd(i) .ne. 0) then + if (a1 .lt. zero .and. nbd(i) .le. 2) then + a2 = l(i) - x(i) + if (a2 .ge. zero) then + stpmx = zero + else if (a1*stpmx .lt. a2) then + stpmx = a2/a1 + endif + else if (a1 .gt. zero .and. nbd(i) .ge. 2) then + a2 = u(i) - x(i) + if (a2 .le. zero) then + stpmx = zero + else if (a1*stpmx .gt. a2) then + stpmx = a2/a1 + endif + endif + endif + 43 continue + endif + endif + + if (iter .eq. 0 .and. .not. boxed) then + stp = min(one/dnorm, stpmx) + else + stp = one + endif + + call dcopy(n,x,1,t,1) + call dcopy(n,g,1,r,1) + fold = f + ifun = 0 + iback = 0 + csave = 'START' + 556 continue + gd = ddot(n,g,1,d,1) + if (ifun .eq. 0) then + gdold=gd + if (gd .ge. zero) then +c the directional derivative >=0. +c Line search is impossible. + write(6,*)' ascent direction in projection gd = ', gd + info = -4 + return + endif + endif + + call dcsrch(f,gd,stp,ftol,gtol,xtol,zero,stpmx,csave,isave,dsave) + + xstep = stp*dnorm + if (csave(1:4) .ne. 'CONV' .and. csave(1:4) .ne. 
'WARN') then + task = 'FG_LNSRCH' + ifun = ifun + 1 + nfgv = nfgv + 1 + iback = ifun - 1 + if (stp .eq. one) then + call dcopy(n,z,1,x,1) + else + do 41 i = 1, n + x(i) = stp*d(i) + t(i) + 41 continue + endif + else + task = 'NEW_X' + endif + + return + + end + +c======================= The end of lnsrlb ============================= + + subroutine matupd(n, m, ws, wy, sy, ss, d, r, itail, + + iupdat, col, head, theta, rr, dr, stp, dtd) + + integer n, m, itail, iupdat, col, head + double precision theta, rr, dr, stp, dtd, d(n), r(n), + + ws(n, m), wy(n, m), sy(m, m), ss(m, m) + +c ************ +c +c Subroutine matupd +c +c This subroutine updates matrices WS and WY, and forms the +c middle matrix in B. +c +c Subprograms called: +c +c Linpack ... dcopy, ddot. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer j,pointr + double precision ddot + double precision one + parameter (one=1.0d0) + +c Set pointers for matrices WS and WY. + + if (iupdat .le. m) then + col = iupdat + itail = mod(head+iupdat-2,m) + 1 + else + itail = mod(itail,m) + 1 + head = mod(head,m) + 1 + endif + +c Update matrices WS and WY. + + call dcopy(n,d,1,ws(1,itail),1) + call dcopy(n,r,1,wy(1,itail),1) + +c Set theta=yy/ys. + + theta = rr/dr + +c Form the middle matrix in B. + +c update the upper triangle of SS, +c and the lower triangle of SY: + if (iupdat .gt. 
m) then +c move old information + do 50 j = 1, col - 1 + call dcopy(j,ss(2,j+1),1,ss(1,j),1) + call dcopy(col-j,sy(j+1,j+1),1,sy(j,j),1) + 50 continue + endif +c add new information: the last row of SY +c and the last column of SS: + pointr = head + do 51 j = 1, col - 1 + sy(col,j) = ddot(n,d,1,wy(1,pointr),1) + ss(j,col) = ddot(n,ws(1,pointr),1,d,1) + pointr = mod(pointr,m) + 1 + 51 continue + if (stp .eq. one) then + ss(col,col) = dtd + else + ss(col,col) = stp*stp*dtd + endif + sy(col,col) = dr + + return + + end + +c======================= The end of matupd ============================= + + subroutine prn1lb(n, m, l, u, x, iprint, itfile, epsmch) + + integer n, m, iprint, itfile + double precision epsmch, x(n), l(n), u(n) + +c ************ +c +c Subroutine prn1lb +c +c This subroutine prints the input data, initial point, upper and +c lower bounds of each variable, machine precision, as well as +c the headings of the output. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer i + + if (iprint .ge. 0) then + write (6,7001) epsmch + write (6,*) 'N = ',n,' M = ',m + if (iprint .ge. 1) then + write (itfile,2001) epsmch + write (itfile,*)'N = ',n,' M = ',m + write (itfile,9001) + if (iprint .gt. 
100) then + write (6,1004) 'L =',(l(i),i = 1,n) + write (6,1004) 'X0 =',(x(i),i = 1,n) + write (6,1004) 'U =',(u(i),i = 1,n) + endif + endif + endif + + 1004 format (/,a4, 1p, 6(1x,d11.4),/,(4x,1p,6(1x,d11.4))) + 2001 format ('RUNNING THE L-BFGS-B CODE',/,/, + + 'it = iteration number',/, + + 'nf = number of function evaluations',/, + + 'nseg = number of segments explored during the Cauchy search',/, + + 'nact = number of active bounds at the generalized Cauchy point' + + ,/, + + 'sub = manner in which the subspace minimization terminated:' + + ,/,' con = converged, bnd = a bound was reached',/, + + 'itls = number of iterations performed in the line search',/, + + 'stepl = step length used',/, + + 'tstep = norm of the displacement (total step)',/, + + 'projg = norm of the projected gradient',/, + + 'f = function value',/,/, + + ' * * *',/,/, + + 'Machine precision =',1p,d10.3) + 7001 format ('RUNNING THE L-BFGS-B CODE',/,/, + + ' * * *',/,/, + + 'Machine precision =',1p,d10.3) + 9001 format (/,3x,'it',3x,'nf',2x,'nseg',2x,'nact',2x,'sub',2x,'itls', + + 2x,'stepl',4x,'tstep',5x,'projg',8x,'f') + + return + + end + +c======================= The end of prn1lb ============================= + + subroutine prn2lb(n, x, f, g, iprint, itfile, iter, nfgv, nact, + + sbgnrm, nseg, word, iword, iback, stp, xstep) + + character*3 word + integer n, iprint, itfile, iter, nfgv, nact, nseg, + + iword, iback + double precision f, sbgnrm, stp, xstep, x(n), g(n) + +c ************ +c +c Subroutine prn2lb +c +c This subroutine prints out new information after a successful +c line search. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer i,imod + +c 'word' records the status of subspace solutions. + if (iword .eq. 
0) then +c the subspace minimization converged. + word = 'con' + else if (iword .eq. 1) then +c the subspace minimization stopped at a bound. + word = 'bnd' + else if (iword .eq. 5) then +c the truncated Newton step has been used. + word = 'TNT' + else + word = '---' + endif + if (iprint .ge. 99) then + write (6,*) 'LINE SEARCH',iback,' times; norm of step = ',xstep + write (6,2001) iter,f,sbgnrm + if (iprint .gt. 100) then + write (6,1004) 'X =',(x(i), i = 1, n) + write (6,1004) 'G =',(g(i), i = 1, n) + endif + else if (iprint .gt. 0) then + imod = mod(iter,iprint) + if (imod .eq. 0) write (6,2001) iter,f,sbgnrm + endif + if (iprint .ge. 1) write (itfile,3001) + + iter,nfgv,nseg,nact,word,iback,stp,xstep,sbgnrm,f + + 1004 format (/,a4, 1p, 6(1x,d11.4),/,(4x,1p,6(1x,d11.4))) + 2001 format + + (/,'At iterate',i5,4x,'f= ',1p,d12.5,4x,'|proj g|= ',1p,d12.5) + 3001 format(2(1x,i4),2(1x,i5),2x,a3,1x,i4,1p,2(2x,d8.1),1p,2(1x,f10.3)) + + return + + end + +c======================= The end of prn2lb ============================= + + subroutine prn3lb(n, x, f, task, iprint, info, itfile, + + iter, nfgv, nintol, nskip, nact, sbgnrm, + + time, nseg, word, iback, stp, xstep, k, + + cachyt, sbtime, lnscht) + + character*60 task + character*3 word + integer n, iprint, info, itfile, iter, nfgv, nintol, + + nskip, nact, nseg, iback, k + double precision f, sbgnrm, time, stp, xstep, cachyt, sbtime, + + lnscht, x(n) + +c ************ +c +c Subroutine prn3lb +c +c This subroutine prints out information when either a built-in +c convergence test is satisfied or when an error message is +c generated. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer i + + if (task(1:5) .eq. 'ERROR') goto 999 + + if (iprint .ge. 
0) then + write (6,3003) + write (6,3004) + write(6,3005) n,iter,nfgv,nintol,nskip,nact,sbgnrm,f + if (iprint .ge. 100) then + write (6,1004) 'X =',(x(i),i = 1,n) + endif + if (iprint .ge. 1) write (6,*) ' F =',f + endif + 999 continue + if (iprint .ge. 0) then + write (6,3009) task + if (info .ne. 0) then + if (info .eq. -1) write (6,9011) + if (info .eq. -2) write (6,9012) + if (info .eq. -3) write (6,9013) + if (info .eq. -4) write (6,9014) + if (info .eq. -5) write (6,9015) + if (info .eq. -6) write (6,*)' Input nbd(',k,') is invalid.' + if (info .eq. -7) + + write (6,*)' l(',k,') > u(',k,'). No feasible solution.' + if (info .eq. -8) write (6,9018) + if (info .eq. -9) write (6,9019) + endif + if (iprint .ge. 1) write (6,3007) cachyt,sbtime,lnscht + write (6,3008) time + if (iprint .ge. 1) then + if (info .eq. -4 .or. info .eq. -9) then + write (itfile,3002) + + iter,nfgv,nseg,nact,word,iback,stp,xstep + endif + write (itfile,3009) task + if (info .ne. 0) then + if (info .eq. -1) write (itfile,9011) + if (info .eq. -2) write (itfile,9012) + if (info .eq. -3) write (itfile,9013) + if (info .eq. -4) write (itfile,9014) + if (info .eq. -5) write (itfile,9015) + if (info .eq. -8) write (itfile,9018) + if (info .eq. 
-9) write (itfile,9019) + endif + write (itfile,3008) time + endif + endif + + 1004 format (/,a4, 1p, 6(1x,d11.4),/,(4x,1p,6(1x,d11.4))) + 3002 format(2(1x,i4),2(1x,i5),2x,a3,1x,i4,1p,2(2x,d8.1),6x,'-',10x,'-') + 3003 format (/, + + ' * * *',/,/, + + 'Tit = total number of iterations',/, + + 'Tnf = total number of function evaluations',/, + + 'Tnint = total number of segments explored during', + + ' Cauchy searches',/, + + 'Skip = number of BFGS updates skipped',/, + + 'Nact = number of active bounds at final generalized', + + ' Cauchy point',/, + + 'Projg = norm of the final projected gradient',/, + + 'F = final function value',/,/, + + ' * * *') + 3004 format (/,3x,'N',4x,'Tit',5x,'Tnf',2x,'Tnint',2x, + + 'Skip',2x,'Nact',5x,'Projg',8x,'F') + 3005 format (i5,2(1x,i6),(1x,i6),(2x,i4),(1x,i5),1p,2(2x,d10.3)) + 3007 format (/,' Cauchy time',1p,e10.3,' seconds.',/ + + ' Subspace minimization time',1p,e10.3,' seconds.',/ + + ' Line search time',1p,e10.3,' seconds.') + 3008 format (/,' Total User time',1p,e10.3,' seconds.',/) + 3009 format (/,a60) + 9011 format (/, + +' Matrix in 1st Cholesky factorization in formk is not Pos. Def.') + 9012 format (/, + +' Matrix in 2st Cholesky factorization in formk is not Pos. Def.') + 9013 format (/, + +' Matrix in the Cholesky factorization in formt is not Pos. Def.') + 9014 format (/, + +' Derivative >= 0, backtracking line search impossible.',/, + +' Previous x, f and g restored.',/, + +' Possible causes: 1 error in function or gradient evaluation;',/, + +' 2 rounding errors dominate computation.') + 9015 format (/, + +' Warning: more than 10 function and gradient',/, + +' evaluations in the last line search. Termination',/, + +' may possibly be caused by a bad search direction.') + 9018 format (/,' The triangular system is singular.') + 9019 format (/, + +' Line search cannot locate an adequate point after 20 function',/ + +,' and gradient evaluations. 
Previous x, f and g restored.',/, + +' Possible causes: 1 error in function or gradient evaluation;',/, + +' 2 rounding error dominate computation.') + + return + + end + +c======================= The end of prn3lb ============================= + + subroutine projgr(n, l, u, nbd, x, g, sbgnrm) + + integer n, nbd(n) + double precision sbgnrm, x(n), l(n), u(n), g(n) + +c ************ +c +c Subroutine projgr +c +c This subroutine computes the infinity norm of the projected +c gradient. +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer i + double precision gi + double precision one,zero + parameter (one=1.0d0,zero=0.0d0) + + sbgnrm = zero + do 15 i = 1, n + gi = g(i) + if (nbd(i) .ne. 0) then + if (gi .lt. zero) then + if (nbd(i) .ge. 2) gi = max((x(i)-u(i)),gi) + else + if (nbd(i) .le. 2) gi = min((x(i)-l(i)),gi) + endif + endif + sbgnrm = max(sbgnrm,abs(gi)) + 15 continue + + return + + end + +c======================= The end of projgr ============================= + + subroutine subsm ( n, m, nsub, ind, l, u, nbd, x, d, xp, ws, wy, + + theta, xx, gg, + + col, head, iword, wv, wn, iprint, info ) + implicit none + integer n, m, nsub, col, head, iword, iprint, info, + + ind(nsub), nbd(n) + double precision theta, + + l(n), u(n), x(n), d(n), xp(n), xx(n), gg(n), + + ws(n, m), wy(n, m), + + wv(2*m), wn(2*m, 2*m) + +c ********************************************************************** +c +c This routine contains the major changes in the updated version. +c The changes are described in the accompanying paper +c +c Jose Luis Morales, Jorge Nocedal +c ""Remark On Algorithm 788: L-BFGS-B: Fortran Subroutines for Large-Scale +c Bound Constrained Optimization"". Decemmber 27, 2010. +c +c J.L. 
Morales Departamento de Matematicas, +c Instituto Tecnologico Autonomo de Mexico +c Mexico D.F. +c +c J, Nocedal Department of Electrical Engineering and +c Computer Science. +c Northwestern University. Evanston, IL. USA +c +c January 17, 2011 +c +c ********************************************************************** +c +c +c Subroutine subsm +c +c Given xcp, l, u, r, an index set that specifies +c the active set at xcp, and an l-BFGS matrix B +c (in terms of WY, WS, SY, WT, head, col, and theta), +c this subroutine computes an approximate solution +c of the subspace problem +c +c (P) min Q(x) = r'(x-xcp) + 1/2 (x-xcp)' B (x-xcp) +c +c subject to l<=x<=u +c x_i=xcp_i for all i in A(xcp) +c +c along the subspace unconstrained Newton direction +c +c d = -(Z'BZ)^(-1) r. +c +c The formula for the Newton direction, given the L-BFGS matrix +c and the Sherman-Morrison formula, is +c +c d = (1/theta)r + (1/theta*2) Z'WK^(-1)W'Z r. +c +c where +c K = [-D -Y'ZZ'Y/theta L_a'-R_z' ] +c [L_a -R_z theta*S'AA'S ] +c +c Note that this procedure for computing d differs +c from that described in [1]. One can show that the matrix K is +c equal to the matrix M^[-1]N in that paper. +c +c n is an integer variable. +c On entry n is the dimension of the problem. +c On exit n is unchanged. +c +c m is an integer variable. +c On entry m is the maximum number of variable metric corrections +c used to define the limited memory matrix. +c On exit m is unchanged. +c +c nsub is an integer variable. +c On entry nsub is the number of free variables. +c On exit nsub is unchanged. +c +c ind is an integer array of dimension nsub. +c On entry ind specifies the coordinate indices of free variables. +c On exit ind is unchanged. +c +c l is a double precision array of dimension n. +c On entry l is the lower bound of x. +c On exit l is unchanged. +c +c u is a double precision array of dimension n. +c On entry u is the upper bound of x. +c On exit u is unchanged. 
+c +c nbd is a integer array of dimension n. +c On entry nbd represents the type of bounds imposed on the +c variables, and must be specified as follows: +c nbd(i)=0 if x(i) is unbounded, +c 1 if x(i) has only a lower bound, +c 2 if x(i) has both lower and upper bounds, and +c 3 if x(i) has only an upper bound. +c On exit nbd is unchanged. +c +c x is a double precision array of dimension n. +c On entry x specifies the Cauchy point xcp. +c On exit x(i) is the minimizer of Q over the subspace of +c free variables. +c +c d is a double precision array of dimension n. +c On entry d is the reduced gradient of Q at xcp. +c On exit d is the Newton direction of Q. +c +c xp is a double precision array of dimension n. +c used to safeguard the projected Newton direction +c +c xx is a double precision array of dimension n +c On entry it holds the current iterate +c On output it is unchanged + +c gg is a double precision array of dimension n +c On entry it holds the gradient at the current iterate +c On output it is unchanged +c +c ws and wy are double precision arrays; +c theta is a double precision variable; +c col is an integer variable; +c head is an integer variable. +c On entry they store the information defining the +c limited memory BFGS matrix: +c ws(n,m) stores S, a set of s-vectors; +c wy(n,m) stores Y, a set of y-vectors; +c theta is the scaling factor specifying B_0 = theta I; +c col is the number of variable metric corrections stored; +c head is the location of the 1st s- (or y-) vector in S (or Y). +c On exit they are unchanged. +c +c iword is an integer variable. +c On entry iword is unspecified. +c On exit iword specifies the status of the subspace solution. +c iword = 0 if the solution is in the box, +c 1 if some bound is encountered. +c +c wv is a double precision working array of dimension 2m. +c +c wn is a double precision array of dimension 2m x 2m. 
+c On entry the upper triangle of wn stores the LEL^T factorization +c of the indefinite matrix +c +c K = [-D -Y'ZZ'Y/theta L_a'-R_z' ] +c [L_a -R_z theta*S'AA'S ] +c where E = [-I 0] +c [ 0 I] +c On exit wn is unchanged. +c +c iprint is an INTEGER variable that must be set by the user. +c It controls the frequency and type of output generated: +c iprint<0 no output is generated; +c iprint=0 print only one line at the last iteration; +c 0100 print details of every iteration including x and g; +c When iprint > 0, the file iterate.dat will be created to +c summarize the iteration. +c +c info is an integer variable. +c On entry info is unspecified. +c On exit info = 0 for normal return, +c = nonzero for abnormal return +c when the matrix K is ill-conditioned. +c +c Subprograms called: +c +c Linpack dtrsl. +c +c +c References: +c +c [1] R. H. Byrd, P. Lu, J. Nocedal and C. Zhu, ``A limited +c memory algorithm for bound constrained optimization'', +c SIAM J. Scientific Computing 16 (1995), no. 5, pp. 1190--1208. +c +c +c +c * * * +c +c NEOS, November 1994. (Latest revision June 1996.) +c Optimization Technology Center. +c Argonne National Laboratory and Northwestern University. +c Written by +c Ciyou Zhu +c in collaboration with R.H. Byrd, P. Lu-Chen and J. Nocedal. +c +c +c ************ + + integer pointr,m2,col2,ibd,jy,js,i,j,k + double precision alpha, xk, dk, temp1, temp2 + double precision one,zero + parameter (one=1.0d0,zero=0.0d0) +c + double precision dd_p + + if (nsub .le. 0) return + if (iprint .ge. 99) write (6,1001) + +c Compute wv = W'Zd. + + pointr = head + do 20 i = 1, col + temp1 = zero + temp2 = zero + do 10 j = 1, nsub + k = ind(j) + temp1 = temp1 + wy(k,pointr)*d(j) + temp2 = temp2 + ws(k,pointr)*d(j) + 10 continue + wv(i) = temp1 + wv(col + i) = theta*temp2 + pointr = mod(pointr,m) + 1 + 20 continue + +c Compute wv:=K^(-1)wv. 
+ + m2 = 2*m + col2 = 2*col +c call dtrsl(wn,m2,col2,wv,11,info) + call dtrtrs('U', 'T', 'N', col2, 1, wn, m2, wv, col2, info) + if (info .ne. 0) return + do 25 i = 1, col + wv(i) = -wv(i) + 25 continue +c call dtrsl(wn,m2,col2,wv,01,info) + call dtrtrs('U', 'N', 'N', col2, 1, wn, m2, wv, col2, info) + if (info .ne. 0) return + +c Compute d = (1/theta)d + (1/theta**2)Z'W wv. + + pointr = head + do 40 jy = 1, col + js = col + jy + do 30 i = 1, nsub + k = ind(i) + d(i) = d(i) + wy(k,pointr)*wv(jy)/theta + + + ws(k,pointr)*wv(js) + 30 continue + pointr = mod(pointr,m) + 1 + 40 continue + + call dscal( nsub, one/theta, d, 1 ) +c +c----------------------------------------------------------------- +c Let us try the projection, d is the Newton direction + + iword = 0 + + call dcopy ( n, x, 1, xp, 1 ) +c + do 50 i=1, nsub + k = ind(i) + dk = d(i) + xk = x(k) + if ( nbd(k) .ne. 0 ) then +c + if ( nbd(k).eq.1 ) then ! lower bounds only + x(k) = max( l(k), xk + dk ) + if ( x(k).eq.l(k) ) iword = 1 + else +c + if ( nbd(k).eq.2 ) then ! upper and lower bounds + xk = max( l(k), xk + dk ) + x(k) = min( u(k), xk ) + if ( x(k).eq.l(k) .or. x(k).eq.u(k) ) iword = 1 + else +c + if ( nbd(k).eq.3 ) then ! upper bounds only + x(k) = min( u(k), xk + dk ) + if ( x(k).eq.u(k) ) iword = 1 + end if + end if + end if +c + else ! free variables + x(k) = xk + dk + end if + 50 continue +c + if ( iword.eq.0 ) then + go to 911 + end if +c +c check sign of the directional derivative +c + dd_p = zero + do 55 i=1, n + dd_p = dd_p + (x(i) - xx(i))*gg(i) + 55 continue + if ( dd_p .gt.zero ) then + call dcopy( n, xp, 1, x, 1 ) + write(6,*) ' Positive dir derivative in projection ' + write(6,*) ' Using the backtracking step ' + else + go to 911 + endif +c +c----------------------------------------------------------------- +c + alpha = one + temp1 = alpha + ibd = 0 + do 60 i = 1, nsub + k = ind(i) + dk = d(i) + if (nbd(k) .ne. 0) then + if (dk .lt. zero .and. nbd(k) .le. 
2) then + temp2 = l(k) - x(k) + if (temp2 .ge. zero) then + temp1 = zero + else if (dk*alpha .lt. temp2) then + temp1 = temp2/dk + endif + else if (dk .gt. zero .and. nbd(k) .ge. 2) then + temp2 = u(k) - x(k) + if (temp2 .le. zero) then + temp1 = zero + else if (dk*alpha .gt. temp2) then + temp1 = temp2/dk + endif + endif + if (temp1 .lt. alpha) then + alpha = temp1 + ibd = i + endif + endif + 60 continue + + if (alpha .lt. one) then + dk = d(ibd) + k = ind(ibd) + if (dk .gt. zero) then + x(k) = u(k) + d(ibd) = zero + else if (dk .lt. zero) then + x(k) = l(k) + d(ibd) = zero + endif + endif + do 70 i = 1, nsub + k = ind(i) + x(k) = x(k) + alpha*d(i) + 70 continue +cccccc + 911 continue + + if (iprint .ge. 99) write (6,1004) + + 1001 format (/,'----------------SUBSM entered-----------------',/) + 1004 format (/,'----------------exit SUBSM --------------------',/) + + return + + end +c====================== The end of subsm =============================== + + subroutine dcsrch(f,g,stp,ftol,gtol,xtol,stpmin,stpmax, + + task,isave,dsave) + character*(*) task + integer isave(2) + double precision f,g,stp,ftol,gtol,xtol,stpmin,stpmax + double precision dsave(13) +c ********** +c +c Subroutine dcsrch +c +c This subroutine finds a step that satisfies a sufficient +c decrease condition and a curvature condition. +c +c Each call of the subroutine updates an interval with +c endpoints stx and sty. The interval is initially chosen +c so that it contains a minimizer of the modified function +c +c psi(stp) = f(stp) - f(0) - ftol*stp*f'(0). +c +c If psi(stp) <= 0 and f'(stp) >= 0 for some step, then the +c interval is chosen so that it contains a minimizer of f. +c +c The algorithm is designed to find a step that satisfies +c the sufficient decrease condition +c +c f(stp) <= f(0) + ftol*stp*f'(0), +c +c and the curvature condition +c +c abs(f'(stp)) <= gtol*abs(f'(0)). 
+c +c If ftol is less than gtol and if, for example, the function +c is bounded below, then there is always a step which satisfies +c both conditions. +c +c If no step can be found that satisfies both conditions, then +c the algorithm stops with a warning. In this case stp only +c satisfies the sufficient decrease condition. +c +c A typical invocation of dcsrch has the following outline: +c +c task = 'START' +c 10 continue +c call dcsrch( ... ) +c if (task .eq. 'FG') then +c Evaluate the function and the gradient at stp +c goto 10 +c end if +c +c NOTE: The user must no alter work arrays between calls. +c +c The subroutine statement is +c +c subroutine dcsrch(f,g,stp,ftol,gtol,xtol,stpmin,stpmax, +c task,isave,dsave) +c where +c +c f is a double precision variable. +c On initial entry f is the value of the function at 0. +c On subsequent entries f is the value of the +c function at stp. +c On exit f is the value of the function at stp. +c +c g is a double precision variable. +c On initial entry g is the derivative of the function at 0. +c On subsequent entries g is the derivative of the +c function at stp. +c On exit g is the derivative of the function at stp. +c +c stp is a double precision variable. +c On entry stp is the current estimate of a satisfactory +c step. On initial entry, a positive initial estimate +c must be provided. +c On exit stp is the current estimate of a satisfactory step +c if task = 'FG'. If task = 'CONV' then stp satisfies +c the sufficient decrease and curvature condition. +c +c ftol is a double precision variable. +c On entry ftol specifies a nonnegative tolerance for the +c sufficient decrease condition. +c On exit ftol is unchanged. +c +c gtol is a double precision variable. +c On entry gtol specifies a nonnegative tolerance for the +c curvature condition. +c On exit gtol is unchanged. +c +c xtol is a double precision variable. +c On entry xtol specifies a nonnegative relative tolerance +c for an acceptable step. 
The subroutine exits with a +c warning if the relative difference between sty and stx +c is less than xtol. +c On exit xtol is unchanged. +c +c stpmin is a double precision variable. +c On entry stpmin is a nonnegative lower bound for the step. +c On exit stpmin is unchanged. +c +c stpmax is a double precision variable. +c On entry stpmax is a nonnegative upper bound for the step. +c On exit stpmax is unchanged. +c +c task is a character variable of length at least 60. +c On initial entry task must be set to 'START'. +c On exit task indicates the required action: +c +c If task(1:2) = 'FG' then evaluate the function and +c derivative at stp and call dcsrch again. +c +c If task(1:4) = 'CONV' then the search is successful. +c +c If task(1:4) = 'WARN' then the subroutine is not able +c to satisfy the convergence conditions. The exit value of +c stp contains the best point found during the search. +c +c If task(1:5) = 'ERROR' then there is an error in the +c input arguments. +c +c On exit with convergence, a warning or an error, the +c variable task contains additional information. +c +c isave is an integer work array of dimension 2. +c +c dsave is a double precision work array of dimension 13. +c +c Subprograms called +c +c MINPACK-2 ... dcstep +c +c MINPACK-1 Project. June 1983. +c Argonne National Laboratory. +c Jorge J. More' and David J. Thuente. +c +c MINPACK-2 Project. October 1993. +c Argonne National Laboratory and University of Minnesota. +c Brett M. Averick, Richard G. Carter, and Jorge J. More'. +c +c ********** + double precision zero,p5,p66 + parameter(zero=0.0d0,p5=0.5d0,p66=0.66d0) + double precision xtrapl,xtrapu + parameter(xtrapl=1.1d0,xtrapu=4.0d0) + + logical brackt + integer stage + double precision finit,ftest,fm,fx,fxm,fy,fym,ginit,gtest, + + gm,gx,gxm,gy,gym,stx,sty,stmin,stmax,width,width1 + +c Initialization block. + + if (task(1:5) .eq. 'START') then + +c Check the input arguments for errors. + + if (stp .lt. stpmin) task = 'ERROR: STP .LT. 
STPMIN' + if (stp .gt. stpmax) task = 'ERROR: STP .GT. STPMAX' + if (g .ge. zero) task = 'ERROR: INITIAL G .GE. ZERO' + if (ftol .lt. zero) task = 'ERROR: FTOL .LT. ZERO' + if (gtol .lt. zero) task = 'ERROR: GTOL .LT. ZERO' + if (xtol .lt. zero) task = 'ERROR: XTOL .LT. ZERO' + if (stpmin .lt. zero) task = 'ERROR: STPMIN .LT. ZERO' + if (stpmax .lt. stpmin) task = 'ERROR: STPMAX .LT. STPMIN' + +c Exit if there are errors on input. + + if (task(1:5) .eq. 'ERROR') return + +c Initialize local variables. + + brackt = .false. + stage = 1 + finit = f + ginit = g + gtest = ftol*ginit + width = stpmax - stpmin + width1 = width/p5 + +c The variables stx, fx, gx contain the values of the step, +c function, and derivative at the best step. +c The variables sty, fy, gy contain the value of the step, +c function, and derivative at sty. +c The variables stp, f, g contain the values of the step, +c function, and derivative at stp. + + stx = zero + fx = finit + gx = ginit + sty = zero + fy = finit + gy = ginit + stmin = zero + stmax = stp + xtrapu*stp + task = 'FG' + + goto 1000 + + else + +c Restore local variables. + + if (isave(1) .eq. 1) then + brackt = .true. + else + brackt = .false. + endif + stage = isave(2) + ginit = dsave(1) + gtest = dsave(2) + gx = dsave(3) + gy = dsave(4) + finit = dsave(5) + fx = dsave(6) + fy = dsave(7) + stx = dsave(8) + sty = dsave(9) + stmin = dsave(10) + stmax = dsave(11) + width = dsave(12) + width1 = dsave(13) + + endif + +c If psi(stp) <= 0 and f'(stp) >= 0 for some step, then the +c algorithm enters the second stage. + + ftest = finit + stp*gtest + if (stage .eq. 1 .and. f .le. ftest .and. g .ge. zero) + + stage = 2 + +c Test for warnings. + + if (brackt .and. (stp .le. stmin .or. stp .ge. stmax)) + + task = 'WARNING: ROUNDING ERRORS PREVENT PROGRESS' + if (brackt .and. stmax - stmin .le. xtol*stmax) + + task = 'WARNING: XTOL TEST SATISFIED' + if (stp .eq. stpmax .and. f .le. ftest .and. g .le. 
gtest) + + task = 'WARNING: STP = STPMAX' + if (stp .eq. stpmin .and. (f .gt. ftest .or. g .ge. gtest)) + + task = 'WARNING: STP = STPMIN' + +c Test for convergence. + + if (f .le. ftest .and. abs(g) .le. gtol*(-ginit)) + + task = 'CONVERGENCE' + +c Test for termination. + + if (task(1:4) .eq. 'WARN' .or. task(1:4) .eq. 'CONV') goto 1000 + +c A modified function is used to predict the step during the +c first stage if a lower function value has been obtained but +c the decrease is not sufficient. + + if (stage .eq. 1 .and. f .le. fx .and. f .gt. ftest) then + +c Define the modified function and derivative values. + + fm = f - stp*gtest + fxm = fx - stx*gtest + fym = fy - sty*gtest + gm = g - gtest + gxm = gx - gtest + gym = gy - gtest + +c Call dcstep to update stx, sty, and to compute the new step. + + call dcstep(stx,fxm,gxm,sty,fym,gym,stp,fm,gm, + + brackt,stmin,stmax) + +c Reset the function and derivative values for f. + + fx = fxm + stx*gtest + fy = fym + sty*gtest + gx = gxm + gtest + gy = gym + gtest + + else + +c Call dcstep to update stx, sty, and to compute the new step. + + call dcstep(stx,fx,gx,sty,fy,gy,stp,f,g, + + brackt,stmin,stmax) + + endif + +c Decide if a bisection step is needed. + + if (brackt) then + if (abs(sty-stx) .ge. p66*width1) stp = stx + p5*(sty - stx) + width1 = width + width = abs(sty-stx) + endif + +c Set the minimum and maximum steps allowed for stp. + + if (brackt) then + stmin = min(stx,sty) + stmax = max(stx,sty) + else + stmin = stp + xtrapl*(stp - stx) + stmax = stp + xtrapu*(stp - stx) + endif + +c Force the step to be within the bounds stpmax and stpmin. + + stp = max(stp,stpmin) + stp = min(stp,stpmax) + +c If further progress is not possible, let stp be the best +c point obtained during the search. + + if (brackt .and. (stp .le. stmin .or. stp .ge. stmax) + + .or. (brackt .and. stmax-stmin .le. xtol*stmax)) stp = stx + +c Obtain another function and derivative. 
+ + task = 'FG' + + 1000 continue + +c Save local variables. + + if (brackt) then + isave(1) = 1 + else + isave(1) = 0 + endif + isave(2) = stage + dsave(1) = ginit + dsave(2) = gtest + dsave(3) = gx + dsave(4) = gy + dsave(5) = finit + dsave(6) = fx + dsave(7) = fy + dsave(8) = stx + dsave(9) = sty + dsave(10) = stmin + dsave(11) = stmax + dsave(12) = width + dsave(13) = width1 + + return + end + +c====================== The end of dcsrch ============================== + + subroutine dcstep(stx,fx,dx,sty,fy,dy,stp,fp,dp,brackt, + + stpmin,stpmax) + logical brackt + double precision stx,fx,dx,sty,fy,dy,stp,fp,dp,stpmin,stpmax +c ********** +c +c Subroutine dcstep +c +c This subroutine computes a safeguarded step for a search +c procedure and updates an interval that contains a step that +c satisfies a sufficient decrease and a curvature condition. +c +c The parameter stx contains the step with the least function +c value. If brackt is set to .true. then a minimizer has +c been bracketed in an interval with endpoints stx and sty. +c The parameter stp contains the current step. +c The subroutine assumes that if brackt is set to .true. then +c +c min(stx,sty) < stp < max(stx,sty), +c +c and that the derivative at stx is negative in the direction +c of the step. +c +c The subroutine statement is +c +c subroutine dcstep(stx,fx,dx,sty,fy,dy,stp,fp,dp,brackt, +c stpmin,stpmax) +c +c where +c +c stx is a double precision variable. +c On entry stx is the best step obtained so far and is an +c endpoint of the interval that contains the minimizer. +c On exit stx is the updated best step. +c +c fx is a double precision variable. +c On entry fx is the function at stx. +c On exit fx is the function at stx. +c +c dx is a double precision variable. +c On entry dx is the derivative of the function at +c stx. The derivative must be negative in the direction of +c the step, that is, dx and stp - stx must have opposite +c signs. +c On exit dx is the derivative of the function at stx. 
+c +c sty is a double precision variable. +c On entry sty is the second endpoint of the interval that +c contains the minimizer. +c On exit sty is the updated endpoint of the interval that +c contains the minimizer. +c +c fy is a double precision variable. +c On entry fy is the function at sty. +c On exit fy is the function at sty. +c +c dy is a double precision variable. +c On entry dy is the derivative of the function at sty. +c On exit dy is the derivative of the function at the exit sty. +c +c stp is a double precision variable. +c On entry stp is the current step. If brackt is set to .true. +c then on input stp must be between stx and sty. +c On exit stp is a new trial step. +c +c fp is a double precision variable. +c On entry fp is the function at stp +c On exit fp is unchanged. +c +c dp is a double precision variable. +c On entry dp is the the derivative of the function at stp. +c On exit dp is unchanged. +c +c brackt is an logical variable. +c On entry brackt specifies if a minimizer has been bracketed. +c Initially brackt must be set to .false. +c On exit brackt specifies if a minimizer has been bracketed. +c When a minimizer is bracketed brackt is set to .true. +c +c stpmin is a double precision variable. +c On entry stpmin is a lower bound for the step. +c On exit stpmin is unchanged. +c +c stpmax is a double precision variable. +c On entry stpmax is an upper bound for the step. +c On exit stpmax is unchanged. +c +c MINPACK-1 Project. June 1983 +c Argonne National Laboratory. +c Jorge J. More' and David J. Thuente. +c +c MINPACK-2 Project. October 1993. +c Argonne National Laboratory and University of Minnesota. +c Brett M. Averick and Jorge J. More'. +c +c ********** + double precision zero,p66,two,three + parameter(zero=0.0d0,p66=0.66d0,two=2.0d0,three=3.0d0) + + double precision gamma,p,q,r,s,sgnd,stpc,stpf,stpq,theta + + sgnd = dp*(dx/abs(dx)) + +c First case: A higher function value. The minimum is bracketed. 
+c If the cubic step is closer to stx than the quadratic step, the +c cubic step is taken, otherwise the average of the cubic and +c quadratic steps is taken. + + if (fp .gt. fx) then + theta = three*(fx - fp)/(stp - stx) + dx + dp + s = max(abs(theta),abs(dx),abs(dp)) + gamma = s*sqrt((theta/s)**2 - (dx/s)*(dp/s)) + if (stp .lt. stx) gamma = -gamma + p = (gamma - dx) + theta + q = ((gamma - dx) + gamma) + dp + r = p/q + stpc = stx + r*(stp - stx) + stpq = stx + ((dx/((fx - fp)/(stp - stx) + dx))/two)* + + (stp - stx) + if (abs(stpc-stx) .lt. abs(stpq-stx)) then + stpf = stpc + else + stpf = stpc + (stpq - stpc)/two + endif + brackt = .true. + +c Second case: A lower function value and derivatives of opposite +c sign. The minimum is bracketed. If the cubic step is farther from +c stp than the secant step, the cubic step is taken, otherwise the +c secant step is taken. + + else if (sgnd .lt. zero) then + theta = three*(fx - fp)/(stp - stx) + dx + dp + s = max(abs(theta),abs(dx),abs(dp)) + gamma = s*sqrt((theta/s)**2 - (dx/s)*(dp/s)) + if (stp .gt. stx) gamma = -gamma + p = (gamma - dp) + theta + q = ((gamma - dp) + gamma) + dx + r = p/q + stpc = stp + r*(stx - stp) + stpq = stp + (dp/(dp - dx))*(stx - stp) + if (abs(stpc-stp) .gt. abs(stpq-stp)) then + stpf = stpc + else + stpf = stpq + endif + brackt = .true. + +c Third case: A lower function value, derivatives of the same sign, +c and the magnitude of the derivative decreases. + + else if (abs(dp) .lt. abs(dx)) then + +c The cubic step is computed only if the cubic tends to infinity +c in the direction of the step or if the minimum of the cubic +c is beyond stp. Otherwise the cubic step is defined to be the +c secant step. + + theta = three*(fx - fp)/(stp - stx) + dx + dp + s = max(abs(theta),abs(dx),abs(dp)) + +c The case gamma = 0 only arises if the cubic does not tend +c to infinity in the direction of the step. + + gamma = s*sqrt(max(zero,(theta/s)**2-(dx/s)*(dp/s))) + if (stp .gt. 
stx) gamma = -gamma + p = (gamma - dp) + theta + q = (gamma + (dx - dp)) + gamma + r = p/q + if (r .lt. zero .and. gamma .ne. zero) then + stpc = stp + r*(stx - stp) + else if (stp .gt. stx) then + stpc = stpmax + else + stpc = stpmin + endif + stpq = stp + (dp/(dp - dx))*(stx - stp) + + if (brackt) then + +c A minimizer has been bracketed. If the cubic step is +c closer to stp than the secant step, the cubic step is +c taken, otherwise the secant step is taken. + + if (abs(stpc-stp) .lt. abs(stpq-stp)) then + stpf = stpc + else + stpf = stpq + endif + if (stp .gt. stx) then + stpf = min(stp+p66*(sty-stp),stpf) + else + stpf = max(stp+p66*(sty-stp),stpf) + endif + else + +c A minimizer has not been bracketed. If the cubic step is +c farther from stp than the secant step, the cubic step is +c taken, otherwise the secant step is taken. + + if (abs(stpc-stp) .gt. abs(stpq-stp)) then + stpf = stpc + else + stpf = stpq + endif + stpf = min(stpmax,stpf) + stpf = max(stpmin,stpf) + endif + +c Fourth case: A lower function value, derivatives of the same sign, +c and the magnitude of the derivative does not decrease. If the +c minimum is not bracketed, the step is either stpmin or stpmax, +c otherwise the cubic step is taken. + + else + if (brackt) then + theta = three*(fp - fy)/(sty - stp) + dy + dp + s = max(abs(theta),abs(dy),abs(dp)) + gamma = s*sqrt((theta/s)**2 - (dy/s)*(dp/s)) + if (stp .gt. sty) gamma = -gamma + p = (gamma - dp) + theta + q = ((gamma - dp) + gamma) + dy + r = p/q + stpc = stp + r*(sty - stp) + stpf = stpc + else if (stp .gt. stx) then + stpf = stpmax + else + stpf = stpmin + endif + endif + +c Update the interval which contains a minimizer. + + if (fp .gt. fx) then + sty = stp + fy = fp + dy = dp + else + if (sgnd .lt. zero) then + sty = stx + fy = fx + dy = dx + endif + stx = stp + fx = fp + dx = dp + endif + +c Compute the new step. 
+ + stp = stpf + + return + end + +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","src/lib/json-fortran/json_file_module.F90",".F90","121590","3041","!***************************************************************************************** +!> author: Jacob Williams +! license: BSD +! +! Higher-level [[json_file]] interface for the [[json_value]] type. +! +!### License +! * JSON-Fortran is released under a BSD-style license. +! See the [LICENSE](https://github.com/jacobwilliams/json-fortran/blob/master/LICENSE) +! file for details. + + module json_file_module + + use,intrinsic :: iso_fortran_env + use json_kinds + use json_parameters, only: unit2str + use json_string_utilities + use json_value_module + + implicit none + + private + +#include ""json_macros.inc"" + + !********************************************************* + !> author: Jacob Williams + ! date: 12/9/2013 + ! + ! The `json_file` is the main public class that is + ! used to open a file and get data from it. + ! + ! A `json_file` contains only two items: an instance of a [[json_core(type)]], + ! which is used for all data manipulation, and a [[json_value]] pointer, + ! which is used to construct the linked-list data structure. + ! Note that most methods in the `json_file` class are simply wrappers + ! to the lower-level routines in the [[json_value_module]]. + ! + !### Example + ! + !```fortran + ! program test + ! use json_module + ! implicit none + ! type(json_file) :: json + ! integer :: ival + ! real(real64) :: rval + ! character(len=:),allocatable :: cval + ! logical :: found + ! call json%initialize(compact_reals=.true.) + ! call json%load(filename='myfile.json') + ! call json%print() !print to the console + ! call json%get('var.i',ival,found) + ! call json%get('var.r(3)',rval,found) + ! call json%get('var.c',cval,found) + ! call json%destroy() + ! end program test + !``` + ! + !@note The `destroy()` method may be called to free the memory if necessary. + ! 
[[json_file(type)]] includes a finalizer that also calls + ! `destroy()` when the variable goes out of scope. + + type,public :: json_file + + private + + type(json_core) :: core !! The instance of the [[json_core(type)]] + !! factory used for this file. + type(json_value),pointer :: p => null() !! the JSON structure read from the file + + contains + + generic,public :: initialize => initialize_json_core_in_file,& + set_json_core_in_file + + procedure,public :: get_core => get_json_core_in_file + + !> + ! Load JSON from a file. + procedure,public :: load => json_file_load + + !> + ! The same as `load`, but only here for backward compatibility + procedure,public :: load_file => json_file_load + + !> + ! Load JSON from a string. + generic,public :: deserialize => MAYBEWRAP(json_file_load_from_string) + + !> + ! The same as `deserialize`, but only here for backward compatibility + generic,public :: load_from_string => MAYBEWRAP(json_file_load_from_string) + + !> + ! Print the [[json_value]] structure to an allocatable string + procedure,public :: serialize => json_file_print_to_string + + !> + ! The same as `serialize`, but only here for backward compatibility + procedure,public :: print_to_string => json_file_print_to_string + + procedure,public :: destroy => json_file_destroy + procedure,public :: nullify => json_file_nullify + procedure,public :: move => json_file_move_pointer + generic,public :: info => MAYBEWRAP(json_file_variable_info) + generic,public :: matrix_info => MAYBEWRAP(json_file_variable_matrix_info) + + !error checking: + procedure,public :: failed => json_file_failed + procedure,public :: print_error_message => json_file_print_error_message + procedure,public :: check_for_errors => json_file_check_for_errors + procedure,public :: clear_exceptions => json_file_clear_exceptions + + generic,public :: print => json_file_print_to_console, & + json_file_print_to_unit, & + json_file_print_to_filename + + !> + ! 
The same as `print`, but only here for backward compatibility + generic,public :: print_file => json_file_print_to_console, & + json_file_print_to_unit, & + json_file_print_to_filename + + !> + ! Rename a variable, specifying it by path + generic,public :: rename => MAYBEWRAP(json_file_rename) +#ifdef USE_UCS4 + generic,public :: rename => json_file_rename_path_ascii, & + json_file_rename_name_ascii +#endif + + !> + ! Verify that a path is valid + ! (i.e., a variable with this path exists in the file). + generic,public :: valid_path => MAYBEWRAP(json_file_valid_path) + + !> + ! Get a variable from a [[json_file(type)]], by specifying the path. + generic,public :: get => MAYBEWRAP(json_file_get_object), & + MAYBEWRAP(json_file_get_integer), & +#ifndef REAL32 + MAYBEWRAP(json_file_get_real32), & +#endif + MAYBEWRAP(json_file_get_real), & +#ifdef REAL128 + MAYBEWRAP(json_file_get_real64), & +#endif + MAYBEWRAP(json_file_get_logical), & + MAYBEWRAP(json_file_get_string), & + MAYBEWRAP(json_file_get_integer_vec), & +#ifndef REAL32 + MAYBEWRAP(json_file_get_real32_vec), & +#endif + MAYBEWRAP(json_file_get_real_vec), & +#ifdef REAL128 + MAYBEWRAP(json_file_get_real64_vec), & +#endif + MAYBEWRAP(json_file_get_logical_vec), & + MAYBEWRAP(json_file_get_string_vec), & + MAYBEWRAP(json_file_get_alloc_string_vec), & + json_file_get_root + + !> + ! Add a variable to a [[json_file(type)]], by specifying the path. + ! + !### Example + ! + !```fortran + ! program test + ! use json_module, rk=>json_rk, ik=>json_ik + ! implicit none + ! type(json_file) :: f + ! call f%initialize() ! specify whatever init options you want. + ! call f%add('inputs.t', 0.0_rk) + ! call f%add('inputs.x', [1.0_rk,2.0_rk,3.0_rk]) + ! call f%add('inputs.flag', .true.) + ! call f%print() ! print to the console + ! 
end program test + !``` + generic,public :: add => json_file_add, & + MAYBEWRAP(json_file_add_object), & + MAYBEWRAP(json_file_add_integer), & +#ifndef REAL32 + MAYBEWRAP(json_file_add_real32), & +#endif + MAYBEWRAP(json_file_add_real), & +#ifdef REAL128 + MAYBEWRAP(json_file_add_real64), & +#endif + MAYBEWRAP(json_file_add_logical), & + MAYBEWRAP(json_file_add_string), & + MAYBEWRAP(json_file_add_integer_vec), & +#ifndef REAL32 + MAYBEWRAP(json_file_add_real32_vec), & +#endif + MAYBEWRAP(json_file_add_real_vec), & +#ifdef REAL128 + MAYBEWRAP(json_file_add_real64_vec), & +#endif + MAYBEWRAP(json_file_add_logical_vec), & + MAYBEWRAP(json_file_add_string_vec) +#ifdef USE_UCS4 + generic,public :: add => json_file_add_string_path_ascii, & + json_file_add_string_value_ascii,& + json_file_add_string_vec_path_ascii,& + json_file_add_string_vec_vec_ascii +#endif + + !> + ! Update a scalar variable in a [[json_file(type)]], + ! by specifying the path. + ! + !@note These have been mostly supplanted by the `add` + ! methods, which do a similar thing (and can be used for + ! scalars and vectors, etc.) + generic,public :: update => MAYBEWRAP(json_file_update_integer), & + MAYBEWRAP(json_file_update_logical), & +#ifndef REAL32 + MAYBEWRAP(json_file_update_real32), & +#endif + MAYBEWRAP(json_file_update_real), & +#ifdef REAL128 + MAYBEWRAP(json_file_update_real64), & +#endif + MAYBEWRAP(json_file_update_string) +#ifdef USE_UCS4 + generic,public :: update => json_file_update_string_name_ascii, & + json_file_update_string_val_ascii +#endif + + !> + ! Remove a variable from a [[json_file(type)]] + ! by specifying the path. + generic,public :: remove => MAYBEWRAP(json_file_remove) + + !traverse + procedure,public :: traverse => json_file_traverse + + ! *************************************************** + ! operators + ! *************************************************** + + generic,public :: operator(.in.) 
=> MAYBEWRAP(json_file_valid_path_op) + procedure,pass(me) :: MAYBEWRAP(json_file_valid_path_op) + + generic,public :: assignment(=) => assign_json_file,& + assign_json_file_to_string,& + MAYBEWRAP(assign_string_to_json_file) + procedure :: assign_json_file + procedure,pass(me) :: assign_json_file_to_string + procedure :: MAYBEWRAP(assign_string_to_json_file) + + ! *************************************************** + ! private routines + ! *************************************************** + + !load from string: + procedure :: MAYBEWRAP(json_file_load_from_string) + + !initialize + procedure :: initialize_json_core_in_file + procedure :: set_json_core_in_file + + !get info: + procedure :: MAYBEWRAP(json_file_variable_info) + procedure :: MAYBEWRAP(json_file_variable_matrix_info) + + !rename: + procedure :: MAYBEWRAP(json_file_rename) +#ifdef USE_UCS4 + procedure :: json_file_rename_path_ascii + procedure :: json_file_rename_name_ascii +#endif + + !validate path: + procedure :: MAYBEWRAP(json_file_valid_path) + + !get: + procedure :: MAYBEWRAP(json_file_get_object) + procedure :: MAYBEWRAP(json_file_get_integer) +#ifndef REAL32 + procedure :: MAYBEWRAP(json_file_get_real32) +#endif + procedure :: MAYBEWRAP(json_file_get_real) +#ifdef REAL128 + procedure :: MAYBEWRAP(json_file_get_real64) +#endif + procedure :: MAYBEWRAP(json_file_get_logical) + procedure :: MAYBEWRAP(json_file_get_string) + procedure :: MAYBEWRAP(json_file_get_integer_vec) +#ifndef REAL32 + procedure :: MAYBEWRAP(json_file_get_real32_vec) +#endif + procedure :: MAYBEWRAP(json_file_get_real_vec) +#ifdef REAL128 + procedure :: MAYBEWRAP(json_file_get_real64_vec) +#endif + procedure :: MAYBEWRAP(json_file_get_logical_vec) + procedure :: MAYBEWRAP(json_file_get_string_vec) + procedure :: MAYBEWRAP(json_file_get_alloc_string_vec) + procedure :: json_file_get_root + + !add: + procedure :: json_file_add + procedure :: MAYBEWRAP(json_file_add_object) + procedure :: MAYBEWRAP(json_file_add_integer) 
+#ifndef REAL32 + procedure :: MAYBEWRAP(json_file_add_real32) +#endif + procedure :: MAYBEWRAP(json_file_add_real) +#ifdef REAL128 + procedure :: MAYBEWRAP(json_file_add_real64) +#endif + procedure :: MAYBEWRAP(json_file_add_logical) + procedure :: MAYBEWRAP(json_file_add_string) + procedure :: MAYBEWRAP(json_file_add_integer_vec) +#ifndef REAL32 + procedure :: MAYBEWRAP(json_file_add_real32_vec) +#endif + procedure :: MAYBEWRAP(json_file_add_real_vec) +#ifdef REAL128 + procedure :: MAYBEWRAP(json_file_add_real64_vec) +#endif + procedure :: MAYBEWRAP(json_file_add_logical_vec) + procedure :: MAYBEWRAP(json_file_add_string_vec) +#ifdef USE_UCS4 + procedure :: json_file_add_string_path_ascii + procedure :: json_file_add_string_value_ascii + procedure :: json_file_add_string_vec_path_ascii + procedure :: json_file_add_string_vec_vec_ascii +#endif + + !update: + procedure :: MAYBEWRAP(json_file_update_integer) + procedure :: MAYBEWRAP(json_file_update_logical) +#ifndef REAL32 + procedure :: MAYBEWRAP(json_file_update_real32) +#endif + procedure :: MAYBEWRAP(json_file_update_real) +#ifdef REAL128 + procedure :: MAYBEWRAP(json_file_update_real64) +#endif + procedure :: MAYBEWRAP(json_file_update_string) +#ifdef USE_UCS4 + procedure :: json_file_update_string_name_ascii + procedure :: json_file_update_string_val_ascii +#endif + + !remove: + procedure :: MAYBEWRAP(json_file_remove) + + !print: + procedure :: json_file_print_to_console + procedure :: json_file_print_to_unit + procedure :: json_file_print_to_filename + + final :: finalize_json_file + + end type json_file + !********************************************************* + + !********************************************************* + !> author: Izaak Beekman + ! date: 07/23/2015 + ! + ! Structure constructor to initialize a [[json_file(type)]] + ! object with an existing [[json_value]] object or a JSON + ! string, and either the [[json_core(type)]] settings or a + ! [[json_core(type)]] instance. + ! 
+ !### Example + ! + !```fortran + ! ... + ! type(json_file) :: my_file + ! type(json_value),pointer :: json_object + ! type(json_core) :: json_core_object + ! ... + ! ! Construct a json_object: + ! !could do this: + ! my_file = json_file(json_object) + ! !or: + ! my_file = json_file(json_object,verbose=.true.) + ! !or: + ! my_file = json_file('{""x"": [1]}',verbose=.true.) + ! !or: + ! my_file = json_file(json_object,json_core_object) + ! !or: + ! my_file = json_file('{""x"": [1]}',json_core_object) + !``` + interface json_file + module procedure initialize_json_file, & + initialize_json_file_v2, & + MAYBEWRAP(initialize_json_file_from_string), & + MAYBEWRAP(initialize_json_file_from_string_v2) + end interface + !************************************************************************************* + + contains +!***************************************************************************************** + +!***************************************************************************************** +!> +! Finalizer for [[json_file]] class. +! +! Just a wrapper for [[json_file_destroy]]. + + subroutine finalize_json_file(me) + + implicit none + + type(json_file),intent(inout) :: me + + call me%destroy(destroy_core=.true.) + + end subroutine finalize_json_file +!***************************************************************************************** + +!***************************************************************************************** +!> +! Check error status in the file. + + pure function json_file_failed(me) result(failed) + + implicit none + + class(json_file),intent(in) :: me + logical(LK) :: failed !! will be true if there has been an error. + + failed = me%core%failed() + + end function json_file_failed +!***************************************************************************************** + +!***************************************************************************************** +!> +! Retrieve error status and message from the class. 
+ + subroutine json_file_check_for_errors(me,status_ok,error_msg) + + implicit none + + class(json_file),intent(inout) :: me + logical(LK),intent(out),optional :: status_ok !! true if there were no errors + character(kind=CK,len=:),allocatable,intent(out),optional :: error_msg !! the error message + !! (if there were errors) + +#if defined __GFORTRAN__ + character(kind=CK,len=:),allocatable :: tmp !! workaround for gfortran bugs + call me%core%check_for_errors(status_ok,tmp) + if (present(error_msg)) error_msg = tmp +#else + call me%core%check_for_errors(status_ok,error_msg) +#endif + + end subroutine json_file_check_for_errors +!***************************************************************************************** + +!***************************************************************************************** +!> +! Clear exceptions in the class. + + pure subroutine json_file_clear_exceptions(me) + + implicit none + + class(json_file),intent(inout) :: me + + call me%core%clear_exceptions() + + end subroutine json_file_clear_exceptions +!***************************************************************************************** + +!***************************************************************************************** +!> +! This is a wrapper for [[json_print_error_message]]. + + subroutine json_file_print_error_message(me,io_unit) + + implicit none + + class(json_file),intent(inout) :: me + integer, intent(in), optional :: io_unit + + call me%core%print_error_message(io_unit) + + end subroutine json_file_print_error_message +!***************************************************************************************** + +!***************************************************************************************** +!> +! Initialize the [[json_core(type)]] for this [[json_file]]. +! This is just a wrapper for [[json_initialize]]. +! +!@note This does not destroy the data in the file. +! +!@note [[initialize_json_core]], [[json_initialize]], +! 
[[initialize_json_core_in_file]], [[initialize_json_file]], +! [[initialize_json_file_v2]], [[initialize_json_file_from_string]], +! and [[initialize_json_file_from_string_v2]] +! all have a similar interface. + + subroutine initialize_json_core_in_file(me,& +#include ""json_initialize_dummy_arguments.inc"" + ) + + implicit none + + class(json_file),intent(inout) :: me +#include ""json_initialize_arguments.inc"" + + call me%core%initialize(& +#include ""json_initialize_dummy_arguments.inc"" + ) + end subroutine initialize_json_core_in_file +!***************************************************************************************** + +!***************************************************************************************** +!> +! Set the [[json_core(type)]] for this [[json_file]]. +! +!@note This does not destroy the data in the file. +! +!@note This one is used if you want to initialize the file with +! an already-existing [[json_core(type)]] (presumably, this was already +! initialized by a call to [[initialize_json_core]] or similar). + + subroutine set_json_core_in_file(me,core) + + implicit none + + class(json_file),intent(inout) :: me + type(json_core),intent(in) :: core + + me%core = core + + end subroutine set_json_core_in_file +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get a copy of the [[json_core(type)]] in this [[json_file]]. + + subroutine get_json_core_in_file(me,core) + + implicit none + + class(json_file),intent(in) :: me + type(json_core),intent(out) :: core + + core = me%core + + end subroutine get_json_core_in_file +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! date: 07/23/2015 +! +! 
Cast a [[json_value]] object as a [[json_file(type)]] object. +! It also calls the `initialize()` method. +! +!@note [[initialize_json_core]], [[json_initialize]], +! [[initialize_json_core_in_file]], [[initialize_json_file]], +! [[initialize_json_file_v2]], [[initialize_json_file_from_string]], +! and [[initialize_json_file_from_string_v2]] +! all have a similar interface. + + function initialize_json_file(p,& +#include ""json_initialize_dummy_arguments.inc"" + ) result(file_object) + + implicit none + + type(json_file) :: file_object + type(json_value),pointer,optional :: p !! `json_value` object to cast + !! as a `json_file` object. This + !! will be nullified. +#include ""json_initialize_arguments.inc"" + + call file_object%initialize(& +#include ""json_initialize_dummy_arguments.inc"" + ) + + if (present(p)) then + file_object%p => p + ! we have to nullify it to avoid + ! a dangling pointer when the file + ! goes out of scope + nullify(p) + end if + + end function initialize_json_file +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 4/26/2016 +! +! Cast a [[json_value]] pointer and a [[json_core(type)]] object +! as a [[json_file(type)]] object. + + function initialize_json_file_v2(json_value_object,json_core_object) & + result(file_object) + + implicit none + + type(json_file) :: file_object + type(json_value),pointer,intent(in) :: json_value_object + type(json_core),intent(in) :: json_core_object + + file_object%p => json_value_object + file_object%core = json_core_object + + end function initialize_json_file_v2 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 01/19/2019 +! +! 
Cast a JSON string as a [[json_file(type)]] object. +! It also calls the `initialize()` method. +! +!### Example +! +!```fortran +! type(json_file) :: f +! f = json_file('{""key "": 1}', trailing_spaces_significant=.true.) +!``` +! +!@note [[initialize_json_core]], [[json_initialize]], +! [[initialize_json_core_in_file]], [[initialize_json_file]], +! [[initialize_json_file_v2]], [[initialize_json_file_from_string]], +! and [[initialize_json_file_from_string_v2]] +! all have a similar interface. + + function initialize_json_file_from_string(str,& +#include ""json_initialize_dummy_arguments.inc"" + ) result(file_object) + + implicit none + + type(json_file) :: file_object + character(kind=CK,len=*),intent(in) :: str !! string to load JSON data from +#include ""json_initialize_arguments.inc"" + + call file_object%initialize(& +#include ""json_initialize_dummy_arguments.inc"" + ) + call file_object%deserialize(str) + + end function initialize_json_file_from_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[initialize_json_file_from_string]], where ""str"" is kind=CDK. + + function wrap_initialize_json_file_from_string(str,& +#include ""json_initialize_dummy_arguments.inc"" + ) result(file_object) + + implicit none + + type(json_file) :: file_object + character(kind=CDK,len=*),intent(in) :: str !! string to load JSON data from +#include ""json_initialize_arguments.inc"" + + file_object = initialize_json_file_from_string(& + to_unicode(str),& +#include ""json_initialize_dummy_arguments.inc"" + ) + + end function wrap_initialize_json_file_from_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/19/2019 +! +! 
Cast a JSON string and a [[json_core(type)]] object +! as a [[json_file(type)]] object. + + function initialize_json_file_from_string_v2(str, json_core_object) & + result(file_object) + + implicit none + + type(json_file) :: file_object + character(kind=CK,len=*),intent(in) :: str !! string to load JSON data from + type(json_core),intent(in) :: json_core_object + + file_object%core = json_core_object + call file_object%deserialize(str) + + end function initialize_json_file_from_string_v2 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[initialize_json_file_from_string_v2]], where ""str"" is kind=CDK. + + function wrap_initialize_json_file_from_string_v2(str,json_core_object) & + result(file_object) + + implicit none + + type(json_file) :: file_object + character(kind=CDK,len=*),intent(in) :: str !! string to load JSON data from + type(json_core),intent(in) :: json_core_object + + file_object = initialize_json_file_from_string_v2(to_unicode(str),json_core_object) + + end function wrap_initialize_json_file_from_string_v2 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Nullify the [[json_value]] pointer in a [[json_file(type)]], +! but do not destroy it. +! +! This should normally only be done if the pointer is the target of +! another pointer outside the class that is still intended to be in +! scope after the [[json_file(type)]] has gone out of scope. +! Otherwise, this would result in a memory leak. +! +!### See also +! * [[json_file_destroy]] +! +!### History +! 
* 6/30/2019 : Created + + subroutine json_file_nullify(me) + + implicit none + + class(json_file),intent(inout) :: me + + nullify(me%p) + + end subroutine json_file_nullify +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Destroy the [[json_value]] data in a [[json_file(type)]]. +! This may be done when the variable is no longer needed, +! or will be reused to open a different file. +! Otherwise a memory leak will occur. +! +! Optionally, also destroy the [[json_core(type)]] instance (this +! is not necessary to prevent memory leaks, since a [[json_core(type)]] +! does not use pointers). +! +!### See also +! * [[json_file_nullify]] +! +!### History +! * 12/9/2013 : Created +! * 4/26/2016 : Added optional `destroy_core` argument +! +!@note This routine will be called automatically when the variable +! goes out of scope. + + subroutine json_file_destroy(me,destroy_core) + + implicit none + + class(json_file),intent(inout) :: me + logical,intent(in),optional :: destroy_core !! to also destroy the [[json_core(type)]]. + !! default is to leave it as is. + + if (associated(me%p)) call me%core%destroy(me%p) + + if (present(destroy_core)) then + if (destroy_core) call me%core%destroy() + end if + + end subroutine json_file_destroy +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/5/2014 +! +! Move the [[json_value]] pointer from one [[json_file(type)]] to another. +! The ""from"" pointer is then nullified, but not destroyed. +! +!@note If ""from%p"" is not associated, then an error is thrown. 
+ + subroutine json_file_move_pointer(to,from) + + implicit none + + class(json_file),intent(inout) :: to + class(json_file),intent(inout) :: from + + if (associated(from%p)) then + + if (from%failed()) then + !Don't get the data if the FROM file has an + !active exception, since it may not be valid. + call to%core%throw_exception('Error in json_file_move_pointer: '//& + 'error exception in FROM file.') + else + call to%initialize() !initialize and clear any exceptions that may be present + to%p => from%p + nullify(from%p) + end if + + else + call to%core%throw_exception('Error in json_file_move_pointer: '//& + 'pointer is not associated.') + end if + + end subroutine json_file_move_pointer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/9/2013 +! +! Load the JSON data from a file. +! +!### Example +! +!```fortran +! program main +! use json_module +! implicit none +! type(json_file) :: f +! call f%load('my_file.json') +! !... +! call f%destroy() +! end program main +!``` + + subroutine json_file_load(me, filename, unit) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: filename !! the filename to open + integer(IK),intent(in),optional :: unit !! the unit number to use + !! (if not present, a newunit + !! is used) + + call me%core%load(file=filename, p=me%p, unit=unit) + + end subroutine json_file_load +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/13/2015 +! +! Load the JSON data from a string. +! +!### Example +! +! Load JSON from a string: +!```fortran +! type(json_file) :: f +! 
call f%deserialize('{ ""name"": ""Leonidas"" }') +!``` + + subroutine json_file_load_from_string(me, str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: str !! string to load JSON data from + + call me%core%deserialize(me%p, str) + + end subroutine json_file_load_from_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_load_from_string]], where ""str"" is kind=CDK. + + subroutine wrap_json_file_load_from_string(me, str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: str + + call me%deserialize(to_unicode(str)) + + end subroutine wrap_json_file_load_from_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/11/2015 +! +! Print the JSON file to the console. + + subroutine json_file_print_to_console(me) + + implicit none + + class(json_file),intent(inout) :: me + + call me%core%print(me%p,iunit=int(output_unit,IK)) + + end subroutine json_file_print_to_console +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/9/2013 +! +! Prints the JSON file to the specified file unit number. + + subroutine json_file_print_to_unit(me, iunit) + + implicit none + + class(json_file),intent(inout) :: me + integer(IK),intent(in) :: iunit !! 
file unit number (must not be -1) + + if (iunit/=unit2str) then + call me%core%print(me%p,iunit=iunit) + else + call me%core%throw_exception('Error in json_file_print_to_unit: iunit must not be -1.') + end if + + end subroutine json_file_print_to_unit +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/11/2015 +! +! Print the JSON structure to the specified filename. +! The file is opened, printed, and then closed. +! +!### Example +! Example loading a JSON file, changing a value, and then printing +! result to a new file: +!```fortran +! type(json_file) :: f +! logical :: found +! call f%load('my_file.json') !open the original file +! call f%update('version',4,found) !change the value of a variable +! call f%print('my_file_2.json') !save file as new name +!``` + + subroutine json_file_print_to_filename(me,filename) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: filename !! filename to print to + + call me%core%print(me%p,filename) + + end subroutine json_file_print_to_filename +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/11/2015 +! +! Print the JSON file to a string. +! +!### Example +! +! Open a JSON file, and then print the contents to a string: +!```fortran +! type(json_file) :: f +! character(kind=CK,len=:),allocatable :: str +! call f%load('my_file.json') +! call f%serialize(str) +!``` + + subroutine json_file_print_to_string(me,str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=:),allocatable,intent(out) :: str !! 
string to print JSON data to + + call me%core%serialize(me%p,str) + + end subroutine json_file_print_to_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 2/3/2014 +! +! Returns information about a variable in a [[json_file(type)]]. +! +!@note If `found` is present, no exceptions will be thrown if an +! error occurs. Otherwise, an exception will be thrown if the +! variable is not found. + + subroutine json_file_variable_info(me,path,found,var_type,n_children,name) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! path to the variable + logical(LK),intent(out),optional :: found !! the variable exists in the structure + integer(IK),intent(out),optional :: var_type !! variable type + integer(IK),intent(out),optional :: n_children !! number of children + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + call me%core%info(me%p,path,found,var_type,n_children,name) + + end subroutine json_file_variable_info +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_variable_info]], where ""path"" is kind=CDK. +! +!@note If `found` is present, no exceptions will be thrown if an +! error occurs. Otherwise, an exception will be thrown if the +! variable is not found. 
+ + subroutine wrap_json_file_variable_info(me,path,found,var_type,n_children,name) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path + logical(LK),intent(out),optional :: found + integer(IK),intent(out),optional :: var_type + integer(IK),intent(out),optional :: n_children + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + call me%info(to_unicode(path),found,var_type,n_children,name) + + end subroutine wrap_json_file_variable_info +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 6/26/2016 +! +! Returns matrix information about a variable in a [[json_file(type)]]. +! +!@note If `found` is present, no exceptions will be thrown if an +! error occurs. Otherwise, an exception will be thrown if the +! variable is not found. + + subroutine json_file_variable_matrix_info(me,path,is_matrix,found,& + var_type,n_sets,set_size,name) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! path to the variable + logical(LK),intent(out) :: is_matrix !! true if it is a valid matrix + logical(LK),intent(out),optional :: found !! true if it was found + integer(IK),intent(out),optional :: var_type !! variable type of data in + !! the matrix (if all elements have + !! the same type) + integer(IK),intent(out),optional :: n_sets !! number of data sets (i.e., matrix + !! rows if using row-major order) + integer(IK),intent(out),optional :: set_size !! size of each data set (i.e., matrix + !! cols if using row-major order) + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! 
variable name + + call me%core%matrix_info(me%p,path,is_matrix,found,var_type,n_sets,set_size,name) + + end subroutine json_file_variable_matrix_info +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_variable_matrix_info]], where ""path"" is kind=CDK. +! +!@note If `found` is present, no exceptions will be thrown if an +! error occurs. Otherwise, an exception will be thrown if the +! variable is not found. + + subroutine wrap_json_file_variable_matrix_info(me,path,is_matrix,found,& + var_type,n_sets,set_size,name) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! path to the variable + logical(LK),intent(out) :: is_matrix !! true if it is a valid matrix + logical(LK),intent(out),optional :: found !! true if it was found + integer(IK),intent(out),optional :: var_type !! variable type of data in + !! the matrix (if all elements have + !! the same type) + integer(IK),intent(out),optional :: n_sets !! number of data sets (i.e., matrix + !! rows if using row-major order) + integer(IK),intent(out),optional :: set_size !! size of each data set (i.e., matrix + !! cols if using row-major order) + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + call me%matrix_info(to_unicode(path),is_matrix,found,var_type,n_sets,set_size,name) + + end subroutine wrap_json_file_variable_matrix_info +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! date: 7/23/2015 +! +! Get a [[json_value]] pointer to the JSON file root. +! 
+!@note This is equivalent to calling ```[[json_file]]%get('$',p)``` + + subroutine json_file_get_root(me,p) + + implicit none + + class(json_file),intent(inout) :: me + type(json_value),pointer,intent(out) :: p !! pointer to the variable + + p => me%p + + end subroutine json_file_get_root +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Assignment operator for [[json_core(type)]] = [[json_core(type)]]. +! This will duplicate the [[json_core(type)]] and also +! perform a deep copy of the [[json_value(type)]] data structure. + + subroutine assign_json_file(me,f) + + implicit none + + class(json_file),intent(out) :: me + type(json_file),intent(in) :: f + + me%core = f%core ! no pointers here so OK to copy + call me%core%clone(f%p,me%p) + + end subroutine assign_json_file +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Assignment operator for character = [[json_core(type)]]. +! This is just a wrapper for the [[json_value_to_string]] routine. +! +!### Note +! * If an exception is raised or the file contains no data, +! this will return an empty string. + + subroutine assign_json_file_to_string(str,me) + + implicit none + + character(kind=CK,len=:),allocatable,intent(out) :: str + class(json_file),intent(in) :: me + + type(json_core) :: core_copy !! a copy of `core` from `me` + + if (me%core%failed() .or. .not. associated(me%p)) then + str = CK_'' + else + + ! This is sort of a hack. Since `me` has to have `intent(in)` + ! for the assignment to work, we need to make a copy of `me%core` + ! so we can call the low level routine (since it needs it to + ! be `intent(inout)`) because it's possible for this + ! 
function to raise an exception. + + core_copy = me%core ! copy the parser settings + + call core_copy%serialize(me%p,str) + if (me%core%failed()) str = CK_'' + + end if + + end subroutine assign_json_file_to_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Assignment operator for [[json_core(type)]] = character. +! This is just a wrapper for the [[json_file_load_from_string]] routine. + + subroutine assign_string_to_json_file(me,str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: str + + if (associated(me%p)) call me%destroy() + if (me%core%failed()) call me%core%clear_exceptions() + call me%deserialize(str) + + end subroutine assign_string_to_json_file +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[assign_string_to_json_file]], where ""str"" is kind=CDK. + + subroutine wrap_assign_string_to_json_file(me,str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: str + + call me%assign_string_to_json_file(to_unicode(str)) + + end subroutine wrap_assign_string_to_json_file +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! A wrapper for [[json_file_valid_path]] for the `.in.` operator + + function json_file_valid_path_op(path,me) result(found) + + implicit none + + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + class(json_file),intent(in) :: me !! the JSON file + logical(LK) :: found !! 
if the variable was found + + type(json_core) :: core_copy !! a copy of `core` from `me` + + ! This is sort of a hack. Since `me` has to have `intent(in)` + ! for the operator to work, we need to make a copy of `me%core` + ! so we can call the low level routine (since it needs it to + ! be `intent(inout)`) because it's technically possible for this + ! function to raise an exception. This normally should never + ! happen here unless the JSON structure is malformed. + + core_copy = me%core ! copy the settings (need them to know + ! how to interpret the path) + + found = core_copy%valid_path(me%p, path) ! call the low-level routine + + call core_copy%destroy() ! just in case (but not really necessary) + + end function json_file_valid_path_op +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_valid_path_op]], where ""path"" is kind=CDK. + + function wrap_json_file_valid_path_op(path,me) result(found) + + implicit none + + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + class(json_file),intent(in) :: me !! the JSON file + logical(LK) :: found !! if the variable was found + + found = to_unicode(path) .in. me + + end function wrap_json_file_valid_path_op +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Returns true if the `path` is present in the JSON file. + + function json_file_valid_path(me,path) result(found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + logical(LK) :: found !! 
if the variable was found + + found = me%core%valid_path(me%p, path) + + end function json_file_valid_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_valid_path]], where ""path"" is kind=CDK. + + function wrap_json_file_valid_path(me,path) result(found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + logical(LK) :: found !! if the variable was found + + found = me%valid_path(to_unicode(path)) + + end function wrap_json_file_valid_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Rename a variable in a JSON file. + + subroutine json_file_rename(me,path,name,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),intent(in) :: name !! the new name + logical(LK),intent(out),optional :: found !! if the variable was found + + call me%core%rename(me%p, path, name, found) + + end subroutine json_file_rename +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_rename]], where ""path"" and ""name"" are kind=CDK. + + subroutine wrap_json_file_rename(me,path,name,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),intent(in) :: name !! the new name + logical(LK),intent(out),optional :: found !! 
if the variable was found + + call me%json_file_rename(to_unicode(path),to_unicode(name),found) + + end subroutine wrap_json_file_rename +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Wrapper for [[json_file_rename]] where ""path"" is kind=CDK). + + subroutine json_file_rename_path_ascii(me,path,name,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),intent(in) :: name !! the new name + logical(LK),intent(out),optional :: found !! if the variable was found + + call me%json_file_rename(to_unicode(path),name,found) + + end subroutine json_file_rename_path_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Wrapper for [[json_file_rename]] where ""name"" is kind=CDK). + + subroutine json_file_rename_name_ascii(me,path,name,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),intent(in) :: name !! the new name + logical(LK),intent(out),optional :: found !! if the variable was found + + call me%json_file_rename(path,to_unicode(name),found) + + end subroutine json_file_rename_name_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 2/3/2014 +! +! Get a [[json_value]] pointer to an object from a JSON file. 
+ + subroutine json_file_get_object(me, path, p, found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + type(json_value),pointer,intent(out) :: p !! pointer to the variable + logical(LK),intent(out),optional :: found !! if it was really found + + call me%core%get(me%p, path, p, found) + + end subroutine json_file_get_object +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_object]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_object(me, path, p, found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + type(json_value),pointer,intent(out) :: p !! pointer to the variable + logical(LK),intent(out),optional :: found !! if it was really found + + call me%get(to_unicode(path), p, found) + + end subroutine wrap_json_file_get_object +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/9/2013 +! +! Get an integer value from a JSON file. + + subroutine json_file_get_integer(me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + integer(IK),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! 
if it was really found + integer(IK),intent(in),optional :: default + + call me%core%get(me%p, path, val, found, default) + + end subroutine json_file_get_integer +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_integer]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_integer(me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + integer(IK),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + integer(IK),intent(in),optional :: default + + call me%get(to_unicode(path), val, found, default) + + end subroutine wrap_json_file_get_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/20/2014 +! +! Get an integer vector from a JSON file. + + subroutine json_file_get_integer_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + integer(IK),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if it was really found + integer(IK),dimension(:),intent(in),optional :: default + + call me%core%get(me%p, path, vec, found, default) + + end subroutine json_file_get_integer_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_integer_vec]], where ""path"" is kind=CDK. 
+ + subroutine wrap_json_file_get_integer_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + integer(IK),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if it was really found + integer(IK),dimension(:),intent(in),optional :: default + + call me%get(to_unicode(path), vec, found, default) + + end subroutine wrap_json_file_get_integer_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/9/2013 +! +! Get a real(RK) variable value from a JSON file. + + subroutine json_file_get_real (me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(RK),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + real(RK),intent(in),optional :: default + + call me%core%get(me%p, path, val, found, default) + + end subroutine json_file_get_real +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_real]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_real (me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(RK),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! 
if it was really found + real(RK),intent(in),optional :: default + + call me%get(to_unicode(path), val, found, default) + + end subroutine wrap_json_file_get_real +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/19/2014 +! +! Get a real(RK) vector from a JSON file. + + subroutine json_file_get_real_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(RK),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if it was really found + real(RK),dimension(:),intent(in),optional :: default + + call me%core%get(me%p, path, vec, found, default) + + end subroutine json_file_get_real_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_real_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_real_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(RK),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if it was really found + real(RK),dimension(:),intent(in),optional :: default + + call me%get(to_unicode(path), vec, found, default) + + end subroutine wrap_json_file_get_real_vec +!***************************************************************************************** + +#ifndef REAL32 +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/21/2019 +! +! 
Alternate version of [[json_file_get_real]] where `val` is `real32`. + + subroutine json_file_get_real32 (me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real32),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + real(real32),intent(in),optional :: default + + call me%core%get(me%p, path, val, found, default) + + end subroutine json_file_get_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_real32]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_real32 (me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real32),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + real(real32),intent(in),optional :: default + + call me%get(to_unicode(path), val, found, default) + + end subroutine wrap_json_file_get_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/21/2019 +! +! Alternate version of [[json_file_get_real_vec]] where `vec` is `real32`. + + subroutine json_file_get_real32_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real32),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! 
if it was really found + real(real32),dimension(:),intent(in),optional :: default + + call me%core%get(me%p, path, vec, found, default) + + end subroutine json_file_get_real32_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_real32_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_real32_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real32),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if it was really found + real(real32),dimension(:),intent(in),optional :: default + + call me%get(to_unicode(path), vec, found, default) + + end subroutine wrap_json_file_get_real32_vec +!***************************************************************************************** +#endif + +#ifdef REAL128 +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/21/2019 +! +! Alternate version of [[json_file_get_real]] where `val` is `real64`. + + subroutine json_file_get_real64 (me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real64),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + real(real64),intent(in),optional :: default + + call me%core%get(me%p, path, val, found, default) + + end subroutine json_file_get_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_file_get_real64]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_real64 (me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real64),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + real(real64),intent(in),optional :: default + + call me%get(to_unicode(path), val, found, default) + + end subroutine wrap_json_file_get_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/21/2019 +! +! Alternate version of [[json_file_get_real_vec]] where `vec` is `real64`. + + subroutine json_file_get_real64_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real64),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if it was really found + real(real64),dimension(:),intent(in),optional :: default + + call me%core%get(me%p, path, vec, found, default) + + end subroutine json_file_get_real64_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_real64_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_real64_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real64),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! 
if it was really found + real(real64),dimension(:),intent(in),optional :: default + + call me%get(to_unicode(path), vec, found, default) + + end subroutine wrap_json_file_get_real64_vec +!***************************************************************************************** +#endif + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/9/2013 +! +! Get a logical(LK) value from a JSON file. + + subroutine json_file_get_logical(me,path,val,found,default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + logical(LK),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + logical(LK),intent(in),optional :: default + + call me%core%get(me%p, path, val, found, default) + + end subroutine json_file_get_logical +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_logical]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_logical(me,path,val,found,default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + logical(LK),intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + logical(LK),intent(in),optional :: default + + call me%get(to_unicode(path), val, found, default) + + end subroutine wrap_json_file_get_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/20/2014 +! +! Get a logical(LK) vector from a JSON file. 
+ + subroutine json_file_get_logical_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + logical(LK),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if it was really found + logical(LK),dimension(:),intent(in),optional :: default + + call me%core%get(me%p, path, vec, found, default) + + end subroutine json_file_get_logical_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_logical_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_logical_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + logical(LK),dimension(:),allocatable,intent(out) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if it was really found + logical(LK),dimension(:),intent(in),optional :: default + + call me%get(to_unicode(path), vec, found, default) + + end subroutine wrap_json_file_get_logical_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/9/2013 +! +! Get a character string from a json file. +! The output val is an allocatable character string. + + subroutine json_file_get_string(me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=:),allocatable,intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! 
if it was really found + character(kind=CK,len=*),intent(in),optional :: default + + call me%core%get(me%p, path, val, found, default) + + end subroutine json_file_get_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_get_string]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_string(me, path, val, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=:),allocatable,intent(out) :: val !! value + logical(LK),intent(out),optional :: found !! if it was really found + character(kind=CK,len=*),intent(in),optional :: default + + call me%get(to_unicode(path), val, found, default) + + end subroutine wrap_json_file_get_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/19/2014 +! +! Get a string vector from a JSON file. + + subroutine json_file_get_string_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),dimension(:),allocatable,intent(out) :: vec !! value vector + logical(LK),intent(out),optional :: found !! if it was really found + character(kind=CK,len=*),dimension(:),intent(in),optional :: default + + call me%core%get(me%p, path, vec, found, default) + + end subroutine json_file_get_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_file_get_string_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_get_string_vec(me, path, vec, found, default) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),dimension(:),allocatable,intent(out) :: vec !! value vector + logical(LK),intent(out),optional :: found !! if it was really found + character(kind=CK,len=*),dimension(:),intent(in),optional :: default + + call me%get(to_unicode(path), vec, found, default) + + end subroutine wrap_json_file_get_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/17/2016 +! +! Get an (allocatable length) string vector from a JSON file. +! This is just a wrapper for [[json_get_alloc_string_vec_by_path]]. + + subroutine json_file_get_alloc_string_vec(me, path, vec, ilen, found, default, default_ilen) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=:),dimension(:),allocatable,intent(out) :: vec !! value vector + integer(IK),dimension(:),allocatable,intent(out) :: ilen !! the actual length + !! of each character + !! string in the array + logical(LK),intent(out),optional :: found + character(kind=CK,len=*),dimension(:),intent(in),optional :: default + integer(IK),dimension(:),intent(in),optional :: default_ilen !! the actual + !! length of `default` + + call me%core%get(me%p, path, vec, ilen, found, default, default_ilen) + + end subroutine json_file_get_alloc_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_file_get_alloc_string_vec]], where ""path"" is kind=CDK. +! This is just a wrapper for [[wrap_json_get_alloc_string_vec_by_path]]. + + subroutine wrap_json_file_get_alloc_string_vec(me, path, vec, ilen, found, default, default_ilen) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=:),dimension(:),allocatable,intent(out) :: vec !! value vector + integer(IK),dimension(:),allocatable,intent(out) :: ilen !! the actual length + !! of each character + !! string in the array + logical(LK),intent(out),optional :: found + character(kind=CK,len=*),dimension(:),intent(in),optional :: default + integer(IK),dimension(:),intent(in),optional :: default_ilen !! the actual + !! length of `default` + + call me%get(to_unicode(path), vec, ilen, found, default, default_ilen) + + end subroutine wrap_json_file_get_alloc_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add a [[json_value]] pointer as the root object to a JSON file. +! +!### Note +! +! This is mostly equivalent to: +!```fortran +! f = [[json_file]](p) +!``` +! But without the finalization calls. +! +! And: +!```fortran +! if (destroy_original) call [[json_file]]%destroy() +! call [[json_file]]%add('$',p) +!``` + + subroutine json_file_add(me,p,destroy_original) + + implicit none + + class(json_file),intent(inout) :: me + type(json_value),pointer,intent(in) :: p !! pointer to the variable to add + logical(LK),intent(in),optional :: destroy_original !! if the file currently contains + !! an associated pointer, it is + !! destroyed. [Default is True] + + logical(LK) :: destroy !! if `me%p` is to be destroyed + + if (present(destroy_original)) then + destroy = destroy_original + else + destroy = .true. ! 
default + end if + + if (destroy) call me%core%destroy(me%p) + + me%p => p + + end subroutine json_file_add +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add a [[json_value]] pointer to an object to a JSON file. + + subroutine json_file_add_object(me,path,p,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + type(json_value),pointer,intent(in) :: p !! pointer to the variable to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! create root + + call me%core%add_by_path(me%p,path,p,found,was_created) + + end subroutine json_file_add_object +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_object]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_object(me,path,p,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + type(json_value),pointer,intent(in) :: p !! pointer to the variable to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call me%json_file_add_object(to_unicode(path),p,found,was_created) + + end subroutine wrap_json_file_add_object +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add an integer value to a JSON file. + + subroutine json_file_add_integer(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + integer(IK),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! create root + + call me%core%add_by_path(me%p,path,val,found,was_created) + + end subroutine json_file_add_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_integer]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_integer(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + integer(IK),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call me%json_file_add_integer(to_unicode(path),val,found,was_created) + + end subroutine wrap_json_file_add_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add an integer vector to a JSON file. + + subroutine json_file_add_integer_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + integer(IK),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! create root + + call me%core%add_by_path(me%p,path,vec,found,was_created) + + end subroutine json_file_add_integer_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_integer_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_integer_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + integer(IK),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call me%json_file_add_integer_vec(to_unicode(path),vec,found,was_created) + + end subroutine wrap_json_file_add_integer_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add a real(RK) variable value to a JSON file. + + subroutine json_file_add_real(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(RK),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! create root + + call me%core%add_by_path(me%p,path,val,found,was_created) + + end subroutine json_file_add_real +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_real(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(RK),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call me%json_file_add_real(to_unicode(path),val,found,was_created) + + end subroutine wrap_json_file_add_real +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add a real(RK) vector to a JSON file. + + subroutine json_file_add_real_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(RK),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! create root + + call me%core%add_by_path(me%p,path,vec,found,was_created) + + end subroutine json_file_add_real_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_real_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(RK),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call me%json_file_add_real_vec(to_unicode(path),vec,found,was_created) + + end subroutine wrap_json_file_add_real_vec +!***************************************************************************************** + +#ifndef REAL32 +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real]] where `val` is `real32`. + + subroutine json_file_add_real32(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real32),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%core%add_by_path(me%p,path,val,found,was_created) + + end subroutine json_file_add_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real32]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_real32(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real32),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%json_file_add_real32(to_unicode(path),val,found,was_created) + + end subroutine wrap_json_file_add_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! 
+! Alternate version of [[json_file_add_real_vec]] where `vec` is `real32`. + + subroutine json_file_add_real32_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real32),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%core%add_by_path(me%p,path,vec,found,was_created) + + end subroutine json_file_add_real32_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real32_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_real32_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real32),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%json_file_add_real32_vec(to_unicode(path),vec,found,was_created) + + end subroutine wrap_json_file_add_real32_vec +!***************************************************************************************** +#endif + +#ifdef REAL128 +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real]] where `val` is `real64`. + + subroutine json_file_add_real64(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! 
the path to the variable + real(real64),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%core%add_by_path(me%p,path,val,found,was_created) + + end subroutine json_file_add_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real64]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_real64(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real64),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%json_file_add_real64(to_unicode(path),val,found,was_created) + + end subroutine wrap_json_file_add_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real_vec]] where `vec` is `real64`. + + subroutine json_file_add_real64_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real64),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call me%core%add_by_path(me%p,path,vec,found,was_created) + + end subroutine json_file_add_real64_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_real64_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_real64_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real64),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%json_file_add_real64_vec(to_unicode(path),vec,found,was_created) + + end subroutine wrap_json_file_add_real64_vec +!***************************************************************************************** +#endif + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add a logical(LK) value to a JSON file. + + subroutine json_file_add_logical(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + logical(LK),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! 
create root + + call me%core%add_by_path(me%p,path,val,found,was_created) + + end subroutine json_file_add_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_logical]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_logical(me,path,val,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + logical(LK),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%json_file_add_logical(to_unicode(path),val,found,was_created) + + end subroutine wrap_json_file_add_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add a logical(LK) vector to a JSON file. + + subroutine json_file_add_logical_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + logical(LK),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! 
create root + + call me%core%add_by_path(me%p,path,vec,found,was_created) + + end subroutine json_file_add_logical_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_logical_vec]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_add_logical_vec(me,path,vec,found,was_created) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + logical(LK),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call me%json_file_add_logical_vec(to_unicode(path),vec,found,was_created) + + end subroutine wrap_json_file_add_logical_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add a character string to a json file. + + subroutine json_file_add_string(me,path,val,found,was_created,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (note that ADJUSTL is done before TRIM) + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! 
create root + + call me%core%add_by_path(me%p,path,val,found,was_created,trim_str,adjustl_str) + + end subroutine json_file_add_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_string]], where ""path"" and ""val"" are kind=CDK. + + subroutine wrap_json_file_add_string(me,path,val,found,was_created,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (note that ADJUSTL is done before TRIM) + + call me%json_file_add_string(to_unicode(path),to_unicode(val),found,& + was_created,trim_str,adjustl_str) + + end subroutine wrap_json_file_add_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Wrapper for [[json_file_add_string]] where ""path"" is kind=CDK). + + subroutine json_file_add_string_path_ascii(me,path,val,found,& + was_created,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (note that ADJUSTL is done before TRIM) + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! create root + + call me%json_file_add_string(to_unicode(path),val,found,& + was_created,trim_str,adjustl_str) + + end subroutine json_file_add_string_path_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Wrapper for [[json_file_add_string]] where ""val"" is kind=CDK). + + subroutine json_file_add_string_value_ascii(me,path,val,found,& + was_created,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),intent(in) :: val !! value + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (note that ADJUSTL is done before TRIM) + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! create root + + call me%json_file_add_string(path,to_unicode(val),found,& + was_created,trim_str,adjustl_str) + + end subroutine json_file_add_string_value_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Add a string vector to a JSON file. 
+ + subroutine json_file_add_string_vec(me,path,vec,found,& + was_created,ilen,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + integer(IK),dimension(:),intent(in),optional :: ilen !! the string lengths of each + !! element in `value`. If not present, + !! the full `len(value)` string is added + !! for each element. + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + !! (note that ADJUSTL is done before TRIM) + + if (.not. associated(me%p)) call me%core%create_object(me%p,ck_'') ! create root + + call me%core%add_by_path(me%p,path,vec,found,was_created,ilen,trim_str,adjustl_str) + + end subroutine json_file_add_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_string_vec]], where ""path"" and ""vec"" are kind=CDK. + + subroutine wrap_json_file_add_string_vec(me,path,vec,found,& + was_created,ilen,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),dimension(:),intent(in):: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + integer(IK),dimension(:),intent(in),optional :: ilen !! the string lengths of each + !! 
element in `value`. If not present, + !! the full `len(value)` string is added + !! for each element. + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + !! (note that ADJUSTL is done before TRIM) + + call me%json_file_add_string_vec(to_unicode(path),to_unicode(vec),found,& + was_created,ilen,trim_str,adjustl_str) + + end subroutine wrap_json_file_add_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_string_vec]], where ""path"" is kind=CDK. + + subroutine json_file_add_string_vec_path_ascii(me,path,vec,found,& + was_created,ilen,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + integer(IK),dimension(:),intent(in),optional :: ilen !! the string lengths of each + !! element in `value`. If not present, + !! the full `len(value)` string is added + !! for each element. + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + !! 
(note that ADJUSTL is done before TRIM) + + call me%json_file_add_string_vec(to_unicode(path),vec,found,& + was_created,ilen,trim_str,adjustl_str) + + end subroutine json_file_add_string_vec_path_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Alternate version of [[json_file_add_string_vec]], where ""vec"" is kind=CDK. + + subroutine json_file_add_string_vec_vec_ascii(me,path,vec,found,& + was_created,ilen,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),dimension(:),intent(in) :: vec !! the value vector + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + integer(IK),dimension(:),intent(in),optional :: ilen !! the string lengths of each + !! element in `value`. If not present, + !! the full `len(value)` string is added + !! for each element. + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + !! (note that ADJUSTL is done before TRIM) + + call me%json_file_add_string_vec(path,to_unicode(vec),found,& + was_created,ilen,trim_str,adjustl_str) + + end subroutine json_file_add_string_vec_vec_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/10/2015 +! +! Given the path string, if the variable is present in the file, +! and is a scalar, then update its value. +! If it is not present, then create it and set its value. +! +!### See also +! 
* [[json_update_integer]] + + subroutine json_file_update_integer(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path + integer(IK),intent(in) :: val + logical(LK),intent(out) :: found + + if (.not. me%core%failed()) call me%core%update(me%p,path,val,found) + + end subroutine json_file_update_integer +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_update_integer]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_update_integer(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path + integer(IK),intent(in) :: val + logical(LK),intent(out) :: found + + call me%update(to_unicode(path),val,found) + + end subroutine wrap_json_file_update_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/10/2015 +! +! Given the path string, if the variable is present in the file, +! and is a scalar, then update its value. +! If it is not present, then create it and set its value. +! +!### See also +! * [[json_update_logical]] + + subroutine json_file_update_logical(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path + logical(LK),intent(in) :: val + logical(LK),intent(out) :: found + + if (.not. me%core%failed()) call me%core%update(me%p,path,val,found) + + end subroutine json_file_update_logical +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_file_update_logical]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_update_logical(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path + logical(LK),intent(in) :: val + logical(LK),intent(out) :: found + + call me%update(to_unicode(path),val,found) + + end subroutine wrap_json_file_update_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/10/2015 +! +! Given the path string, if the variable is present in the file, +! and is a scalar, then update its value. +! If it is not present, then create it and set its value. + + subroutine json_file_update_real(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path + real(RK),intent(in) :: val + logical(LK),intent(out) :: found + + if (.not. me%core%failed()) call me%core%update(me%p,path,val,found) + + end subroutine json_file_update_real +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_update_real]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_update_real(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path + real(RK),intent(in) :: val + logical(LK),intent(out) :: found + + call me%update(to_unicode(path),val,found) + + end subroutine wrap_json_file_update_real +!***************************************************************************************** + +#ifndef REAL32 +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/21/2019 +! 
+! Alternate version of [[json_file_update_real]] where `val` is `real32`. + + subroutine json_file_update_real32(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path + real(real32),intent(in) :: val + logical(LK),intent(out) :: found + + call me%update(path,real(val,RK),found) + + end subroutine json_file_update_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_update_real32]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_update_real32(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path + real(real32),intent(in) :: val + logical(LK),intent(out) :: found + + call me%update(to_unicode(path),val,found) + + end subroutine wrap_json_file_update_real32 +!***************************************************************************************** +#endif + +#ifdef REAL128 +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/21/2019 +! +! Alternate version of [[json_file_update_real]] where `val` is `real64`. + + subroutine json_file_update_real64(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path + real(real64),intent(in) :: val + logical(LK),intent(out) :: found + + call me%update(path,real(val,RK),found) + + end subroutine json_file_update_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_update_real64]], where ""path"" is kind=CDK. 
+ + subroutine wrap_json_file_update_real64(me,path,val,found) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path + real(real64),intent(in) :: val + logical(LK),intent(out) :: found + + call me%update(to_unicode(path),val,found) + + end subroutine wrap_json_file_update_real64 +!***************************************************************************************** +#endif + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/10/2015 +! +! Given the path string, if the variable is present in the file, +! and is a scalar, then update its value. +! If it is not present, then create it and set its value. +! +!### See also +! * [[json_update_string]] + + subroutine json_file_update_string(me,path,val,found,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path + character(kind=CK,len=*),intent(in) :: val + logical(LK),intent(out) :: found + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (note that ADJUSTL is done before TRIM) + + if (.not. me%core%failed()) call me%core%update(me%p,path,val,found,trim_str,adjustl_str) + + end subroutine json_file_update_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_update_string]], where ""path"" and ""val"" are kind=CDK. 
+ + subroutine wrap_json_file_update_string(me,path,val,found,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path + character(kind=CDK,len=*),intent(in) :: val + logical(LK),intent(out) :: found + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (note that ADJUSTL is done before TRIM) + + call me%update(to_unicode(path),to_unicode(val),found,trim_str,adjustl_str) + + end subroutine wrap_json_file_update_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_update_string]], where ""path"" is kind=CDK. + + subroutine json_file_update_string_name_ascii(me,path,val,found,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path + character(kind=CK, len=*),intent(in) :: val + logical(LK),intent(out) :: found + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (note that ADJUSTL is done before TRIM) + + call me%update(to_unicode(path),val,found,trim_str,adjustl_str) + + end subroutine json_file_update_string_name_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_update_string]], where ""val"" is kind=CDK. 
+ + subroutine json_file_update_string_val_ascii(me,path,val,found,trim_str,adjustl_str) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK, len=*),intent(in) :: path + character(kind=CDK,len=*),intent(in) :: val + logical(LK),intent(out) :: found + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (note that ADJUSTL is done before TRIM) + + call me%update(path,to_unicode(val),found,trim_str,adjustl_str) + + end subroutine json_file_update_string_val_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 6/11/2016 +! +! Traverse the JSON structure in the file. +! This routine calls the user-specified [[json_traverse_callback_func]] +! for each element of the structure. + + subroutine json_file_traverse(me,traverse_callback) + + implicit none + + class(json_file),intent(inout) :: me + procedure(json_traverse_callback_func) :: traverse_callback + + call me%core%traverse(me%p,traverse_callback) + + end subroutine json_file_traverse +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 7/7/2018 +! +! Remove a variable from a JSON file. +! +!@note This is just a wrapper to [[remove_if_present]]. + + subroutine json_file_remove(me,path) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CK,len=*),intent(in) :: path !! 
the path to the variable + + call me%core%remove_if_present(me%p,path) + + end subroutine json_file_remove +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_file_remove]], where ""path"" is kind=CDK. + + subroutine wrap_json_file_remove(me,path) + + implicit none + + class(json_file),intent(inout) :: me + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + + call me%remove(to_unicode(path)) + + end subroutine wrap_json_file_remove +!***************************************************************************************** + +!***************************************************************************************** + end module json_file_module +!***************************************************************************************** +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","src/lib/json-fortran/json_value_module.F90",".F90","456578","11550","!***************************************************************************************** +!> author: Jacob Williams +! license: BSD +! +! This module provides a low-level interface for manipulation of JSON data. +! The two public entities are [[json_value]], and [[json_core(type)]]. +! The [[json_file_module]] provides a higher-level interface to some +! of these routines. +! +!### License +! * JSON-Fortran is released under a BSD-style license. +! See the [LICENSE](https://github.com/jacobwilliams/json-fortran/blob/master/LICENSE) +! file for details. + + module json_value_module + + use,intrinsic :: iso_fortran_env, only: iostat_end,error_unit,output_unit + use,intrinsic :: ieee_arithmetic + use json_kinds + use json_parameters + use json_string_utilities + + implicit none + + private + +#include ""json_macros.inc"" + + !********************************************************* + !> + ! 
If Unicode is not enabled, then + ! JSON files are opened using access='STREAM' and + ! form='UNFORMATTED'. This allows the file to + ! be read faster. + ! +#ifdef USE_UCS4 + logical,parameter :: use_unformatted_stream = .false. +#else + logical,parameter :: use_unformatted_stream = .true. +#endif + !********************************************************* + + !********************************************************* + !> + ! If Unicode is not enabled, then + ! JSON files are opened using access='STREAM' and + ! form='UNFORMATTED'. This allows the file to + ! be read faster. + ! +#ifdef USE_UCS4 + character(kind=CDK,len=*),parameter :: access_spec = 'SEQUENTIAL' +#else + character(kind=CDK,len=*),parameter :: access_spec = 'STREAM' +#endif + !********************************************************* + + !********************************************************* + !> + ! If Unicode is not enabled, then + ! JSON files are opened using access='STREAM' and + ! form='UNFORMATTED'. This allows the file to + ! be read faster. + ! +#ifdef USE_UCS4 + character(kind=CDK,len=*),parameter :: form_spec = 'FORMATTED' +#else + character(kind=CDK,len=*),parameter :: form_spec = 'UNFORMATTED' +#endif + !********************************************************* + + !********************************************************* + !> + ! Type used to construct the linked-list JSON structure. + ! Normally, this should always be a pointer variable. + ! This type should only be used by an instance of [[json_core(type)]]. + ! + !### Example + ! + ! The following test program: + ! + !````fortran + ! program test + ! use json_module + ! implicit none + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! call json%create_object(p,'') !create the root + ! call json%add(p,'year',1805) !add some data + ! call json%add(p,'value',1.0_RK) !add some data + ! call json%print(p,'test.json') !write it to a file + ! call json%destroy(p) !cleanup + ! end program test + !```` + ! + ! 
Produces the JSON file **test.json**: + ! + !````json + ! { + ! ""year"": 1805, + ! ""value"": 0.1E+1 + ! } + !```` + ! + !@warning Pointers of this type should only be allocated + ! using the methods from [[json_core(type)]]. + + type,public :: json_value + + !force the constituents to be stored contiguously + ![note: on Intel, the order of the variables below + ! is significant to avoid the misaligned field warnings] + sequence + + private + + !for the linked list: + type(json_value),pointer :: previous => null() !! previous item in the list + type(json_value),pointer :: next => null() !! next item in the list + type(json_value),pointer :: parent => null() !! parent item of this + type(json_value),pointer :: children => null() !! first child item of this + type(json_value),pointer :: tail => null() !! last child item of this + + character(kind=CK,len=:),allocatable :: name !! variable name (unescaped) + + real(RK),allocatable :: dbl_value !! real data for this variable + logical(LK),allocatable :: log_value !! logical data for this variable + character(kind=CK,len=:),allocatable :: str_value !! string data for this variable + !! (unescaped) + integer(IK),allocatable :: int_value !! integer data for this variable + + integer(IK) :: var_type = json_unknown !! variable type + + integer(IK),private :: n_children = 0 !! number of children + + end type json_value + !********************************************************* + + !********************************************************* + !> + ! To access the core routines for manipulation + ! of [[json_value]] pointer variables. This class allows + ! for thread safe use of the module. + ! + !### Usage + !````fortran + ! program test + ! use json_module, wp=>json_RK + ! implicit none + ! type(json_core) :: json !<--have to declare this + ! type(json_value),pointer :: p + ! call json%create_object(p,'') !create the root + ! call json%add(p,'year',1805) !add some data + ! call json%add(p,'value',1.0_wp) !add some data + ! 
call json%print(p,'test.json') !write it to a file + ! call json%destroy(p) !cleanup + ! end program test + !```` + type,public :: json_core + + private + + integer(IK) :: spaces_per_tab = 2 !! number of spaces for indenting + + logical(LK) :: compact_real = .true. !! to use the ""compact"" form of real + !! numbers for output + character(kind=CDK,len=:),allocatable :: real_fmt !! the format string to use + !! for converting real numbers to strings. + !! It can be set in [[json_initialize]], + !! and used in [[json_value_print]] + !! If not set, then `default_real_fmt` + !! is used instead. + + logical(LK) :: is_verbose = .false. !! if true, all exceptions are + !! immediately printed to console. + + logical(LK) :: stop_on_error = .false. !! if true, then the program is + !! stopped immediately when an + !! exception is raised. + + logical(LK) :: exception_thrown = .false. !! The error flag. Will be set to true + !! when an error is thrown in the class. + !! Many of the methods will check this + !! and return immediately if it is true. + character(kind=CK,len=:),allocatable :: err_message + !! the error message. + !! if `exception_thrown=False` then + !! this variable is not allocated. + + integer(IK) :: char_count = 0 !! character position in the current line + integer(IK) :: line_count = 1 !! lines read counter + integer(IK) :: pushed_index = 0 !! used when parsing lines in file + character(kind=CK,len=pushed_char_size) :: pushed_char = CK_'' !! used when parsing + !! lines in file + + integer(IK) :: ipos = 1 !! for allocatable strings: next character to read + + logical(LK) :: strict_type_checking = .false. !! if true, then no type conversions are done + !! in the `get` routines if the actual variable + !! type is different from the return type (for + !! example, integer to real). + + logical(LK) :: trailing_spaces_significant = .false. !! for name and path comparisons, if trailing + !! space is to be considered significant. 
+ + logical(LK) :: case_sensitive_keys = .true. !! if name and path comparisons + !! are case sensitive. + + logical(LK) :: no_whitespace = .false. !! when printing a JSON string, don't include + !! non-significant spaces or line breaks. + !! If true, the entire structure will be + !! printed on one line. + + logical(LK) :: unescaped_strings = .true. !! If false, then the escaped + !! string is returned from [[json_get_string]] + !! and similar routines. If true [default], + !! then the string is returned unescaped. + + logical(LK) :: allow_comments = .true. !! if true, any comments will be ignored when + !! parsing a file. The comment tokens are defined + !! by the `comment_char` character variable. + character(kind=CK,len=:),allocatable :: comment_char !! comment tokens when + !! `allow_comments` is true. + !! Examples: '`!`' or '`#`'. + !! Default is `CK_'/!#'`. + + integer(IK) :: path_mode = 1_IK !! How the path strings are interpreted in the + !! `get_by_path` routines: + !! + !! * 1 -- Default mode (see [[json_get_by_path_default]]) + !! * 2 -- as RFC 6901 ""JSON Pointer"" paths + !! (see [[json_get_by_path_rfc6901]]) + !! * 3 -- JSONPath ""bracket-notation"" + !! see [[json_get_by_path_jsonpath_bracket]]) + + character(kind=CK,len=1) :: path_separator = dot !! The `path` separator to use + !! in the ""default"" mode for + !! the paths in the various + !! `get_by_path` routines. + !! Note: if `path_mode/=1` + !! then this is ignored. + + logical(LK) :: compress_vectors = .false. !! If true, then arrays of integers, + !! nulls, reals, & logicals are + !! printed all on one line. + !! [Note: `no_whitespace` will + !! override this option if necessary] + + logical(LK) :: allow_duplicate_keys = .true. !! If False, then after parsing, if any + !! duplicate keys are found, an error is + !! thrown. A call to [[json_value_validate]] + !! will also check for duplicates. If True + !! [default] then no special checks are done + + logical(LK) :: escape_solidus = .false. 
!! If True then the solidus ""`/`"" is always escaped + !! (""`\/`"") when serializing JSON. + !! If False [default], then it is not escaped. + !! Note that this option does not affect parsing + !! (both escaped and unescaped versions are still + !! valid in all cases). + + integer(IK) :: null_to_real_mode = 2_IK !! if `strict_type_checking=false`: + !! + !! * 1 : an exception will be raised if + !! try to retrieve a `null` as a real. + !! * 2 : a `null` retrieved as a real + !! will return NaN. [default] + !! * 3 : a `null` retrieved as a real + !! will return 0.0. + + logical(LK) :: non_normals_to_null = .false. !! How to serialize NaN, Infinity, + !! and -Infinity real values: + !! + !! * If true : as JSON `null` values + !! * If false : as strings (e.g., ""NaN"", + !! ""Infinity"", ""-Infinity"") [default] + + logical(LK) :: use_quiet_nan = .true. !! if true [default], `null_to_real_mode=2` + !! and [[string_to_real]] will use + !! `ieee_quiet_nan` for NaN values. If false, + !! `ieee_signaling_nan` will be used. + + logical(LK) :: strict_integer_type_checking = .true. + !! * If false, when parsing JSON, if an integer numeric value + !! cannot be converted to an integer (`integer(IK)`), + !! then an attempt is then make to convert it + !! to a real (`real(RK)`). + !! * If true [default], an exception will be raised if an integer + !! value cannot be read when parsing JSON. + + integer :: ichunk = 0 !! index in `chunk` for [[pop_char]] + !! when `use_unformatted_stream=True` + integer :: filesize = 0 !! the file size when when `use_unformatted_stream=True` + character(kind=CK,len=:),allocatable :: chunk !! a chunk read from a stream file + !! when `use_unformatted_stream=True` + + contains + + private + + !> + ! Return a child of a [[json_value]] structure. 
+ generic,public :: get_child => json_value_get_child_by_index, & + json_value_get_child,& + MAYBEWRAP(json_value_get_child_by_name) + procedure,private :: json_value_get_child_by_index + procedure,private :: MAYBEWRAP(json_value_get_child_by_name) + procedure,private :: json_value_get_child + + !> + ! Add objects to a linked list of [[json_value]]s. + ! + !@note It might make more sense to call this `add_child`. + generic,public :: add => json_value_add_member, & + MAYBEWRAP(json_value_add_null), & + MAYBEWRAP(json_value_add_integer), & + MAYBEWRAP(json_value_add_integer_vec), & +#ifndef REAL32 + MAYBEWRAP(json_value_add_real32), & + MAYBEWRAP(json_value_add_real32_vec), & +#endif + MAYBEWRAP(json_value_add_real), & + MAYBEWRAP(json_value_add_real_vec), & +#ifdef REAL128 + MAYBEWRAP(json_value_add_real64), & + MAYBEWRAP(json_value_add_real64_vec), & +#endif + MAYBEWRAP(json_value_add_logical), & + MAYBEWRAP(json_value_add_logical_vec), & + MAYBEWRAP(json_value_add_string), & + MAYBEWRAP(json_value_add_string_vec) +#ifdef USE_UCS4 + generic,public :: add => json_value_add_string_name_ascii, & + json_value_add_string_val_ascii, & + json_value_add_string_vec_name_ascii, & + json_value_add_string_vec_val_ascii +#endif + + procedure,private :: json_value_add_member + procedure,private :: MAYBEWRAP(json_value_add_integer) + procedure,private :: MAYBEWRAP(json_value_add_null) + procedure,private :: MAYBEWRAP(json_value_add_integer_vec) +#ifndef REAL32 + procedure,private :: MAYBEWRAP(json_value_add_real32) + procedure,private :: MAYBEWRAP(json_value_add_real32_vec) +#endif + procedure,private :: MAYBEWRAP(json_value_add_real) + procedure,private :: MAYBEWRAP(json_value_add_real_vec) +#ifdef REAL128 + procedure,private :: MAYBEWRAP(json_value_add_real64) + procedure,private :: MAYBEWRAP(json_value_add_real64_vec) +#endif + procedure,private :: MAYBEWRAP(json_value_add_logical) + procedure,private :: MAYBEWRAP(json_value_add_logical_vec) + procedure,private :: 
MAYBEWRAP(json_value_add_string) + procedure,private :: MAYBEWRAP(json_value_add_string_vec) +#ifdef USE_UCS4 + procedure,private :: json_value_add_string_name_ascii + procedure,private :: json_value_add_string_val_ascii + procedure,private :: json_value_add_string_vec_name_ascii + procedure,private :: json_value_add_string_vec_val_ascii +#endif + + !> + ! These are like the `add` methods, except if a variable with the + ! same path is already present, then its value is simply updated. + ! Note that currently, these only work for scalar variables. + ! These routines can also change the variable's type (but an error will be + ! thrown if the existing variable is not a scalar). + ! + !### See also + ! * [[json_core(type):add_by_path]] - this one can be used to change + ! arrays and objects to scalars if so desired. + ! + !@note Unlike some routines, the `found` output is not optional, + ! so it doesn't present exceptions from being thrown. + ! + !@note These have been mostly supplanted by the [[json_core(type):add_by_path]] + ! methods, which do a similar thing (and can be used for + ! scalars and vectors, etc.) 
+ generic,public :: update => MAYBEWRAP(json_update_logical),& +#ifndef REAL32 + MAYBEWRAP(json_update_real32),& +#endif + MAYBEWRAP(json_update_real),& +#ifdef REAL128 + MAYBEWRAP(json_update_real64),& +#endif + + MAYBEWRAP(json_update_integer),& + MAYBEWRAP(json_update_string) +#ifdef USE_UCS4 + generic,public :: update => json_update_string_name_ascii,& + json_update_string_val_ascii +#endif + procedure,private :: MAYBEWRAP(json_update_logical) +#ifndef REAL32 + procedure,private :: MAYBEWRAP(json_update_real32) +#endif + procedure,private :: MAYBEWRAP(json_update_real) +#ifdef REAL128 + procedure,private :: MAYBEWRAP(json_update_real64) +#endif + procedure,private :: MAYBEWRAP(json_update_integer) + procedure,private :: MAYBEWRAP(json_update_string) +#ifdef USE_UCS4 + procedure,private :: json_update_string_name_ascii + procedure,private :: json_update_string_val_ascii +#endif + + !> + ! Add variables to a [[json_value]] linked list + ! by specifying their paths. + ! + !### Example + ! + !````fortran + ! use, intrinsic :: iso_fortran_env, only: output_unit + ! use json_module, wp=>json_RK + ! type(json_core) :: json + ! type(json_value) :: p + ! call json%create_object(p,'root') ! create the root + ! ! now add some variables using the paths: + ! call json%add_by_path(p,'inputs.t', 0.0_wp ) + ! call json%add_by_path(p,'inputs.x(1)', 100.0_wp) + ! call json%add_by_path(p,'inputs.x(2)', 200.0_wp) + ! call json%print(p) ! now print to console + !```` + ! + !### Notes + ! * This uses [[json_create_by_path]] + ! + !### See also + ! * The `json_core%update` methods. + ! 
* [[json_create_by_path]] + + generic,public :: add_by_path => MAYBEWRAP(json_add_member_by_path),& + MAYBEWRAP(json_add_integer_by_path),& +#ifndef REAL32 + MAYBEWRAP(json_add_real32_by_path),& +#endif + MAYBEWRAP(json_add_real_by_path),& +#ifdef REAL128 + MAYBEWRAP(json_add_real64_by_path),& +#endif + MAYBEWRAP(json_add_logical_by_path),& + MAYBEWRAP(json_add_string_by_path),& + MAYBEWRAP(json_add_integer_vec_by_path),& +#ifndef REAL32 + MAYBEWRAP(json_add_real32_vec_by_path),& +#endif + MAYBEWRAP(json_add_real_vec_by_path),& +#ifdef REAL128 + MAYBEWRAP(json_add_real64_vec_by_path),& +#endif + MAYBEWRAP(json_add_logical_vec_by_path),& + MAYBEWRAP(json_add_string_vec_by_path) +#ifdef USE_UCS4 + generic,public :: add_by_path => json_add_string_by_path_value_ascii,& + json_add_string_by_path_path_ascii,& + json_add_string_vec_by_path_value_ascii,& + json_add_string_vec_by_path_path_ascii +#endif + procedure :: MAYBEWRAP(json_add_member_by_path) + procedure :: MAYBEWRAP(json_add_integer_by_path) +#ifndef REAL32 + procedure :: MAYBEWRAP(json_add_real32_by_path) +#endif + procedure :: MAYBEWRAP(json_add_real_by_path) +#ifdef REAL128 + procedure :: MAYBEWRAP(json_add_real64_by_path) +#endif + procedure :: MAYBEWRAP(json_add_logical_by_path) + procedure :: MAYBEWRAP(json_add_string_by_path) + procedure :: MAYBEWRAP(json_add_integer_vec_by_path) +#ifndef REAL32 + procedure :: MAYBEWRAP(json_add_real32_vec_by_path) +#endif + procedure :: MAYBEWRAP(json_add_real_vec_by_path) +#ifdef REAL128 + procedure :: MAYBEWRAP(json_add_real64_vec_by_path) +#endif + procedure :: MAYBEWRAP(json_add_logical_vec_by_path) + procedure :: MAYBEWRAP(json_add_string_vec_by_path) +#ifdef USE_UCS4 + procedure :: json_add_string_by_path_value_ascii + procedure :: json_add_string_by_path_path_ascii + procedure :: json_add_string_vec_by_path_value_ascii + procedure :: json_add_string_vec_by_path_path_ascii +#endif + + !> + ! Create a [[json_value]] linked list using the + ! path to the variables. 
Optionally return a + ! pointer to the variable. + ! + ! (This will create a `null` variable) + ! + !### See also + ! * [[json_core(type):add_by_path]] + + generic,public :: create => MAYBEWRAP(json_create_by_path) + procedure :: MAYBEWRAP(json_create_by_path) + + !> + ! Get data from a [[json_value]] linked list. + ! + !@note There are two versions (e.g. [[json_get_integer]] and [[json_get_integer_by_path]]). + ! The first one gets the value from the [[json_value]] passed into the routine, + ! while the second one gets the value from the [[json_value]] found by parsing the + ! path. The path version is split up into unicode and non-unicode versions. + + generic,public :: get => & + MAYBEWRAP(json_get_by_path), & + json_get_integer, MAYBEWRAP(json_get_integer_by_path), & + json_get_integer_vec, MAYBEWRAP(json_get_integer_vec_by_path), & +#ifndef REAL32 + json_get_real32, MAYBEWRAP(json_get_real32_by_path), & + json_get_real32_vec, MAYBEWRAP(json_get_real32_vec_by_path), & +#endif + json_get_real, MAYBEWRAP(json_get_real_by_path), & + json_get_real_vec, MAYBEWRAP(json_get_real_vec_by_path), & +#ifdef REAL128 + json_get_real64, MAYBEWRAP(json_get_real64_by_path), & + json_get_real64_vec, MAYBEWRAP(json_get_real64_vec_by_path), & +#endif + json_get_logical, MAYBEWRAP(json_get_logical_by_path), & + json_get_logical_vec, MAYBEWRAP(json_get_logical_vec_by_path), & + json_get_string, MAYBEWRAP(json_get_string_by_path), & + json_get_string_vec, MAYBEWRAP(json_get_string_vec_by_path), & + json_get_alloc_string_vec, MAYBEWRAP(json_get_alloc_string_vec_by_path),& + json_get_array, MAYBEWRAP(json_get_array_by_path) + + procedure,private :: json_get_integer + procedure,private :: json_get_integer_vec +#ifndef REAL32 + procedure,private :: json_get_real32 + procedure,private :: json_get_real32_vec +#endif + procedure,private :: json_get_real + procedure,private :: json_get_real_vec +#ifdef REAL128 + procedure,private :: json_get_real64 + procedure,private :: json_get_real64_vec 
+#endif + procedure,private :: json_get_logical + procedure,private :: json_get_logical_vec + procedure,private :: json_get_string + procedure,private :: json_get_string_vec + procedure,private :: json_get_alloc_string_vec + procedure,private :: json_get_array + procedure,private :: MAYBEWRAP(json_get_by_path) + procedure,private :: MAYBEWRAP(json_get_integer_by_path) + procedure,private :: MAYBEWRAP(json_get_integer_vec_by_path) +#ifndef REAL32 + procedure,private :: MAYBEWRAP(json_get_real32_by_path) + procedure,private :: MAYBEWRAP(json_get_real32_vec_by_path) +#endif + procedure,private :: MAYBEWRAP(json_get_real_by_path) + procedure,private :: MAYBEWRAP(json_get_real_vec_by_path) +#ifdef REAL128 + procedure,private :: MAYBEWRAP(json_get_real64_by_path) + procedure,private :: MAYBEWRAP(json_get_real64_vec_by_path) +#endif + procedure,private :: MAYBEWRAP(json_get_logical_by_path) + procedure,private :: MAYBEWRAP(json_get_logical_vec_by_path) + procedure,private :: MAYBEWRAP(json_get_string_by_path) + procedure,private :: MAYBEWRAP(json_get_string_vec_by_path) + procedure,private :: MAYBEWRAP(json_get_array_by_path) + procedure,private :: MAYBEWRAP(json_get_alloc_string_vec_by_path) + procedure,private :: json_get_by_path_default + procedure,private :: json_get_by_path_rfc6901 + procedure,private :: json_get_by_path_jsonpath_bracket + + !> + ! Print the [[json_value]] to an output unit or file. + ! + !### Example + ! + !````fortran + ! type(json_core) :: json + ! type(json_value) :: p + ! !... + ! call json%print(p,'test.json') !this is [[json_print_to_filename]] + !```` + generic,public :: print => json_print_to_console,& + json_print_to_unit,& + json_print_to_filename + procedure :: json_print_to_console + procedure :: json_print_to_unit + procedure :: json_print_to_filename + + !> + ! Destructor routine for a [[json_value]] pointer. + ! This must be called explicitly if it is no longer needed, + ! before it goes out of scope. 
Otherwise, a memory leak will result. + ! + !### Example + ! + ! Destroy the [[json_value]] pointer before the variable goes out of scope: + !````fortran + ! subroutine example1() + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! call json%create_object(p,'') + ! call json%add(p,'year',2015) + ! call json%print(p) + ! call json%destroy(p) + ! end subroutine example1 + !```` + ! + ! Note: it should NOT be called for a [[json_value]] pointer than has already been + ! added to another [[json_value]] structure, since doing so may render the + ! other structure invalid. Consider the following example: + !````fortran + ! subroutine example2(p) + ! type(json_core) :: json + ! type(json_value),pointer,intent(out) :: p + ! type(json_value),pointer :: q + ! call json%create_object(p,'') + ! call json%add(p,'year',2015) + ! call json%create_object(q,'q') + ! call json%add(q,'val',1) + ! call json%add(p, q) !add q to p structure + ! ! do NOT call json%destroy(q) here, because q is + ! ! now part of the output structure p. p should be destroyed + ! ! somewhere upstream by the caller of this routine. + ! nullify(q) !OK, but not strictly necessary + ! end subroutine example2 + !```` + generic,public :: destroy => json_value_destroy,destroy_json_core + procedure :: json_value_destroy + procedure :: destroy_json_core + + !> + ! If the child variable is present, then remove it. + generic,public :: remove_if_present => MAYBEWRAP(json_value_remove_if_present) + procedure :: MAYBEWRAP(json_value_remove_if_present) + + !> + ! Allocate a [[json_value]] pointer and make it a real variable. + ! The pointer should not already be allocated. + ! + !### Example + ! + !````fortran + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! call json%create_real(p,'value',1.0_RK) + !```` + ! + !### Note + ! * [[json_core(type):create_real]] is just an alias + ! to this one for backward compatibility. 
+ generic,public :: create_real => MAYBEWRAP(json_value_create_real) + procedure :: MAYBEWRAP(json_value_create_real) +#ifndef REAL32 + generic,public :: create_real => MAYBEWRAP(json_value_create_real32) + procedure :: MAYBEWRAP(json_value_create_real32) +#endif +#ifdef REAL128 + generic,public :: create_real => MAYBEWRAP(json_value_create_real64) + procedure :: MAYBEWRAP(json_value_create_real64) +#endif + + !> + ! This is equivalent to [[json_core(type):create_real]], + ! and is here only for backward compatibility. + generic,public :: create_double => MAYBEWRAP(json_value_create_real) +#ifndef REAL32 + generic,public :: create_double => MAYBEWRAP(json_value_create_real32) +#endif +#ifdef REAL128 + generic,public :: create_double => MAYBEWRAP(json_value_create_real64) +#endif + + !> + ! Allocate a [[json_value]] pointer and make it an array variable. + ! The pointer should not already be allocated. + ! + !### Example + ! + !````fortran + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! call json%create_array(p,'arrayname') + !```` + generic,public :: create_array => MAYBEWRAP(json_value_create_array) + procedure :: MAYBEWRAP(json_value_create_array) + + !> + ! Allocate a [[json_value]] pointer and make it an object variable. + ! The pointer should not already be allocated. + ! + !### Example + ! + !````fortran + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! call json%create_object(p,'objectname') + !```` + ! + !@note The name is not significant for the root structure or an array element. + ! In those cases, an empty string can be used. + generic,public :: create_object => MAYBEWRAP(json_value_create_object) + procedure :: MAYBEWRAP(json_value_create_object) + + !> + ! Allocate a json_value pointer and make it a null variable. + ! The pointer should not already be allocated. + ! + !### Example + ! + !````fortran + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! 
call json%create_null(p,'value') + !```` + generic,public :: create_null => MAYBEWRAP(json_value_create_null) + procedure :: MAYBEWRAP(json_value_create_null) + + !> + ! Allocate a json_value pointer and make it a string variable. + ! The pointer should not already be allocated. + ! + !### Example + ! + !````fortran + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! call json%create_string(p,'value','foobar') + !```` + generic,public :: create_string => MAYBEWRAP(json_value_create_string) + procedure :: MAYBEWRAP(json_value_create_string) + + !> + ! Allocate a json_value pointer and make it an integer variable. + ! The pointer should not already be allocated. + ! + !### Example + ! + !````fortran + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! call json%create_integer(p,42,'value') + !```` + generic,public :: create_integer => MAYBEWRAP(json_value_create_integer) + procedure :: MAYBEWRAP(json_value_create_integer) + + !> + ! Allocate a json_value pointer and make it a logical variable. + ! The pointer should not already be allocated. + ! + !### Example + ! + !````fortran + ! type(json_core) :: json + ! type(json_value),pointer :: p + ! call json%create_logical(p,'value',.true.) + !```` + generic,public :: create_logical => MAYBEWRAP(json_value_create_logical) + procedure :: MAYBEWRAP(json_value_create_logical) + + !> + ! Parse the JSON file and populate the [[json_value]] tree. + generic,public :: load => json_parse_file + procedure :: json_parse_file + + !> + ! Print the [[json_value]] structure to an allocatable string + procedure,public :: serialize => json_value_to_string + + !> + ! The same as `serialize`, but only here for backward compatibility + procedure,public :: print_to_string => json_value_to_string + + !> + ! Parse the JSON string and populate the [[json_value]] tree. + generic,public :: deserialize => MAYBEWRAP(json_parse_string) + procedure :: MAYBEWRAP(json_parse_string) + + !> + ! 
Same as `load` and `deserialize` but only here for backward compatibility. + generic,public :: parse => json_parse_file, & + MAYBEWRAP(json_parse_string) + + !> + ! Throw an exception. + generic,public :: throw_exception => MAYBEWRAP(json_throw_exception) + procedure :: MAYBEWRAP(json_throw_exception) + + !> + ! Rename a [[json_value]] variable. + generic,public :: rename => MAYBEWRAP(json_value_rename),& + MAYBEWRAP(json_rename_by_path) + procedure :: MAYBEWRAP(json_value_rename) + procedure :: MAYBEWRAP(json_rename_by_path) +#ifdef USE_UCS4 + generic,public :: rename => json_rename_by_path_name_ascii,& + json_rename_by_path_path_ascii + procedure :: json_rename_by_path_name_ascii + procedure :: json_rename_by_path_path_ascii +#endif + + !> + ! get info about a [[json_value]] + generic,public :: info => json_info, MAYBEWRAP(json_info_by_path) + procedure :: json_info + procedure :: MAYBEWRAP(json_info_by_path) + + !> + ! get string info about a [[json_value]] + generic,public :: string_info => json_string_info + procedure :: json_string_info + + !> + ! get matrix info about a [[json_value]] + generic,public :: matrix_info => json_matrix_info, MAYBEWRAP(json_matrix_info_by_path) + procedure :: json_matrix_info + procedure :: MAYBEWRAP(json_matrix_info_by_path) + + !> + ! insert a new element after an existing one, + ! updating the JSON structure accordingly + generic,public :: insert_after => json_value_insert_after, & + json_value_insert_after_child_by_index + procedure :: json_value_insert_after + procedure :: json_value_insert_after_child_by_index + + !> + ! get the path to a JSON variable in a structure: + generic,public :: get_path => MAYBEWRAP(json_get_path) + procedure :: MAYBEWRAP(json_get_path) + + !> + ! verify if a path is valid + ! (i.e., a variable with this path exists in the file). + generic,public :: valid_path => MAYBEWRAP(json_valid_path) + procedure :: MAYBEWRAP(json_valid_path) + + procedure,public :: remove => json_value_remove !! 
Remove a [[json_value]] from a + !! linked-list structure. + procedure,public :: replace => json_value_replace !! Replace a [[json_value]] in a + !! linked-list structure. + procedure,public :: reverse => json_value_reverse !! Reverse the order of the children + !! of an array of object. + procedure,public :: check_for_errors => json_check_for_errors !! check for error and get error message + procedure,public :: clear_exceptions => json_clear_exceptions !! clear exceptions + procedure,public :: count => json_count !! count the number of children + procedure,public :: clone => json_clone !! clone a JSON structure (deep copy) + procedure,public :: failed => json_failed !! check for error + procedure,public :: get_parent => json_get_parent !! get pointer to json_value parent + procedure,public :: get_next => json_get_next !! get pointer to json_value next + procedure,public :: get_previous => json_get_previous !! get pointer to json_value previous + procedure,public :: get_tail => json_get_tail !! get pointer to json_value tail + procedure,public :: initialize => json_initialize !! to initialize some parsing parameters + procedure,public :: traverse => json_traverse !! to traverse all elements of a JSON + !! structure + procedure,public :: print_error_message => json_print_error_message !! simply routine to print error + !! messages + procedure,public :: swap => json_value_swap !! Swap two [[json_value]] pointers + !! in a structure (or two different + !! structures). + procedure,public :: is_child_of => json_value_is_child_of !! Check if a [[json_value]] is a + !! descendant of another. + procedure,public :: validate => json_value_validate !! Check that a [[json_value]] linked + !! list is valid (i.e., is properly + !! constructed). This may be useful + !! if it has been constructed externally. + procedure,public :: check_for_duplicate_keys & + => json_check_all_for_duplicate_keys !! Check entire JSON structure + !! 
for duplicate keys (recursively) + procedure,public :: check_children_for_duplicate_keys & + => json_check_children_for_duplicate_keys !! Check a `json_value` object's + !! children for duplicate keys + + !other private routines: + procedure :: name_equal + procedure :: name_strings_equal + procedure :: json_value_print + procedure :: string_to_int + procedure :: string_to_dble + procedure :: prepare_parser => json_prepare_parser + procedure :: parse_end => json_parse_end + procedure :: parse_value + procedure :: parse_number + procedure :: parse_string + procedure :: parse_for_chars + procedure :: parse_object + procedure :: parse_array + procedure :: annotate_invalid_json + procedure :: pop_char + procedure :: push_char + procedure :: get_current_line_from_file_stream + procedure,nopass :: get_current_line_from_file_sequential + procedure :: convert + procedure :: to_string + procedure :: to_logical + procedure :: to_integer + procedure :: to_real + procedure :: to_null + procedure :: to_object + procedure :: to_array + procedure,nopass :: json_value_clone_func + procedure :: is_vector => json_is_vector + + end type json_core + !********************************************************* + + !********************************************************* + !> + ! Structure constructor to initialize a + ! [[json_core(type)]] object + ! + !### Example + ! + !```fortran + ! type(json_file) :: json_core + ! json_core = json_core() + !``` + interface json_core + module procedure initialize_json_core + end interface + !********************************************************* + + !************************************************************************************* + abstract interface + + subroutine json_array_callback_func(json, element, i, count) + !! Array element callback function. 
Used by [[json_get_array]] + import :: json_value,json_core,IK + implicit none + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: element + integer(IK),intent(in) :: i !! index + integer(IK),intent(in) :: count !! size of array + end subroutine json_array_callback_func + + subroutine json_traverse_callback_func(json,p,finished) + !! Callback function used by [[json_traverse]] + import :: json_value,json_core,LK + implicit none + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + logical(LK),intent(out) :: finished !! set true to stop traversing + end subroutine json_traverse_callback_func + + end interface + public :: json_array_callback_func + public :: json_traverse_callback_func + !************************************************************************************* + + contains +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 4/17/2016 +! +! Destructor for the [[json_core(type)]] type. + + subroutine destroy_json_core(me) + + implicit none + + class(json_core),intent(out) :: me + + end subroutine destroy_json_core +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 4/26/2016 +! +! Function constructor for a [[json_core(type)]]. +! This is just a wrapper for [[json_initialize]]. +! +!@note [[initialize_json_core]], [[json_initialize]], +! [[initialize_json_core_in_file]], and [[initialize_json_file]] +! all have a similar interface. 
+ + function initialize_json_core(& +#include ""json_initialize_dummy_arguments.inc"" + ) result(json_core_object) + + implicit none + + type(json_core) :: json_core_object +#include ""json_initialize_arguments.inc"" + + call json_core_object%initialize(& +#include ""json_initialize_dummy_arguments.inc"" + ) + + end function initialize_json_core +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/4/2013 +! +! Initialize the [[json_core(type)]] instance. +! +! The routine may be called before any of the [[json_core(type)]] methods are used in +! order to specify certain parameters. If it is not called, then the defaults +! are used. This routine is also called internally by various routines. +! It can also be called to clear exceptions, or to reset some +! of the variables (note that only the arguments present are changed). +! +!### Modified +! * Izaak Beekman : 02/24/2015 +! +!@note [[initialize_json_core]], [[json_initialize]], +! [[initialize_json_core_in_file]], and [[initialize_json_file]] +! all have a similar interface. + + subroutine json_initialize(me,& +#include ""json_initialize_dummy_arguments.inc"" + ) + + implicit none + + class(json_core),intent(inout) :: me +#include ""json_initialize_arguments.inc"" + + character(kind=CDK,len=10) :: w !! max string length + character(kind=CDK,len=10) :: d !! real precision digits + character(kind=CDK,len=10) :: e !! real exponent digits + character(kind=CDK,len=2) :: sgn !! sign flag: `ss` or `sp` + character(kind=CDK,len=2) :: rl_edit_desc !! `G`, `E`, `EN`, or `ES` + integer(IK) :: istat !! `iostat` flag for + !! write statements + logical(LK) :: sgn_prnt !! print sign flag + character(kind=CK,len=max_integer_str_len) :: istr !! for integer to + !! 
string conversion + + !reset exception to false: + call me%clear_exceptions() + + !Just in case, clear these global variables also: + me%pushed_index = 0 + me%pushed_char = CK_'' + me%char_count = 0 + me%line_count = 1 + me%ipos = 1 + if (use_unformatted_stream) then + me%filesize = 0 + me%ichunk = 0 + me%chunk = repeat(space, stream_chunk_size) ! default chunk size + end if + +#ifdef USE_UCS4 + ! reopen stdout and stderr with utf-8 encoding + open(output_unit,encoding='utf-8') + open(error_unit, encoding='utf-8') +#endif + + !various optional inputs: + if (present(spaces_per_tab)) & + me%spaces_per_tab = spaces_per_tab + if (present(stop_on_error)) & + me%stop_on_error = stop_on_error + if (present(verbose)) & + me%is_verbose = verbose + if (present(strict_type_checking)) & + me%strict_type_checking = strict_type_checking + if (present(trailing_spaces_significant)) & + me%trailing_spaces_significant = trailing_spaces_significant + if (present(case_sensitive_keys)) & + me%case_sensitive_keys = case_sensitive_keys + if (present(no_whitespace)) & + me%no_whitespace = no_whitespace + if (present(unescape_strings)) & + me%unescaped_strings = unescape_strings + if (present(path_mode)) then + if (path_mode==1_IK .or. path_mode==2_IK .or. path_mode==3_IK) then + me%path_mode = path_mode + else + me%path_mode = 1_IK ! just to have a valid value + call me%throw_exception('Invalid path_mode.') + end if + end if + + ! if we are allowing comments in the file: + ! [an empty string disables comments] + if (present(comment_char)) then + me%allow_comments = comment_char/=CK_'' + me%comment_char = trim(adjustl(comment_char)) + end if + + ! path separator: + if (present(path_separator)) then + me%path_separator = path_separator + end if + + ! printing vectors in compressed form: + if (present(compress_vectors)) then + me%compress_vectors = compress_vectors + end if + + ! 
checking for duplicate keys: + if (present(allow_duplicate_keys)) then + me%allow_duplicate_keys = allow_duplicate_keys + end if + + ! if escaping the forward slash: + if (present(escape_solidus)) then + me%escape_solidus = escape_solidus + end if + + ! how to handle null to read conversions: + if (present(null_to_real_mode)) then + select case (null_to_real_mode) + case(1_IK:3_IK) + me%null_to_real_mode = null_to_real_mode + case default + me%null_to_real_mode = 2_IK ! just to have a valid value + call integer_to_string(null_to_real_mode,int_fmt,istr) + call me%throw_exception('Invalid null_to_real_mode: '//istr) + end select + end if + + ! how to handle NaN and Infinities: + if (present(non_normal_mode)) then + select case (non_normal_mode) + case(1_IK) ! use strings + me%non_normals_to_null = .false. + case(2_IK) ! use null + me%non_normals_to_null = .true. + case default + call integer_to_string(non_normal_mode,int_fmt,istr) + call me%throw_exception('Invalid non_normal_mode: '//istr) + end select + end if + + if (present(use_quiet_nan)) then + me%use_quiet_nan = use_quiet_nan + end if + + if (present(strict_integer_type_checking)) then + me%strict_integer_type_checking = strict_integer_type_checking + end if + + !Set the format for real numbers: + ! [if not changing it, then it remains the same] + + if ( (.not. allocated(me%real_fmt)) .or. & ! if this hasn't been done yet + present(compact_reals) .or. & + present(print_signs) .or. & + present(real_format) ) then + + !allow the special case where real format is '*': + ! [this overrides the other options] + if (present(real_format)) then + if (real_format==star) then + if (present(compact_reals)) then + ! we will also allow for compact reals with + ! '*' format, if both arguments are present. + me%compact_real = compact_reals + else + me%compact_real = .false. 
+ end if + me%real_fmt = star + return + end if + end if + + if (present(compact_reals)) me%compact_real = compact_reals + + !set defaults + sgn_prnt = .false. + if ( present( print_signs) ) sgn_prnt = print_signs + if ( sgn_prnt ) then + sgn = 'sp' + else + sgn = 'ss' + end if + + rl_edit_desc = 'E' + if ( present( real_format ) ) then + select case ( real_format ) + case ('g','G','e','E','en','EN','es','ES') + rl_edit_desc = real_format + case default + call me%throw_exception('Invalid real format, ""' // & + trim(real_format) // '"", passed to json_initialize.'// & + new_line('a') // 'Acceptable formats are: ""G"", ""E"", ""EN"", and ""ES"".' ) + end select + end if + + ! set the default output/input format for reals: + write(w,'(ss,I0)',iostat=istat) max_numeric_str_len + if (istat==0) write(d,'(ss,I0)',iostat=istat) real_precision + if (istat==0) write(e,'(ss,I0)',iostat=istat) real_exponent_digits + if (istat==0) then + me%real_fmt = '(' // sgn // ',' // trim(rl_edit_desc) //& + trim(w) // '.' // trim(d) // 'E' // trim(e) // ')' + else + me%real_fmt = '(' // sgn // ',' // trim(rl_edit_desc) // & + '27.17E4)' !just use this one (should never happen) + end if + + end if + + end subroutine json_initialize +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Returns true if `name` is equal to `p%name`, using the specified +! settings for case sensitivity and trailing whitespace. +! +!### History +! * 4/30/2016 : original version +! * 8/25/2017 : now just a wrapper for [[name_strings_equal]] + + function name_equal(json,p,name) result(is_equal) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),intent(in) :: p !! the json object + character(kind=CK,len=*),intent(in) :: name !! the name to check for + logical(LK) :: is_equal !! true if the string are + !! 
lexically equal + + if (allocated(p%name)) then + ! call the low-level routines for the name strings: + is_equal = json%name_strings_equal(p%name,name) + else + is_equal = name == CK_'' ! check a blank name + end if + + end function name_equal +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 8/25/2017 +! +! Returns true if the name strings `name1` is equal to `name2`, using +! the specified settings for case sensitivity and trailing whitespace. + + function name_strings_equal(json,name1,name2) result(is_equal) + + implicit none + + class(json_core),intent(inout) :: json + character(kind=CK,len=*),intent(in) :: name1 !! the name to check + character(kind=CK,len=*),intent(in) :: name2 !! the name to check + logical(LK) :: is_equal !! true if the string are + !! lexically equal + + !must be the same length if we are treating + !trailing spaces as significant, so do a + !quick test of this first: + if (json%trailing_spaces_significant) then + is_equal = len(name1) == len(name2) + if (.not. is_equal) return + end if + + if (json%case_sensitive_keys) then + is_equal = name1 == name2 + else + is_equal = lowercase_string(name1) == lowercase_string(name2) + end if + + end function name_strings_equal +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 10/31/2015 +! +! Create a deep copy of a [[json_value]] linked-list structure. +! +!### Notes +! +! * If `from` has children, then they are also cloned. +! * The parent of `from` is not linked to `to`. +! * If `from` is an element of an array, then the previous and +! next entries are not cloned (only that element and it's children, if any). +! +!### Example +! +!````fortran +! 
program test +! use json_module +! implicit none +! type(json_core) :: json +! type(json_value),pointer :: j1, j2 +! call json%load('../files/inputs/test1.json',j1) +! call json%clone(j1,j2) !now have two independent copies +! call json%destroy(j1) !destroys j1, but j2 remains +! call json%print(j2,'j2.json') +! call json%destroy(j2) +! end program test +!```` + + subroutine json_clone(json,from,to) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: from !! this is the structure to clone + type(json_value),pointer :: to !! the clone is put here + !! (it must not already be associated) + + !call the main function: + call json%json_value_clone_func(from,to) + + end subroutine json_clone +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 10/31/2015 +! +! Recursive deep copy function called by [[json_clone]]. +! +!@note If new data is added to the [[json_value]] type, +! then this would need to be updated. + + recursive subroutine json_value_clone_func(from,to,parent,previous,tail) + + implicit none + + type(json_value),pointer :: from !! this is the structure to clone + type(json_value),pointer :: to !! the clone is put here (it + !! must not already be associated) + type(json_value),pointer,optional :: parent !! to%parent + type(json_value),pointer,optional :: previous !! to%previous + logical,optional :: tail !! if ""to"" is the tail of + !! its parent's children + + nullify(to) + + if (associated(from)) then + + allocate(to) + + !copy over the data variables: + ! [note: the allocate() statements don't work here for the + ! 
deferred-length characters in gfortran-4.9] + if (allocated(from%name)) to%name = from%name + if (allocated(from%dbl_value)) allocate(to%dbl_value,source=from%dbl_value) + if (allocated(from%log_value)) allocate(to%log_value,source=from%log_value) + if (allocated(from%str_value)) to%str_value = from%str_value + if (allocated(from%int_value)) allocate(to%int_value,source=from%int_value) + to%var_type = from%var_type + to%n_children = from%n_children + + ! allocate and associate the pointers as necessary: + if (present(parent)) to%parent => parent + if (present(previous)) to%previous => previous + if (present(tail)) then + if (tail .and. associated(to%parent)) to%parent%tail => to + end if + + if (associated(from%next) .and. associated(to%parent)) then + ! we only clone the next entry in an array + ! if the parent has also been cloned + call json_value_clone_func(from = from%next,& + to = to%next,& + previous = to,& + parent = to%parent,& + tail = (.not. associated(from%next%next))) + end if + + if (associated(from%children)) then + call json_value_clone_func(from = from%children,& + to = to%children,& + parent = to,& + tail = (.not. associated(from%children%next))) + end if + + end if + + end subroutine json_value_clone_func +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Destroy the data within a [[json_value]], and reset type to `json_unknown`. 
+ + pure subroutine destroy_json_data(d) + + implicit none + + type(json_value),intent(inout) :: d + + d%var_type = json_unknown + + if (allocated(d%log_value)) deallocate(d%log_value) + if (allocated(d%int_value)) deallocate(d%int_value) + if (allocated(d%dbl_value)) deallocate(d%dbl_value) + if (allocated(d%str_value)) deallocate(d%str_value) + + end subroutine destroy_json_data +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 2/13/2014 +! +! Returns information about a [[json_value]]. + + subroutine json_info(json,p,var_type,n_children,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + integer(IK),intent(out),optional :: var_type !! variable type + integer(IK),intent(out),optional :: n_children !! number of children + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + if (.not. json%exception_thrown .and. associated(p)) then + + if (present(var_type)) var_type = p%var_type + if (present(n_children)) n_children = json%count(p) + if (present(name)) then + if (allocated(p%name)) then + name = p%name + else + name = CK_'' + end if + end if + + else ! error + + if (.not. json%exception_thrown) then + call json%throw_exception('Error in json_info: '//& + 'pointer is not associated.' ) + end if + if (present(var_type)) var_type = json_unknown + if (present(n_children)) n_children = 0 + if (present(name)) name = CK_'' + + end if + + end subroutine json_info +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/18/2016 +! +! Returns information about character strings returned from a [[json_value]]. 
+ + subroutine json_string_info(json,p,ilen,max_str_len,found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + integer(IK),dimension(:),allocatable,intent(out),optional :: ilen !! if `p` is an array, this + !! is the actual length + !! of each character + !! string in the array. + !! if not an array, this + !! is returned unallocated. + integer(IK),intent(out),optional :: max_str_len !! The maximum length required to + !! hold the string representation returned + !! by a call to a `get` routine. If a scalar, + !! this is just the length of the scalar. If + !! a vector, this is the maximum length of + !! any element. + logical(LK),intent(out),optional :: found !! true if there were no errors. + !! if not present, an error will + !! throw an exception + + character(kind=CK,len=:),allocatable :: cval !! for getting values as strings. + logical(LK) :: initialized !! if the output array has been sized + logical(LK) :: get_max_len !! if we are returning the `max_str_len` + logical(LK) :: get_ilen !! if we are returning the `ilen` array + integer(IK) :: var_type !! variable type + + get_max_len = present(max_str_len) + get_ilen = present(ilen) + + if (.not. json%exception_thrown) then + + if (present(found)) found = .true. + initialized = .false. + + if (get_max_len) max_str_len = 0 + + select case (p%var_type) + + case (json_array) ! it's an array + + ! call routine for each element + call json%get(p, array_callback=get_string_lengths) + + case default ! not an array + + if (json%strict_type_checking) then + ! only allowing strings to be returned + ! as strings, so we can check size directly + call json%info(p,var_type=var_type) + if (var_type==json_string) then + if (allocated(p%str_value) .and. get_max_len) & + max_str_len = len(p%str_value) + else + ! 
it isn't a string, so there is no length + call json%throw_exception('Error in json_string_info: '//& + 'When strict_type_checking is true '//& + 'the variable must be a character string.',& + found) + end if + else + ! in this case, we have to get the value + ! as a string to know what size it is. + call json%get(p, value=cval) + if (.not. json%exception_thrown) then + if (allocated(cval) .and. get_max_len) & + max_str_len = len(cval) + end if + end if + + end select + + end if + + if (json%exception_thrown) then + if (present(found)) then + call json%clear_exceptions() + found = .false. + end if + if (get_max_len) max_str_len = 0 + if (get_ilen) then + if (allocated(ilen)) deallocate(ilen) + end if + end if + + contains + + subroutine get_string_lengths(json, element, i, count) + + !! callback function to call for each element in the array. + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: element + integer(IK),intent(in) :: i !! index + integer(IK),intent(in) :: count !! size of array + + character(kind=CK,len=:),allocatable :: cval + integer(IK) :: var_type + + if (json%exception_thrown) return + + if (.not. initialized) then + if (get_ilen) allocate(ilen(count)) + initialized = .true. + end if + + if (json%strict_type_checking) then + ! only allowing strings to be returned + ! as strings, so we can check size directly + call json%info(element,var_type=var_type) + if (var_type==json_string) then + if (allocated(element%str_value)) then + if (get_max_len) then + if (len(element%str_value)>max_str_len) & + max_str_len = len(element%str_value) + end if + if (get_ilen) ilen(i) = len(element%str_value) + else + if (get_ilen) ilen(i) = 0 + end if + else + ! it isn't a string, so there is no length + call json%throw_exception('Error in json_string_info: '//& + 'When strict_type_checking is true '//& + 'the array must contain only '//& + 'character strings.',found) + end if + else + ! 
in this case, we have to get the value + ! as a string to know what size it is. + call json%get(element, value=cval) + if (json%exception_thrown) return + if (allocated(cval)) then + if (get_max_len) then + if (len(cval)>max_str_len) max_str_len = len(cval) + end if + if (get_ilen) ilen(i) = len(cval) + else + if (get_ilen) ilen(i) = 0 + end if + end if + + end subroutine get_string_lengths + + end subroutine json_string_info +!***************************************************************************************** + +!***************************************************************************************** +! +! Returns information about a [[json_value]], given the path. +! +!### See also +! * [[json_info]] +! +!@note If `found` is present, no exceptions will be thrown if an +! error occurs. Otherwise, an exception will be thrown if the +! variable is not found. + + subroutine json_info_by_path(json,p,path,found,var_type,n_children,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! a JSON linked list + character(kind=CK,len=*),intent(in) :: path !! path to the variable + logical(LK),intent(out),optional :: found !! true if it was found + integer(IK),intent(out),optional :: var_type !! variable type + integer(IK),intent(out),optional :: n_children !! number of children + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + type(json_value),pointer :: p_var !! temporary pointer + logical(LK) :: ok !! if the variable was found +#if defined __GFORTRAN__ + character(kind=CK,len=:),allocatable :: p_name !! temporary variable for getting name +#endif + + call json%get(p,path,p_var,found) + + !check if it was found: + if (present(found)) then + ok = found + else + ok = .not. json%exception_thrown + end if + + if (.not. 
ok) then + if (present(var_type)) var_type = json_unknown + if (present(n_children)) n_children = 0 + if (present(name)) name = CK_'' + else + !get info: + +#if defined __GFORTRAN__ + call json%info(p_var,var_type,n_children) + if (present(name)) then !workaround for gfortran bug + if (allocated(p_var%name)) then + p_name = p_var%name + name = p_name + else + name = CK_'' + end if + end if +#else + call json%info(p_var,var_type,n_children,name) +#endif + + end if + + end subroutine json_info_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_info_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_info_by_path(json,p,path,found,var_type,n_children,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! a JSON linked list + character(kind=CDK,len=*),intent(in) :: path !! path to the variable + logical(LK),intent(out),optional :: found !! true if it was found + integer(IK),intent(out),optional :: var_type !! variable type + integer(IK),intent(out),optional :: n_children !! number of children + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + call json%info(p,to_unicode(path),found,var_type,n_children,name) + + end subroutine wrap_json_info_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 10/16/2015 +! +! Alternate version of [[json_info]] that returns matrix +! information about a [[json_value]]. +! +! A [[json_value]] is a valid rank 2 matrix if all of the following are true: +! +! * The var_type is *json_array* +! * Each child is also a *json_array*, each of which has the same number of elements +! 
* Each individual element has the same variable type (integer, logical, etc.) +! +! The idea here is that if it is a valid matrix, it can be interoperable with +! a Fortran rank 2 array of the same type. +! +!### Example +! +! The following example is an array with `var_type=json_integer`, +! `n_sets=3`, and `set_size=4` +! +!```json +! { +! ""matrix"": [ +! [1,2,3,4], +! [5,6,7,8], +! [9,10,11,12] +! ] +! } +!``` + + subroutine json_matrix_info(json,p,is_matrix,var_type,n_sets,set_size,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! a JSON linked list + logical(LK),intent(out) :: is_matrix !! true if it is a valid matrix + integer(IK),intent(out),optional :: var_type !! variable type of data in the matrix + !! (if all elements have the same type) + integer(IK),intent(out),optional :: n_sets !! number of data sets (i.e., matrix + !! rows if using row-major order) + integer(IK),intent(out),optional :: set_size !! size of each data set (i.e., matrix + !! cols if using row-major order) + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + type(json_value),pointer :: p_row !! for getting a set + type(json_value),pointer :: p_element !! for getting an element in a set + integer(IK) :: vartype !! json variable type of `p` + integer(IK) :: row_vartype !! json variable type of a row + integer(IK) :: element_vartype !! json variable type of an element in a row + integer(IK) :: nr !! number of children of `p` + integer(IK) :: nc !! number of elements in first child of `p` + integer(IK) :: icount !! number of elements in a set + integer(IK) :: i !! counter + integer(IK) :: j !! counter +#if defined __GFORTRAN__ + character(kind=CK,len=:),allocatable :: p_name !! 
temporary variable for getting name +#endif + + !get info about the variable: +#if defined __GFORTRAN__ + call json%info(p,vartype,nr) + if (present(name)) then !workaround for gfortran bug + if (allocated(p%name)) then + p_name = p%name + name = p_name + else + name = CK_'' + end if + end if +#else + call json%info(p,vartype,nr,name) +#endif + + is_matrix = (vartype==json_array) + + if (is_matrix) then + + main : do i=1,nr + + nullify(p_row) + call json%get_child(p,i,p_row) + if (.not. associated(p_row)) then + is_matrix = .false. + call json%throw_exception('Error in json_matrix_info: '//& + 'Malformed JSON linked list') + exit main + end if + call json%info(p_row,var_type=row_vartype,n_children=icount) + + if (row_vartype==json_array) then + if (i==1) nc = icount !number of columns in first row + if (icount==nc) then !make sure each row has the same number of columns + !see if all the variables in this row are the same type: + do j=1,icount + nullify(p_element) + call json%get_child(p_row,j,p_element) + if (.not. associated(p_element)) then + is_matrix = .false. + call json%throw_exception('Error in json_matrix_info: '//& + 'Malformed JSON linked list') + exit main + end if + call json%info(p_element,var_type=element_vartype) + if (i==1 .and. j==1) vartype = element_vartype !type of first element + !in the row + if (vartype/=element_vartype) then + !not all variables are the same time + is_matrix = .false. + exit main + end if + end do + else + is_matrix = .false. + exit main + end if + else + is_matrix = .false. 
+ exit main + end if + + end do main + + end if + + if (is_matrix) then + if (present(var_type)) var_type = vartype + if (present(n_sets)) n_sets = nr + if (present(set_size)) set_size = nc + else + if (present(var_type)) var_type = json_unknown + if (present(n_sets)) n_sets = 0 + if (present(set_size)) set_size = 0 + end if + + end subroutine json_matrix_info +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns matrix information about a [[json_value]], given the path. +! +!### See also +! * [[json_matrix_info]] +! +!@note If `found` is present, no exceptions will be thrown if an +! error occurs. Otherwise, an exception will be thrown if the +! variable is not found. + + subroutine json_matrix_info_by_path(json,p,path,is_matrix,found,& + var_type,n_sets,set_size,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! a JSON linked list + character(kind=CK,len=*),intent(in) :: path !! path to the variable + logical(LK),intent(out) :: is_matrix !! true if it is a valid matrix + logical(LK),intent(out),optional :: found !! true if it was found + integer(IK),intent(out),optional :: var_type !! variable type of data in + !! the matrix (if all elements have + !! the same type) + integer(IK),intent(out),optional :: n_sets !! number of data sets (i.e., matrix + !! rows if using row-major order) + integer(IK),intent(out),optional :: set_size !! size of each data set (i.e., matrix + !! cols if using row-major order) + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + type(json_value),pointer :: p_var + logical(LK) :: ok +#if defined __GFORTRAN__ + character(kind=CK,len=:),allocatable :: p_name !! 
temporary variable for getting name +#endif + + call json%get(p,path,p_var,found) + + !check if it was found: + if (present(found)) then + ok = found + else + ok = .not. json%exception_thrown + end if + + if (.not. ok) then + if (present(var_type)) var_type = json_unknown + if (present(n_sets)) n_sets = 0 + if (present(set_size)) set_size = 0 + if (present(name)) name = CK_'' + else + + !get info about the variable: +#if defined __GFORTRAN__ + call json%matrix_info(p_var,is_matrix,var_type,n_sets,set_size) + if (present(name)) then !workaround for gfortran bug + if (allocated(p_var%name)) then + p_name = p_var%name + name = p_name + else + name = CK_'' + end if + end if +#else + call json%matrix_info(p_var,is_matrix,var_type,n_sets,set_size,name) +#endif + if (json%exception_thrown .and. present(found)) then + found = .false. + call json%clear_exceptions() + end if + end if + + end subroutine json_matrix_info_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_matrix_info_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_matrix_info_by_path(json,p,path,is_matrix,found,& + var_type,n_sets,set_size,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! a JSON linked list + character(kind=CDK,len=*),intent(in) :: path !! path to the variable + logical(LK),intent(out) :: is_matrix !! true if it is a valid matrix + logical(LK),intent(out),optional :: found !! true if it was found + integer(IK),intent(out),optional :: var_type !! variable type of data in + !! the matrix (if all elements have + !! the same type) + integer(IK),intent(out),optional :: n_sets !! number of data sets (i.e., matrix + !! rows if using row-major order) + integer(IK),intent(out),optional :: set_size !! size of each data set (i.e., matrix + !! 
cols if using row-major order) + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! variable name + + call json%matrix_info(p,to_unicode(path),is_matrix,found,var_type,n_sets,set_size,name) + + end subroutine wrap_json_matrix_info_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 4/29/2016 +! +! Rename a [[json_value]]. + + subroutine json_value_rename(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + character(kind=CK,len=*),intent(in) :: name !! new variable name + + if (json%trailing_spaces_significant) then + p%name = name + else + p%name = trim(name) + end if + + end subroutine json_value_rename +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 4/29/2016 +! +! Alternate version of [[json_value_rename]], where `name` is kind=CDK. + + subroutine wrap_json_value_rename(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + character(kind=CDK,len=*),intent(in) :: name !! new variable name + + call json%rename(p,to_unicode(name)) + + end subroutine wrap_json_value_rename +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/4/2013 +! +! Clear exceptions in the [[json_core(type)]]. + + pure subroutine json_clear_exceptions(json) + + implicit none + + class(json_core),intent(inout) :: json + + !clear the flag and message: + json%exception_thrown = .false. 
+ if (allocated(json%err_message)) deallocate(json%err_message) + + end subroutine json_clear_exceptions +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/4/2013 +! +! Throw an exception in the [[json_core(type)]]. +! This routine sets the error flag, and prevents any subsequent routine +! from doing anything, until [[json_clear_exceptions]] is called. +! +!@note If `is_verbose` is true, this will also print a +! traceback if the Intel compiler is used. +! +!@note If `stop_on_error` is true, then the program is stopped. + + subroutine json_throw_exception(json,msg,found) + +#ifdef __INTEL_COMPILER + use ifcore, only: tracebackqq +#endif + + implicit none + + class(json_core),intent(inout) :: json + character(kind=CK,len=*),intent(in) :: msg !! the error message + logical(LK),intent(inout),optional :: found !! if the caller is handling the + !! exception with an optimal return + !! argument. If so, `json%stop_on_error` + !! is ignored. + + logical(LK) :: stop_on_error + + json%exception_thrown = .true. + json%err_message = trim(msg) + stop_on_error = json%stop_on_error .and. .not. present(found) + + if (stop_on_error) then + +#ifdef __INTEL_COMPILER + ! for Intel, we raise a traceback and quit + call tracebackqq(string=trim(msg), user_exit_code=0) +#else + write(error_unit,'(A)') 'JSON-Fortran Exception: '//trim(msg) + error stop 1 +#endif + + elseif (json%is_verbose) then + + write(output_unit,'(A)') '***********************' + write(output_unit,'(A)') 'JSON-Fortran Exception: '//trim(msg) + +!#if defined __GFORTRAN__ +! call backtrace() ! (have to compile with -fbacktrace -fall-intrinsics flags) +!#endif + +#ifdef __INTEL_COMPILER + call tracebackqq(user_exit_code=-1) ! 
print a traceback and return +#endif + + write(output_unit,'(A)') '***********************' + + end if + + end subroutine json_throw_exception +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_throw_exception]], where `msg` is kind=CDK. + + subroutine wrap_json_throw_exception(json,msg,found) + + implicit none + + class(json_core),intent(inout) :: json + character(kind=CDK,len=*),intent(in) :: msg !! the error message + logical(LK),intent(inout),optional :: found !! if the caller is handling the + !! exception with an optimal return + !! argument. If so, `json%stop_on_error` + !! is ignored. + + call json%throw_exception(to_unicode(msg),found) + + end subroutine wrap_json_throw_exception +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/4/2013 +! +! Retrieve error code from the [[json_core(type)]]. +! This should be called after `parse` to check for errors. +! If an error is thrown, before using the class again, [[json_initialize]] +! should be called to clean up before it is used again. +! +!### Example +! +!````fortran +! type(json_file) :: json +! logical :: status_ok +! character(kind=CK,len=:),allocatable :: error_msg +! call json%load(filename='myfile.json') +! call json%check_for_errors(status_ok, error_msg) +! if (.not. status_ok) then +! write(*,*) 'Error: '//error_msg +! call json%clear_exceptions() +! call json%destroy() +! end if +!```` +! +!### See also +! * [[json_failed]] +! * [[json_throw_exception]] + + subroutine json_check_for_errors(json,status_ok,error_msg) + + implicit none + + class(json_core),intent(in) :: json + logical(LK),intent(out),optional :: status_ok !! 
true if there were no errors + character(kind=CK,len=:),allocatable,intent(out),optional :: error_msg !! the error message. + !! (not allocated if + !! there were no errors) + +#if defined __GFORTRAN__ + character(kind=CK,len=:),allocatable :: tmp !! workaround for gfortran bugs +#endif + + if (present(status_ok)) status_ok = .not. json%exception_thrown + + if (present(error_msg)) then + if (json%exception_thrown) then + ! if an exception has been thrown, + ! then this will always be allocated + ! [see json_throw_exception] +#if defined __GFORTRAN__ + tmp = json%err_message + error_msg = tmp +#else + error_msg = json%err_message +#endif + end if + end if + + end subroutine json_check_for_errors +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/5/2013 +! +! Logical function to indicate if an exception has been thrown in a [[json_core(type)]]. +! +!### Example +! +!````fortran +! type(json_core) :: json +! type(json_value),pointer :: p +! logical :: status_ok +! character(len=:),allocatable :: error_msg +! call json%load(filename='myfile.json',p) +! if (json%failed()) then +! call json%check_for_errors(status_ok, error_msg) +! write(*,*) 'Error: '//error_msg +! call json%clear_exceptions() +! call json%destroy(p) +! end if +!```` +! +! Note that [[json_file]] contains a wrapper for this routine, which is used like: +!````fortran +! type(json_file) :: f +! logical :: status_ok +! character(len=:),allocatable :: error_msg +! call f%load(filename='myfile.json') +! if (f%failed()) then +! call f%check_for_errors(status_ok, error_msg) +! write(*,*) 'Error: '//error_msg +! call f%clear_exceptions() +! call f%destroy() +! end if +!```` +! +!### See also +! 
* [[json_check_for_errors]] + + pure function json_failed(json) result(failed) + + implicit none + + class(json_core),intent(in) :: json + logical(LK) :: failed !! will be true if an exception + !! has been thrown. + + failed = json%exception_thrown + + end function json_failed +!***************************************************************************************** + +!***************************************************************************************** +!> +! Allocate a [[json_value]] pointer variable. +! This should be called before adding data to it. +! +!### Example +! +!````fortran +! type(json_value),pointer :: var +! call json_value_create(var) +! call json%to_real(var,1.0_RK) +!```` +! +!### Notes +! 1. This routine does not check for exceptions. +! 2. The pointer should not already be allocated, or a memory leak will occur. + + subroutine json_value_create(p) + + implicit none + + type(json_value),pointer :: p + + nullify(p) + allocate(p) + + end subroutine json_value_create +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/22/2014 +! +! Destroy a [[json_value]] linked-list structure. +! +!@note The original FSON version of this +! routine was not properly freeing the memory. +! It was rewritten. +! +!@note This routine destroys this variable, it's children, and +! (if `destroy_next` is true) the subsequent elements in +! an object or array. It does not destroy the parent or +! previous elements. +! +!@Note There is some protection here to enable destruction of +! improperly-created linked lists. However, likely there +! are cases not handled. Use the [[json_value_validate]] +! method to validate a JSON structure that was manually +! created using [[json_value]] pointers. 
+ + pure recursive subroutine json_value_destroy(json,p,destroy_next) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! variable to destroy + logical(LK),intent(in),optional :: destroy_next !! if true, then `p%next` + !! is also destroyed (default is true) + + logical(LK) :: des_next !! local copy of `destroy_next` + !! optional argument + type(json_value),pointer :: child !! for getting child elements + logical :: circular !! to check to malformed linked lists + + if (associated(p)) then + + if (present(destroy_next)) then + des_next = destroy_next + else + des_next = .true. + end if + + if (allocated(p%name)) deallocate(p%name) + + call destroy_json_data(p) + + if (associated(p%next)) then + ! check for circular references: + if (associated(p, p%next)) nullify(p%next) + end if + + if (associated(p%children)) then + do while (p%n_children > 0) + child => p%children + if (associated(child)) then + p%children => p%children%next + p%n_children = p%n_children - 1 + ! check children for circular references: + circular = (associated(p%children) .and. & + associated(p%children,child)) + call json%destroy(child,destroy_next=.false.) + if (circular) exit + else + ! it is a malformed JSON object. But, we will + ! press ahead with the destroy process, since + ! otherwise, there would be no way to destroy it. + exit + end if + end do + nullify(p%children) + nullify(child) + end if + + if (associated(p%next) .and. des_next) call json%destroy(p%next) + + nullify(p%previous) + nullify(p%parent) + nullify(p%tail) + + if (associated(p)) deallocate(p) + nullify(p) + + end if + + end subroutine json_value_destroy +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 9/9/2014 +! +! Remove a [[json_value]] (and all its children) +! 
!  from a linked-list structure, preserving the rest of the structure.
!
!### Examples
!
!  To extract an object from one JSON structure, and add it to another:
!````fortran
!     type(json_core) :: json
!     type(json_value),pointer :: json1,json2,p
!     logical :: found
!     !create and populate json1 and json2
!     call json%get(json1,'name',p,found)  ! get pointer to name element of json1
!     call json%remove(p,destroy=.false.)  ! remove it from json1 (don't destroy)
!     call json%add(json2,p)               ! add it to json2
!````
!
!  To remove an object from a JSON structure (and destroy it):
!````fortran
!     type(json_core) :: json
!     type(json_value),pointer :: json1,p
!     logical :: found
!     !create and populate json1
!     call json%get(json1,'name',p,found)  ! get pointer to name element of json1
!     call json%remove(p)                  ! remove and destroy it
!````
!
!### History
!  * Jacob Williams : 12/28/2014 : added destroy optional argument.
!  * Jacob Williams : 12/04/2020 : bug fix.

    subroutine json_value_remove(json,p,destroy)

    implicit none

    class(json_core),intent(inout)  :: json
    type(json_value),pointer        :: p
    logical(LK),intent(in),optional :: destroy !! Option to destroy `p` after it is removed:
                                               !!
                                               !! * If `destroy` is not present, it is also destroyed.
                                               !! * If `destroy` is present and true, it is destroyed.
                                               !! * If `destroy` is present and false, it is not destroyed.

    type(json_value),pointer :: parent     !! pointer to parent
    type(json_value),pointer :: previous   !! pointer to previous
    type(json_value),pointer :: next       !! pointer to next
    logical(LK)              :: destroy_it !! if `p` should be destroyed

    if (associated(p)) then

        !optional input argument:
        if (present(destroy)) then
            destroy_it = destroy
        else
            destroy_it = .true.
        end if

        if (associated(p%parent)) then

            parent => p%parent

            ! four cases, depending on whether `p` has neighbors
            ! before and/or after it in the child list:
            if (associated(p%next)) then

                !there are later items in the list:
                next => p%next

                if (associated(p%previous)) then
                    !there are earlier items in the list
                    previous => p%previous
                    previous%next => next
                    next%previous => previous
                else
                    !this is the first item in the list
                    parent%children => next
                    nullify(next%previous)
                end if

            else

                if (associated(p%previous)) then
                    !there are earlier items in the list:
                    previous => p%previous
                    nullify(previous%next)
                    parent%tail => previous
                else
                    !this is the only item in the list:
                    nullify(parent%children)
                    nullify(parent%tail)
                end if

            end if

            ! nullify all pointers to original structure:
            nullify(p%next)
            nullify(p%previous)
            nullify(p%parent)

            parent%n_children = parent%n_children - 1

        end if

        if (destroy_it) call json%destroy(p)

    end if

    end subroutine json_value_remove
!*****************************************************************************************

!*****************************************************************************************
!>
!  Replace `p1` with `p2` in a JSON structure.
!
!@note The replacement is done using an insert and remove
!      See [[json_value_insert_after]] and [[json_value_remove]]
!      for details.

    subroutine json_value_replace(json,p1,p2,destroy)

    implicit none

    class(json_core),intent(inout)  :: json
    type(json_value),pointer        :: p1      !! the item to replace
    type(json_value),pointer        :: p2      !! item to take the place of `p1`
    logical(LK),intent(in),optional :: destroy !! Should `p1` also be destroyed
                                               !! (default is True). Normally,
                                               !! this should be true to avoid
                                               !! a memory leak.

    logical(LK) :: destroy_p1 !! if `p1` is to be destroyed

    if (present(destroy)) then
        destroy_p1 = destroy
    else
        destroy_p1 = .true.  ! default
    end if

    call json%insert_after(p1,p2)
    call json%remove(p1,destroy_p1)

    end subroutine json_value_replace
!*****************************************************************************************

!*****************************************************************************************
!> author: Jacob Williams
!  date: 4/11/2017
!
!  Reverse the order of the children of an array or object.

    subroutine json_value_reverse(json,p)

    implicit none

    class(json_core),intent(inout) :: json
    type(json_value),pointer       :: p

    type(json_value),pointer :: tmp      !! temp variable for traversing the list
    type(json_value),pointer :: current  !! temp variable for traversing the list
    integer(IK)              :: var_type !! for getting the variable type

    if (associated(p)) then

        call json%info(p,var_type=var_type)

        ! can only reverse objects or arrays
        if (var_type==json_object .or. var_type==json_array) then

            nullify(tmp)
            current => p%children
            p%tail => current

            ! Swap next and previous for all nodes:
            ! (classic singly-visited doubly-linked-list reversal; after the
            ! loop, `tmp` holds the `previous` of the old last node)
            do
                if (.not. associated(current)) exit
                tmp              => current%previous
                current%previous => current%next
                current%next     => tmp
                current          => current%previous
            end do

            if (associated(tmp)) then
                p%children => tmp%previous
            end if

        end if

    end if

    end subroutine json_value_reverse
!*****************************************************************************************

!*****************************************************************************************
!> author: Jacob Williams
!  date: 4/26/2016
!
!  Swap two elements in a JSON structure.
!  All of the children are carried along as well.
!
!@note If both are not associated, then an error is thrown.
!
!@note The assumption here is that both variables are part of a valid
!      [[json_value]] linked list (so the normal `parent`, `previous`,
!      `next`, etc. pointers are properly associated if necessary).
!
!@warning This cannot be used to swap a parent/child pair, since that
!         could lead to a circular linkage. An exception is thrown if
!         this is tried.
!
!@warning There are also other situations where using this routine may
!         produce a malformed JSON structure, such as moving an array
!         element outside of an array. This is not checked for.
!
!@note If `p1` and `p2` have a common parent, it is always safe to swap them.

    subroutine json_value_swap(json,p1,p2)

    implicit none

    class(json_core),intent(inout) :: json
    type(json_value),pointer       :: p1  !! swap with `p2`
    type(json_value),pointer       :: p2  !! swap with `p1`

    logical                  :: same_parent !! if `p1` and `p2` have the same parent
    logical                  :: first_last  !! if `p1` and `p2` are the first,last or
                                            !! last,first children of a common parent
    logical                  :: adjacent    !! if `p1` and `p2` are adjacent
                                            !! elements in an array
    type(json_value),pointer :: a           !! temporary variable
    type(json_value),pointer :: b           !! temporary variable

    if (json%exception_thrown) return

    !both have to be associated:
    if (associated(p1) .and. associated(p2)) then

        !simple check to make sure that they both
        !aren't pointing to the same thing:
        if (.not. associated(p1,p2)) then

            !we will not allow swapping an item with one of its descendants:
            if (json%is_child_of(p1,p2) .or. json%is_child_of(p2,p1)) then
                call json%throw_exception('Error in json_value_swap: '//&
                                          'cannot swap an item with one of its descendants')
            else

                same_parent = ( associated(p1%parent) .and. &
                                associated(p2%parent) .and. &
                                associated(p1%parent,p2%parent) )
                if (same_parent) then
                    first_last = (associated(p1%parent%children,p1) .and. &
                                  associated(p2%parent%tail,p2)) .or. &
                                 (associated(p1%parent%tail,p1) .and. &
                                  associated(p2%parent%children,p2))
                else
                    first_last = .false.
                end if

                !first, we fix children,tail pointers:

                if (same_parent .and. first_last) then

                    !this is all we have to do for the parent in this case:
                    call swap_pointers(p1%parent%children,p2%parent%tail)

                else if (same_parent .and. .not. first_last) then

                    if (associated(p1%parent%children,p1)) then
                        p1%parent%children => p2 ! p1 is the first child of the parent
                    else if (associated(p1%parent%children,p2)) then
                        p1%parent%children => p1 ! p2 is the first child of the parent
                    end if
                    if (associated(p1%parent%tail,p1)) then
                        p1%parent%tail => p2 ! p1 is the last child of the parent
                    else if (associated(p1%parent%tail,p2)) then
                        p1%parent%tail => p1 ! p2 is the last child of the parent
                    end if

                else ! general case: different parents

                    if (associated(p1%parent)) then
                        if (associated(p1%parent%children,p1)) p1%parent%children => p2
                        if (associated(p1%parent%tail,p1))     p1%parent%tail     => p2
                    end if
                    if (associated(p2%parent)) then
                        if (associated(p2%parent%children,p2)) p2%parent%children => p1
                        if (associated(p2%parent%tail,p2))     p2%parent%tail     => p1
                    end if
                    call swap_pointers(p1%parent, p2%parent)

                end if

                !now, have to fix previous,next pointers:

                !first, see if they are adjacent:
                adjacent = associated(p1%next,p2) .or. &
                           associated(p2%next,p1)
                if (associated(p2%next,p1)) then    !p2,p1
                    a => p2
                    b => p1
                else    !p1,p2 (or not adjacent)
                    a => p1
                    b => p2
                end if
                if (associated(a%previous)) a%previous%next => b
                if (associated(b%next))     b%next%previous => a

                if (adjacent) then
                    !a comes before b in the original list
                    b%previous => a%previous
                    a%next     => b%next
                    a%previous => b
                    b%next     => a
                else
                    if (associated(a%next))     a%next%previous => b
                    if (associated(b%previous)) b%previous%next => a
                    call swap_pointers(a%previous,b%previous)
                    call swap_pointers(a%next,    b%next)
                end if

            end if

        else
            call json%throw_exception('Error in json_value_swap: '//&
                                      'both pointers must be associated')
        end if

    end if

    contains

        pure subroutine swap_pointers(s1,s2)

        implicit none

        type(json_value),pointer,intent(inout) :: s1
        type(json_value),pointer,intent(inout) :: s2

        type(json_value),pointer :: tmp  !! temporary pointer

        if (.not. associated(s1,s2)) then
            tmp => s1
            s1  => s2
            s2  => tmp
        end if

        end subroutine swap_pointers

    end subroutine json_value_swap
!*****************************************************************************************

!*****************************************************************************************
!> author: Jacob Williams
!  date: 4/28/2016
!
!  Returns True if `p2` is a descendant of `p1`
!  (i.e, a child, or a child of child, etc.)

    function json_value_is_child_of(json,p1,p2) result(is_child_of)

    implicit none

    class(json_core),intent(inout) :: json
    type(json_value),pointer       :: p1
    type(json_value),pointer       :: p2
    logical(LK)                    :: is_child_of

    is_child_of = .false.

    if (json%exception_thrown) return

    if (associated(p1) .and. associated(p2)) then
        if (associated(p1%children)) then
            call json%traverse(p1%children,is_child_of_callback)
        end if
    end if

    contains

        subroutine is_child_of_callback(json,p,finished)
        !! Traverse until `p` is `p2`.

        implicit none

        class(json_core),intent(inout)      :: json
        type(json_value),pointer,intent(in) :: p
        logical(LK),intent(out)             :: finished

        is_child_of = associated(p,p2)
        finished = is_child_of  ! stop searching if found

        end subroutine is_child_of_callback

    end function json_value_is_child_of
!*****************************************************************************************

!*****************************************************************************************
!> author: Jacob Williams
!  date: 5/2/2016
!
!  Validate a [[json_value]] linked list by checking to make sure
!  all the pointers are properly associated, arrays and objects
!  have the correct number of children, and the correct data is
!  allocated for the variable types.
!
!  It recursively traverses the entire structure and checks every element.
!
!### History
!  * Jacob Williams, 8/26/2017 : added duplicate key check.
!
!@note It will return on the first error it encounters.
!
!@note This routine does not check or throw any exceptions.
!      If `json` is currently in a state of exception, it will
!      remain so after calling this routine.

    subroutine json_value_validate(json,p,is_valid,error_msg)

    implicit none

    class(json_core),intent(inout)                   :: json
    type(json_value),pointer,intent(in)              :: p
    logical(LK),intent(out)                          :: is_valid  !! True if the structure is valid.
    character(kind=CK,len=:),allocatable,intent(out) :: error_msg !! if not valid, this will contain
                                                                  !! a description of the problem

    logical(LK) :: has_duplicate !! to check for duplicate keys
    character(kind=CK,len=:),allocatable :: path !! path to duplicate key
    logical(LK) :: status_ok !! to check for existing exception
    character(kind=CK,len=:),allocatable :: exception_msg  !! error message for an existing exception
    character(kind=CK,len=:),allocatable :: exception_msg2 !! error message for a new exception

    if (associated(p)) then

        is_valid = .true.
        call check_if_valid(p,require_parent=associated(p%parent))

        if (is_valid .and. .not. json%allow_duplicate_keys) then
            ! if no errors so far, also check the
            ! entire structure for duplicate keys:

            ! note: check_for_duplicate_keys does call routines
            ! that check and throw exceptions, so let's clear any
            ! first. (save message for later)
            call json%check_for_errors(status_ok, exception_msg)
            call json%clear_exceptions()

            call json%check_for_duplicate_keys(p,has_duplicate,path=path)
            if (json%failed()) then
                ! if an exception was thrown during this call,
                ! then clear it but make that the error message
                ! returned by this routine. Normally this should
                ! never actually occur since we have already
                ! validated the structure.
                call json%check_for_errors(is_valid, exception_msg2)
                error_msg = exception_msg2
                call json%clear_exceptions()
                is_valid = .false.
            else
                if (has_duplicate) then
                    error_msg = 'duplicate key found: '//path
                    is_valid = .false.
                end if
            end if

            if (.not. status_ok) then
                ! restore any existing exception if necessary
                call json%throw_exception(exception_msg)
            end if

            ! cleanup:
            if (allocated(path))           deallocate(path)
            if (allocated(exception_msg))  deallocate(exception_msg)
            if (allocated(exception_msg2)) deallocate(exception_msg2)

        end if

    else
        error_msg = 'The pointer is not associated'
        is_valid = .false.
    end if

    contains

        recursive subroutine check_if_valid(p,require_parent)

        implicit none

        type(json_value),pointer,intent(in) :: p
        logical,intent(in) :: require_parent !! the first one may be a root (so no parent),
                                             !! but all descendants must have a parent.

        integer(IK) :: i !! counter
        type(json_value),pointer :: element
        type(json_value),pointer :: previous

        if (is_valid .and. associated(p)) then

            ! data type:
            select case (p%var_type)
            case(json_null,json_object,json_array)
                if (allocated(p%log_value) .or. allocated(p%int_value) .or. &
                    allocated(p%dbl_value) .or. allocated(p%str_value)) then
                    error_msg = 'incorrect data allocated for '//&
                                'json_null, json_object, or json_array variable type'
                    is_valid = .false.
                    return
                end if
            case(json_logical)
                if (.not. allocated(p%log_value)) then
                    error_msg = 'log_value should be allocated for json_logical variable type'
                    is_valid = .false.
                    return
                else if (allocated(p%int_value) .or. &
                    allocated(p%dbl_value) .or. allocated(p%str_value)) then
                    error_msg = 'incorrect data allocated for json_logical variable type'
                    is_valid = .false.
                    return
                end if
            case(json_integer)
                if (.not. allocated(p%int_value)) then
                    error_msg = 'int_value should be allocated for json_integer variable type'
                    is_valid = .false.
                    return
                else if (allocated(p%log_value) .or. &
                    allocated(p%dbl_value) .or. allocated(p%str_value)) then
                    error_msg = 'incorrect data allocated for json_integer variable type'
                    is_valid = .false.
                    return
                end if
            case(json_real)
                if (.not. allocated(p%dbl_value)) then
                    error_msg = 'dbl_value should be allocated for json_real variable type'
                    is_valid = .false.
                    return
                else if (allocated(p%log_value) .or. allocated(p%int_value) .or. &
                    allocated(p%str_value)) then
                    error_msg = 'incorrect data allocated for json_real variable type'
                    is_valid = .false.
                    return
                end if
            case(json_string)
                if (.not. allocated(p%str_value)) then
                    error_msg = 'str_value should be allocated for json_string variable type'
                    is_valid = .false.
                    return
                else if (allocated(p%log_value) .or. allocated(p%int_value) .or. &
                    allocated(p%dbl_value)) then
                    error_msg = 'incorrect data allocated for json_string variable type'
                    is_valid = .false.
                    return
                end if
            case default
                error_msg = 'invalid JSON variable type'
                is_valid = .false.
                return
            end select

            if (require_parent .and. .not. associated(p%parent)) then
                error_msg = 'parent pointer is not associated'
                is_valid = .false.
                return
            end if

            if (.not. allocated(p%name)) then
                if (associated(p%parent)) then
                    if (p%parent%var_type/=json_array) then
                        error_msg = 'JSON variable must have a name if not an '//&
                                    'array element or the root'
                        is_valid = .false.
                        return
                    end if
                end if
            end if

            if (associated(p%children) .neqv. associated(p%tail)) then
                error_msg = 'both children and tail pointers must be associated'
                is_valid = .false.
                return
            end if

            ! now, check next one:
            if (associated(p%next)) then
                if (associated(p,p%next)) then
                    error_msg = 'circular linked list'
                    is_valid = .false.
                    return
                else
                    ! if it's an element in an
                    ! array, then require a parent:
                    call check_if_valid(p%next,require_parent=.true.)
                end if
            end if

            if (associated(p%children)) then

                if (p%var_type/=json_array .and. p%var_type/=json_object) then
                    error_msg = 'only arrays and objects can have children'
                    is_valid = .false.
                    return
                end if

                ! first validate children pointers:

                previous => null()
                element => p%children
                do i = 1_IK, p%n_children
                    if (.not. associated(element%parent,p)) then
                        error_msg = 'child''s parent pointer not properly associated'
                        is_valid = .false.
                        return
                    end if
                    if (i==1 .and. associated(element%previous)) then
                        error_msg = 'first child shouldn''t have a previous'
                        is_valid = .false.
                        return
                    end if
                    ! note: relational operators here were corrupted in
                    ! transcription ("i1", "i element"); restored as
                    ! i>1 and i<p%n_children from the loop invariants.
                    if (i>1) then
                        if (.not. associated(previous,element%previous)) then
                            error_msg = 'previous pointer not properly associated'
                            is_valid = .false.
                            return
                        end if
                    end if
                    if (i==p%n_children .and. &
                        .not. associated(element%parent%tail,element)) then
                        error_msg = 'parent''s tail pointer not properly associated'
                        is_valid = .false.
                        return
                    end if
                    if (i<p%n_children) then
                        ! advance to the next child (not done on the last
                        ! iteration, since element%next may be unassociated):
                        previous => element
                        element  => element%next
                    end if
                end do

                !now check all the children:
                call check_if_valid(p%children,require_parent=.true.)

            end if

        end if

        end subroutine check_if_valid

    end subroutine json_value_validate
!*****************************************************************************************

!*****************************************************************************************
!> author: Jacob Williams
!  date: 12/6/2014
!
!  Given the path string, remove the variable
!  from [[json_value]], if it exists.

    subroutine json_value_remove_if_present(json,p,path)

    implicit none

    class(json_core),intent(inout)      :: json
    type(json_value),pointer            :: p
    character(kind=CK,len=*),intent(in) :: path !! the path to the variable to remove

    type(json_value),pointer :: p_var
    logical(LK) :: found

    call json%get(p,path,p_var,found)
    if (found) call json%remove(p_var)

    end subroutine json_value_remove_if_present
!*****************************************************************************************

!*****************************************************************************************
!>
!  Alternate version of [[json_value_remove_if_present]], where `path` is kind=CDK.

    subroutine wrap_json_value_remove_if_present(json,p,path)

    implicit none

    class(json_core),intent(inout)       :: json
    type(json_value),pointer             :: p
    character(kind=CDK,len=*),intent(in) :: path

    call json%remove_if_present(p,to_unicode(path))

    end subroutine wrap_json_value_remove_if_present
!*****************************************************************************************

!*****************************************************************************************
!> author: Jacob Williams
!  date: 12/6/2014
!
!  Given the path string, if the variable is present,
!  and is a scalar, then update its value.
!  If it is not present, then create it and set its value.
!
!@note If the variable is not a scalar, an exception will be thrown.
+ + subroutine json_update_logical(json,p,path,val,found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: path !! path to the variable in the structure + logical(LK),intent(in) :: val !! the new value + logical(LK),intent(out) :: found !! if the variable was found and was a scalar. + + type(json_value),pointer :: p_var + integer(IK) :: var_type + + call json%get(p,path,p_var,found) + if (found) then + + call json%info(p_var,var_type) + select case (var_type) + case (json_null,json_logical,json_integer,json_real,json_string) + call json%to_logical(p_var,val) !update the value + case default + found = .false. + call json%throw_exception('Error in json_update_logical: '//& + 'the variable is not a scalar value',found) + end select + + else + call json%add_by_path(p,path,val) !add the new element + end if + + end subroutine json_update_logical +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_update_logical]], where `path` is kind=CDK. + + subroutine wrap_json_update_logical(json,p,path,val,found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: path !! path to the variable in the structure + logical(LK),intent(in) :: val !! the new value + logical(LK),intent(out) :: found !! if the variable was found and was a scalar. + + call json%update(p,to_unicode(path),val,found) + + end subroutine wrap_json_update_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/6/2014 +! +! Given the path string, if the variable is present, +! 
!  and is a scalar, then update its value.
!  If it is not present, then create it and set its value.
!
!@note If the variable is not a scalar, an exception will be thrown.

    subroutine json_update_real(json,p,path,val,found)

    implicit none

    class(json_core),intent(inout)      :: json
    type(json_value),pointer            :: p
    character(kind=CK,len=*),intent(in) :: path  !! path to the variable in the structure
    real(RK),intent(in)                 :: val   !! the new value
    logical(LK),intent(out)             :: found !! if the variable was found and was a scalar.

    type(json_value),pointer :: p_var
    integer(IK) :: var_type

    call json%get(p,path,p_var,found)
    if (found) then

        call json%info(p_var,var_type)
        select case (var_type)
        case (json_null,json_logical,json_integer,json_real,json_string)
            ! any scalar type can be overwritten with a real value:
            call json%to_real(p_var,val) !update the value
        case default
            found = .false.
            call json%throw_exception('Error in json_update_real: '//&
                                      'the variable is not a scalar value',found)
        end select

    else
        call json%add_by_path(p,path,val) !add the new element
    end if

    end subroutine json_update_real
!*****************************************************************************************

!*****************************************************************************************
!>
!  Alternate version of [[json_update_real]], where `path` is kind=CDK.

    subroutine wrap_json_update_real(json,p,path,val,found)

    implicit none

    class(json_core),intent(inout)       :: json
    type(json_value),pointer             :: p
    character(kind=CDK,len=*),intent(in) :: path  !! path to the variable in the structure
    real(RK),intent(in)                  :: val   !! the new value
    logical(LK),intent(out)              :: found !! if the variable was found and was a scalar.

    call json%update(p,to_unicode(path),val,found)

    end subroutine wrap_json_update_real
!*****************************************************************************************

#ifndef REAL32
!*****************************************************************************************
!>
!  Alternate version of [[json_update_real]], where `val` is `real32`.

    subroutine json_update_real32(json,p,path,val,found)

    implicit none

    class(json_core),intent(inout)      :: json
    type(json_value),pointer            :: p
    character(kind=CK,len=*),intent(in) :: path  !! path to the variable in the structure
    real(real32),intent(in)             :: val   !! the new value
    logical(LK),intent(out)             :: found !! if the variable was found and was a scalar.

    ! widen to the working real kind and delegate:
    call json%update(p,path,real(val,RK),found)

    end subroutine json_update_real32
!*****************************************************************************************

!*****************************************************************************************
!>
!  Alternate version of [[json_update_real32]], where `path` is kind=CDK.

    subroutine wrap_json_update_real32(json,p,path,val,found)

    implicit none

    class(json_core),intent(inout)       :: json
    type(json_value),pointer             :: p
    character(kind=CDK,len=*),intent(in) :: path  !! path to the variable in the structure
    real(real32),intent(in)              :: val   !! the new value
    logical(LK),intent(out)              :: found !! if the variable was found and was a scalar.

    call json%update(p,to_unicode(path),real(val,RK),found)

    end subroutine wrap_json_update_real32
!*****************************************************************************************
#endif

#ifdef REAL128
!*****************************************************************************************
!>
!  Alternate version of [[json_update_real]], where `val` is `real64`.
    subroutine json_update_real64(json,p,path,val,found)

    implicit none

    class(json_core),intent(inout)      :: json
    type(json_value),pointer            :: p
    character(kind=CK,len=*),intent(in) :: path  !! path to the variable in the structure
    real(real64),intent(in)             :: val   !! the new value
    logical(LK),intent(out)             :: found !! if the variable was found and was a scalar.

    ! convert to the working real kind and delegate:
    call json%update(p,path,real(val,RK),found)

    end subroutine json_update_real64
!*****************************************************************************************

!*****************************************************************************************
!>
!  Alternate version of [[json_update_real64]], where `path` is kind=CDK.

    subroutine wrap_json_update_real64(json,p,path,val,found)

    implicit none

    class(json_core),intent(inout)       :: json
    type(json_value),pointer             :: p
    character(kind=CDK,len=*),intent(in) :: path  !! path to the variable in the structure
    real(real64),intent(in)              :: val   !! the new value
    logical(LK),intent(out)              :: found !! if the variable was found and was a scalar.

    call json%update(p,to_unicode(path),real(val,RK),found)

    end subroutine wrap_json_update_real64
!*****************************************************************************************
#endif

!*****************************************************************************************
!> author: Jacob Williams
!  date: 12/6/2014
!
!  Given the path string, if the variable is present,
!  and is a scalar, then update its value.
!  If it is not present, then create it and set its value.
!
!@note If the variable is not a scalar, an exception will be thrown.

    subroutine json_update_integer(json,p,path,val,found)

    implicit none

    class(json_core),intent(inout)      :: json
    type(json_value),pointer            :: p
    character(kind=CK,len=*),intent(in) :: path  !! path to the variable in the structure
    integer(IK),intent(in)              :: val   !! the new value
    logical(LK),intent(out)             :: found !! if the variable was found and was a scalar.

    type(json_value),pointer :: p_var
    integer(IK) :: var_type

    call json%get(p,path,p_var,found)
    if (found) then

        call json%info(p_var,var_type)
        select case (var_type)
        case (json_null,json_logical,json_integer,json_real,json_string)
            ! any scalar type can be overwritten with an integer value:
            call json%to_integer(p_var,val) !update the value
        case default
            found = .false.
            call json%throw_exception('Error in json_update_integer: '//&
                                      'the variable is not a scalar value',found)
        end select

    else
        call json%add_by_path(p,path,val) !add the new element
    end if

    end subroutine json_update_integer
!*****************************************************************************************

!*****************************************************************************************
!>
!  Alternate version of [[json_update_integer]], where `path` is kind=CDK.

    subroutine wrap_json_update_integer(json,p,path,val,found)

    implicit none

    class(json_core),intent(inout)       :: json
    type(json_value),pointer             :: p
    character(kind=CDK,len=*),intent(in) :: path  !! path to the variable in the structure
    integer(IK),intent(in)               :: val   !! the new value
    logical(LK),intent(out)              :: found !! if the variable was found and was a scalar.

    call json%update(p,to_unicode(path),val,found)

    end subroutine wrap_json_update_integer
!*****************************************************************************************

!*****************************************************************************************
!> author: Jacob Williams
!  date: 12/6/2014
!
!  Given the path string, if the variable is present,
!  and is a scalar, then update its value.
!  If it is not present, then create it and set its value.
!
!@note If the variable is not a scalar, an exception will be thrown.
    subroutine json_update_string(json,p,path,val,found,trim_str,adjustl_str)

    implicit none

    class(json_core),intent(inout)      :: json
    type(json_value),pointer            :: p
    character(kind=CK,len=*),intent(in) :: path  !! path to the variable in the structure
    character(kind=CK,len=*),intent(in) :: val   !! the new value
    logical(LK),intent(out)             :: found !! if the variable was found and was a scalar.
    logical(LK),intent(in),optional     :: trim_str    !! if TRIM() should be called for the `val`
                                                       !! (only used if `val` is present)
    logical(LK),intent(in),optional     :: adjustl_str !! if ADJUSTL() should be called for the `val`
                                                       !! (only used if `val` is present)
                                                       !! (note that ADJUSTL is done before TRIM)

    type(json_value),pointer :: p_var
    integer(IK) :: var_type

    call json%get(p,path,p_var,found)
    if (found) then

        call json%info(p_var,var_type)
        select case (var_type)
        case (json_null,json_logical,json_integer,json_real,json_string)
            ! any scalar type can be overwritten with a string value:
            call json%to_string(p_var,val,trim_str=trim_str,adjustl_str=adjustl_str) ! update the value
        case default
            found = .false.
            call json%throw_exception('Error in json_update_string: '//&
                                      'the variable is not a scalar value',found)
        end select

    else
        call json%add_by_path(p,path,val) !add the new element
    end if

    end subroutine json_update_string
!*****************************************************************************************

!*****************************************************************************************
!>
!  Alternate version of [[json_update_string]], where `path` and `value` are kind=CDK.

    subroutine wrap_json_update_string(json,p,path,val,found,trim_str,adjustl_str)

    implicit none

    class(json_core),intent(inout)       :: json
    type(json_value),pointer             :: p
    character(kind=CDK,len=*),intent(in) :: path  !! path to the variable in the structure
    character(kind=CDK,len=*),intent(in) :: val   !! the new value
    logical(LK),intent(out)              :: found !! if the variable was found and was a scalar.
    logical(LK),intent(in),optional      :: trim_str    !! if TRIM() should be called for the `val`
                                                        !! (only used if `val` is present)
    logical(LK),intent(in),optional      :: adjustl_str !! if ADJUSTL() should be called for the `val`
                                                        !! (only used if `val` is present)
                                                        !! (note that ADJUSTL is done before TRIM)

    ! both path and value are converted to the unicode kind:
    call json%update(p,to_unicode(path),to_unicode(val),found,trim_str,adjustl_str)

    end subroutine wrap_json_update_string
!*****************************************************************************************

!*****************************************************************************************
!>
!  Alternate version of [[json_update_string]], where `path` is kind=CDK.

    subroutine json_update_string_name_ascii(json,p,path,val,found,trim_str,adjustl_str)

    implicit none

    class(json_core),intent(inout)       :: json
    type(json_value),pointer             :: p
    character(kind=CDK,len=*),intent(in) :: path  !! path to the variable in the structure
    character(kind=CK, len=*),intent(in) :: val   !! the new value
    logical(LK),intent(out)              :: found !! if the variable was found and was a scalar.
    logical(LK),intent(in),optional      :: trim_str    !! if TRIM() should be called for the `val`
                                                        !! (only used if `val` is present)
    logical(LK),intent(in),optional      :: adjustl_str !! if ADJUSTL() should be called for the `val`
                                                        !! (only used if `val` is present)
                                                        !! (note that ADJUSTL is done before TRIM)

    call json%update(p,to_unicode(path),val,found,trim_str,adjustl_str)

    end subroutine json_update_string_name_ascii
!*****************************************************************************************

!*****************************************************************************************
!>
!  Alternate version of [[json_update_string]], where `val` is kind=CDK.
+ + subroutine json_update_string_val_ascii(json,p,path,val,found,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK, len=*),intent(in) :: path !! path to the variable in the structure + character(kind=CDK,len=*),intent(in) :: val !! the new value + logical(LK),intent(out) :: found !! if the variable was found and was a scalar. + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + !! (only used if `val` is present) + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (only used if `val` is present) + !! (note that ADJUSTL is done before TRIM) + + call json%update(p,path,to_unicode(val),found,trim_str,adjustl_str) + + end subroutine json_update_string_val_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Adds `member` as a child of `p`. + + subroutine json_value_add_member(json,p,member) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! `p` must be a `json_object` + !! or a `json_array` + type(json_value),pointer :: member !! the child member + !! to add to `p` + + integer(IK) :: var_type !! variable type of `p` + + if (.not. json%exception_thrown) then + + if (associated(p)) then + + call json%info(p,var_type=var_type) + + select case (var_type) + case(json_object, json_array) + + ! associate the parent + member%parent => p + + ! add to linked list + if (associated(p%children)) then + p%tail%next => member + member%previous => p%tail + else + p%children => member + member%previous => null() !first in the list + end if + + ! 
new member is now the last one in the list + p%tail => member + p%n_children = p%n_children + 1 + + case default + call json%throw_exception('Error in json_value_add_member: '//& + 'can only add child to object or array') + end select + + else + call json%throw_exception('Error in json_value_add_member: '//& + 'the pointer is not associated') + end if + + end if + + end subroutine json_value_add_member +!***************************************************************************************** + +!***************************************************************************************** +!> +! Inserts `element` after `p`, and updates the JSON structure accordingly. +! +!### Example +! +!````fortran +! program test +! use json_module +! implicit none +! logical(json_LK) :: found +! type(json_core) :: json +! type(json_value),pointer :: p,new,element +! call json%load(file='myfile.json', p=p) +! call json%get(p,'x(3)',element,found) ! get pointer to an array element in the file +! call json%create_integer(new,1,'') ! create a new element +! call json%insert_after(element,new) ! insert new element after x(3) +! call json%print(p,'myfile2.json') ! write it to a file +! call json%destroy(p) ! cleanup +! end program test +!```` +! +!### Details +! +! * This routine can be used to insert a new element (or set of elements) +! into an array or object at a specific index. +! See [[json_value_insert_after_child_by_index]] +! * Children and subsequent elements of `element` are carried along. +! * If the inserted elements are part of an existing list, then +! they are removed from that list. +! +!```` +! p +! [1] - [2] - [3] - [4] +! | +! [5] - [6] - [7] n=3 elements inserted +! element last +! +! Result is: +! +! [1] - [2] - [5] - [6] - [7] - [3] - [4] +! +!```` + + subroutine json_value_insert_after(json,p,element) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! a value from a JSON structure + !! 
(presumably, this is a child of + !! an object or array). + type(json_value),pointer :: element !! the element to insert after `p` + + type(json_value),pointer :: parent !! the parent of `p` + type(json_value),pointer :: next !! temp pointer for traversing structure + type(json_value),pointer :: last !! the last of the items being inserted + integer :: n !! number of items being inserted + + if (.not. json%exception_thrown) then + + parent => p%parent + + ! set first parent of inserted list: + element%parent => parent + + ! Count the number of inserted elements. + ! and set their parents. + n = 1 ! initialize counter + next => element%next + last => element + do + if (.not. associated(next)) exit + n = n + 1 + next%parent => parent + last => next + next => next%next + end do + + if (associated(parent)) then + ! update parent's child counter: + parent%n_children = parent%n_children + n + ! if p is last of parents children then + ! also have to update parent tail pointer: + if (associated(parent%tail,p)) then + parent%tail => last + end if + end if + + if (associated(element%previous)) then + ! element is apparently part of an existing list, + ! so have to update that as well. + if (associated(element%previous%parent)) then + element%previous%parent%n_children = & + element%previous%parent%n_children - n + element%previous%parent%tail => & + element%previous ! now the last one in the list + else + ! this would be a memory leak if the previous entries + ! are not otherwise being pointed too + ! [throw an error in this case???] + end if + !remove element from the other list: + element%previous%next => null() + end if + element%previous => p + + if (associated(p%next)) then + ! 
if there are any in the list after p: + last%next => p%next + last%next%previous => element + else + last%next => null() + end if + p%next => element + + end if + + end subroutine json_value_insert_after +!***************************************************************************************** + +!***************************************************************************************** +!> +! Inserts `element` after the `idx`-th child of `p`, +! and updates the JSON structure accordingly. This is just +! a wrapper for [[json_value_insert_after]]. + + subroutine json_value_insert_after_child_by_index(json,p,idx,element) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! a JSON object or array. + integer(IK),intent(in) :: idx !! the index of the child of `p` to + !! insert the new element after + !! (this is a 1-based Fortran + !! style array index) + type(json_value),pointer :: element !! the element to insert + + type(json_value),pointer :: tmp !! for getting the `idx`-th child of `p` + + if (.not. json%exception_thrown) then + + ! get the idx-th child of p: + call json%get_child(p,idx,tmp) + + ! call json_value_insert_after: + if (.not. json%exception_thrown) call json%insert_after(tmp,element) + + end if + + end subroutine json_value_insert_after_child_by_index +!***************************************************************************************** + +!***************************************************************************************** +!> +! Add a new member (`json_value` pointer) to a JSON structure, given the path. +! +!@warning If the path points to an existing variable in the structure, +! then this routine will destroy it and replace it with the +! new value. + + subroutine json_add_member_by_path(json,me,path,p,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! 
the path to the variable + type(json_value),pointer,intent(in) :: p !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + type(json_value),pointer :: tmp + character(kind=CK,len=:),allocatable :: name !! name of the variable + + if ( .not. json%exception_thrown ) then + + if (.not. associated(p)) then + call json%throw_exception('Error in json_add_member_by_path:'//& + ' Input pointer p is not associated.',found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + if ( present(was_created) ) was_created = .false. + else + + ! return a pointer to the path (possibly creating it) + call json%create(me,path,tmp,found,was_created) + + if (.not. associated(tmp)) then + + call json%throw_exception('Error in json_add_member_by_path:'//& + ' Unable to resolve path: '//trim(path),found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + + else + + call json%info(tmp,name=name) + + ! replace it with the new one: + call json%replace(tmp,p,destroy=.true.) + call json%rename(p,name) + + end if + + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. + end if + + end subroutine json_add_member_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_member_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_add_member_by_path(json,me,path,p,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + type(json_value),pointer,intent(in) :: p !! the value to add + logical(LK),intent(out),optional :: found !! 
if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%json_add_member_by_path(me,to_unicode(path),p,found,was_created) + + end subroutine wrap_json_add_member_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Add an integer value to a [[json_value]], given the path. +! +!@warning If the path points to an existing variable in the structure, +! then this routine will destroy it and replace it with the +! new value. + + subroutine json_add_integer_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + integer(IK),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + type(json_value),pointer :: p + type(json_value),pointer :: tmp + character(kind=CK,len=:),allocatable :: name !! variable name + + if ( .not. json%exception_thrown ) then + + nullify(p) + + ! return a pointer to the path (possibly creating it) + ! If the variable had to be created, then + ! it will be a json_null variable. + call json%create(me,path,p,found,was_created) + + if (.not. associated(p)) then + + call json%throw_exception('Error in json_add_integer_by_path:'//& + ' Unable to resolve path: '//trim(path),found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + + else + + !NOTE: a new object is created, and the old one + ! is replaced and destroyed. This is to + ! prevent memory leaks if the type is + ! being changed (for example, if an array + ! is being replaced with a scalar). 
+ + if (p%var_type==json_integer) then + p%int_value = value + else + call json%info(p,name=name) + call json%create_integer(tmp,value,name) + call json%replace(p,tmp,destroy=.true.) + end if + + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. + end if + + end subroutine json_add_integer_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_integer_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_add_integer_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + integer(IK),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%json_add_integer_by_path(me,to_unicode(path),value,found,was_created) + + end subroutine wrap_json_add_integer_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Add an real value to a [[json_value]], given the path. +! +!@warning If the path points to an existing variable in the structure, +! then this routine will destroy it and replace it with the +! new value. + + subroutine json_add_real_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(RK),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! 
if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + type(json_value),pointer :: p + type(json_value),pointer :: tmp + character(kind=CK,len=:),allocatable :: name !! variable name + + if ( .not. json%exception_thrown ) then + + nullify(p) + + ! return a pointer to the path (possibly creating it) + ! If the variable had to be created, then + ! it will be a json_null variable. + call json%create(me,path,p,found,was_created) + + if (.not. associated(p)) then + + call json%throw_exception('Error in json_add_real_by_path:'//& + ' Unable to resolve path: '//trim(path),found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + + else + + !NOTE: a new object is created, and the old one + ! is replaced and destroyed. This is to + ! prevent memory leaks if the type is + ! being changed (for example, if an array + ! is being replaced with a scalar). + + if (p%var_type==json_real) then + p%dbl_value = value + else + call json%info(p,name=name) + call json%create_real(tmp,value,name) + call json%replace(p,tmp,destroy=.true.) + end if + + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. + end if + + end subroutine json_add_real_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_real_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_add_real_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(RK),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! 
if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%json_add_real_by_path(me,to_unicode(path),value,found,was_created) + + end subroutine wrap_json_add_real_by_path +!***************************************************************************************** + +#ifndef REAL32 +!***************************************************************************************** +!> +! Alternate version of [[json_add_real_by_path]] where value=real32. + + subroutine json_add_real32_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real32),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%add_by_path(me,path,real(value,RK),found,was_created) + + end subroutine json_add_real32_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_real32_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_add_real32_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real32),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call json%add_by_path(me,to_unicode(path),real(value,RK),found,was_created) + + end subroutine wrap_json_add_real32_by_path +!***************************************************************************************** +#endif + +#ifdef REAL128 +!***************************************************************************************** +!> +! Alternate version of [[json_add_real_by_path]] where value=real32. + + subroutine json_add_real64_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real64),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%add_by_path(me,path,real(value,RK),found,was_created) + + end subroutine json_add_real64_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_real64_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_add_real64_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real64),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call json%add_by_path(me,to_unicode(path),real(value,RK),found,was_created) + + end subroutine wrap_json_add_real64_by_path +!***************************************************************************************** +#endif + +!***************************************************************************************** +!> +! Add a logical value to a [[json_value]], given the path. +! +!@warning If the path points to an existing variable in the structure, +! then this routine will destroy it and replace it with the +! new value. + + subroutine json_add_logical_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + logical(LK),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + type(json_value),pointer :: p + type(json_value),pointer :: tmp + character(kind=CK,len=:),allocatable :: name !! variable name + + if ( .not. json%exception_thrown ) then + + nullify(p) + + ! return a pointer to the path (possibly creating it) + ! If the variable had to be created, then + ! it will be a json_null variable. + call json%create(me,path,p,found,was_created) + + if (.not. associated(p)) then + + call json%throw_exception('Error in json_add_logical_by_path:'//& + ' Unable to resolve path: '//trim(path),found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + + else + + !NOTE: a new object is created, and the old one + ! is replaced and destroyed. This is to + ! prevent memory leaks if the type is + ! being changed (for example, if an array + ! is being replaced with a scalar). 
+ + if (p%var_type==json_logical) then + p%log_value = value + else + call json%info(p,name=name) + call json%create_logical(tmp,value,name) + call json%replace(p,tmp,destroy=.true.) + end if + + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. + end if + + end subroutine json_add_logical_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_logical_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_add_logical_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + logical(LK),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%json_add_logical_by_path(me,to_unicode(path),value,found,was_created) + + end subroutine wrap_json_add_logical_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Add a string value to a [[json_value]], given the path. +! +!@warning If the path points to an existing variable in the structure, +! then this routine will destroy it and replace it with the +! new value. + + subroutine json_add_string_by_path(json,me,path,value,found,& + was_created,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),intent(in) :: value !! 
the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + + type(json_value),pointer :: p + type(json_value),pointer :: tmp + character(kind=CK,len=:),allocatable :: name !! variable name + + if ( .not. json%exception_thrown ) then + + nullify(p) + + ! return a pointer to the path (possibly creating it) + ! If the variable had to be created, then + ! it will be a json_null variable. + call json%create(me,path,p,found,was_created) + + if (.not. associated(p)) then + + call json%throw_exception('Error in json_add_string_by_path:'//& + ' Unable to resolve path: '//trim(path),found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + + else + + !NOTE: a new object is created, and the old one + ! is replaced and destroyed. This is to + ! prevent memory leaks if the type is + ! being changed (for example, if an array + ! is being replaced with a scalar). + + if (p%var_type==json_string) then + p%str_value = value + else + call json%info(p,name=name) + call json%create_string(tmp,value,name,trim_str,adjustl_str) + call json%replace(p,tmp,destroy=.true.) + end if + + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. + end if + + end subroutine json_add_string_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_string_by_path]] where ""path"" is kind=CDK. 
+ + subroutine wrap_json_add_string_by_path(json,me,path,value,found,& + was_created,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + + call json%json_add_string_by_path(me,to_unicode(path),to_unicode(value),& + found,was_created,trim_str,adjustl_str) + + end subroutine wrap_json_add_string_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_string_by_path]] where ""path"" is kind=CDK. + + subroutine json_add_string_by_path_path_ascii(json,me,path,value,found,& + was_created,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! 
if ADJUSTL() should be called for each element + + call json%json_add_string_by_path(me,to_unicode(path),value,found,was_created,trim_str,adjustl_str) + + end subroutine json_add_string_by_path_path_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_string_by_path]] where ""value"" is kind=CDK. + + subroutine json_add_string_by_path_value_ascii(json,me,path,value,found,& + was_created,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),intent(in) :: value !! the value to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + + call json%json_add_string_by_path(me,path,to_unicode(value),found,was_created,trim_str,adjustl_str) + + end subroutine json_add_string_by_path_value_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_integer_by_path]] for adding an integer vector by path. + + subroutine json_add_integer_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + integer(IK),dimension(:),intent(in) :: value !! 
the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + type(json_value),pointer :: p !! pointer to path (which may exist) + type(json_value),pointer :: var !! new variable that is created + integer(IK) :: i !! counter + character(kind=CK,len=:),allocatable :: name !! the variable name + logical(LK) :: p_found !! if the path was successfully found (or created) + + if ( .not. json%exception_thrown ) then + + !get a pointer to the variable + !(creating it if necessary) + call json%create(me,path,p,found=p_found) + if (p_found) then + call json%info(p,name=name) ! want to keep the existing name + call json%create_array(var,name) ! create a new array variable + call json%replace(p,var,destroy=.true.) ! replace p with this array (destroy p) + !populate each element of the array: + do i=1,size(value) + call json%add(var, CK_'', value(i)) + end do + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. + end if + + end subroutine json_add_integer_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_integer_vec_by_path]] where ""path"" is kind=CDK). + + subroutine wrap_json_add_integer_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + integer(IK),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! 
if the variable had to be created + + call json%json_add_integer_vec_by_path(me,to_unicode(path),value,found,was_created) + + end subroutine wrap_json_add_integer_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_logical_by_path]] for adding a logical vector by path. + + subroutine json_add_logical_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + logical(LK),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + type(json_value),pointer :: p !! pointer to path (which may exist) + type(json_value),pointer :: var !! new variable that is created + integer(IK) :: i !! counter + character(kind=CK,len=:),allocatable :: name !! the variable name + logical(LK) :: p_found !! if the path was successfully found (or created) + + if ( .not. json%exception_thrown ) then + + !get a pointer to the variable + !(creating it if necessary) + call json%create(me,path,p,found=p_found) + if (p_found) then + call json%info(p,name=name) ! want to keep the existing name + call json%create_array(var,name) ! create a new array variable + call json%replace(p,var,destroy=.true.) ! replace p with this array (destroy p) + !populate each element of the array: + do i=1,size(value) + call json%add(var, CK_'', value(i)) + end do + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. 
+ end if + + end subroutine json_add_logical_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_logical_vec_by_path]] where ""path"" is kind=CDK). + + subroutine wrap_json_add_logical_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + logical(LK),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%json_add_logical_vec_by_path(me,to_unicode(path),value,found,was_created) + + end subroutine wrap_json_add_logical_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper to [[json_add_real_by_path]] for adding a real vector by path. + + subroutine json_add_real_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(RK),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + type(json_value),pointer :: p !! pointer to path (which may exist) + type(json_value),pointer :: var !! new variable that is created + integer(IK) :: i !! counter + character(kind=CK,len=:),allocatable :: name !! the variable name + logical(LK) :: p_found !! 
if the path was successfully found (or created) + + if ( .not. json%exception_thrown ) then + + !get a pointer to the variable + !(creating it if necessary) + call json%create(me,path,p,found=p_found) + if (p_found) then + call json%info(p,name=name) ! want to keep the existing name + call json%create_array(var,name) ! create a new array variable + call json%replace(p,var,destroy=.true.) ! replace p with this array (destroy p) + !populate each element of the array: + do i=1,size(value) + call json%add(var, CK_'', value(i)) + end do + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. + end if + + end subroutine json_add_real_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_real_vec_by_path]] where ""path"" is kind=CDK). + + subroutine wrap_json_add_real_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(RK),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%json_add_real_vec_by_path(me,to_unicode(path),value,found,was_created) + + end subroutine wrap_json_add_real_vec_by_path +!***************************************************************************************** + +#ifndef REAL32 +!***************************************************************************************** +!> +! Wrapper to [[json_add_real_by_path]] for adding a real vector by path. 
+ + subroutine json_add_real32_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real32),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%add_by_path(me,path,real(value,RK),found,was_created) + + end subroutine json_add_real32_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_real32_vec_by_path]] where ""path"" is kind=CDK). + + subroutine wrap_json_add_real32_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real32),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%add_by_path(me,to_unicode(path),real(value,RK),found,was_created) + + end subroutine wrap_json_add_real32_vec_by_path +!***************************************************************************************** +#endif + +#ifdef REAL128 +!***************************************************************************************** +!> +! Wrapper to [[json_add_real_by_path]] for adding a real vector by path. + + subroutine json_add_real64_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! 
the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + real(real64),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%add_by_path(me,path,real(value,RK),found,was_created) + + end subroutine json_add_real64_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_real64_vec_by_path]] where ""path"" is kind=CDK). + + subroutine wrap_json_add_real64_vec_by_path(json,me,path,value,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + real(real64),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + + call json%add_by_path(me,to_unicode(path),real(value,RK),found,was_created) + + end subroutine wrap_json_add_real64_vec_by_path +!***************************************************************************************** +#endif + +!***************************************************************************************** +!> +! Wrapper to [[json_add_string_by_path]] for adding a string vector by path. +! +!@note The `ilen` input can be used to specify the actual lengths of the +! the strings in the array. They must all be `<= len(value)`. + + subroutine json_add_string_vec_by_path(json,me,path,value,found,was_created,ilen,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! 
the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + integer(IK),dimension(:),intent(in),optional :: ilen !! the string lengths of each + !! element in `value`. If not present, + !! the full `len(value)` string is added + !! for each element. + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + + type(json_value),pointer :: p !! pointer to path (which may exist) + type(json_value),pointer :: var !! new variable that is created + integer(IK) :: i !! counter + character(kind=CK,len=:),allocatable :: name !! the variable name + logical(LK) :: p_found !! if the path was successfully found (or created) + + if ( .not. json%exception_thrown ) then + + ! validate ilen array if present: + if (present(ilen)) then + if (size(ilen)/=size(value)) then + call json%throw_exception('Error in json_add_string_vec_by_path: '//& + 'Invalid size of ilen input vector.',found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + if (present(was_created)) was_created = .false. + return + else + ! also have to validate the specified lengths. + ! (must not be greater than input string length) + do i = 1, size(value) + if (ilen(i)>len(value)) then + call json%throw_exception('Error in json_add_string_vec_by_path: '//& + 'Invalid ilen element.',found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + if (present(was_created)) was_created = .false. 
+ return + end if + end do + end if + end if + + !get a pointer to the variable + !(creating it if necessary) + call json%create(me,path,p,found=p_found) + if (p_found) then + call json%info(p,name=name) ! want to keep the existing name + call json%create_array(var,name) ! create a new array variable + call json%replace(p,var,destroy=.true.) ! replace p with this array (destroy p) + !populate each element of the array: + do i=1,size(value) + if (present(ilen)) then + call json%add(var, CK_'', value(i)(1:ilen(i)), & + trim_str=trim_str, adjustl_str=adjustl_str) + else + call json%add(var, CK_'', value(i), & + trim_str=trim_str, adjustl_str=adjustl_str) + end if + end do + end if + + else + if ( present(found) ) found = .false. + if ( present(was_created) ) was_created = .false. + end if + + end subroutine json_add_string_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_string_vec_by_path]] where ""path"" and ""value"" are kind=CDK). + + subroutine wrap_json_add_string_vec_by_path(json,me,path,value,& + found,was_created,ilen,& + trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),dimension(:),intent(in):: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + integer(IK),dimension(:),intent(in),optional :: ilen !! the string lengths of each + !! element in `value`. If not present, + !! the full `len(value)` string is added + !! for each element. + logical(LK),intent(in),optional :: trim_str !! 
if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + + call json%json_add_string_vec_by_path(me,to_unicode(path),to_unicode(value),& + found,was_created,ilen,trim_str,adjustl_str) + + end subroutine wrap_json_add_string_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_string_vec_by_path]] where ""value"" is kind=CDK). + + subroutine json_add_string_vec_by_path_value_ascii(json,me,path,value,& + found,was_created,ilen,& + trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CK,len=*),intent(in) :: path !! the path to the variable + character(kind=CDK,len=*),dimension(:),intent(in):: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + integer(IK),dimension(:),intent(in),optional :: ilen !! the string lengths of each + !! element in `value`. If not present, + !! the full `len(value)` string is added + !! for each element. + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + + call json%json_add_string_vec_by_path(me,path,to_unicode(value),& + found,was_created,ilen,trim_str,adjustl_str) + + end subroutine json_add_string_vec_by_path_value_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_add_string_vec_by_path]] where ""path"" is kind=CDK). 
+ + subroutine json_add_string_vec_by_path_path_ascii(json,me,path,value,& + found,was_created,ilen,& + trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me !! the JSON structure + character(kind=CDK,len=*),intent(in) :: path !! the path to the variable + character(kind=CK,len=*),dimension(:),intent(in) :: value !! the vector to add + logical(LK),intent(out),optional :: found !! if the variable was found + logical(LK),intent(out),optional :: was_created !! if the variable had to be created + integer(IK),dimension(:),intent(in),optional :: ilen !! the string lengths of each + !! element in `value`. If not present, + !! the full `len(value)` string is added + !! for each element. + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + + call json%json_add_string_vec_by_path(me,to_unicode(path),value,& + found,was_created,ilen,trim_str,adjustl_str) + + end subroutine json_add_string_vec_by_path_path_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/19/2014 +! +! Add a real value child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! used to build a JSON structure using [[json_value]] pointers. + + subroutine json_value_add_real(json,p,name,val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name !! variable name + real(RK),intent(in) :: val !! 
real value + + type(json_value),pointer :: var + + !create the variable: + call json%create_real(var,val,name) + + !add it: + call json%add(p, var) + + end subroutine json_value_add_real +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_real(json,p,name,val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! variable name + real(RK),intent(in) :: val !! real value + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_real +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/20/2014 +! +! Add a real vector child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! used to build a JSON structure using [[json_value]] pointers. + + subroutine json_value_add_real_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name + real(RK),dimension(:),intent(in) :: val + + type(json_value),pointer :: var + integer(IK) :: i !! counter + + !create the variable as an array: + call json%create_array(var,name) + + !populate the array: + do i=1,size(val) + call json%add(var, CK_'', val(i)) + end do + + !add it: + call json%add(p, var) + + end subroutine json_value_add_real_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_value_add_real_vec]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_real_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name + real(RK),dimension(:),intent(in) :: val + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_real_vec +!***************************************************************************************** + +#ifndef REAL32 +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real]] where `val` is `real32`. + + subroutine json_value_add_real32(json,p,name,val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name !! variable name + real(real32),intent(in) :: val !! real value + + call json%add(p,name,real(val,RK)) + + end subroutine json_value_add_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real32]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_real32(json,p,name,val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! variable name + real(real32),intent(in) :: val !! real value + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real_vec]] where `val` is `real32`. 
+ + subroutine json_value_add_real32_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name + real(real32),dimension(:),intent(in) :: val + + call json%add(p,name,real(val,RK)) + + end subroutine json_value_add_real32_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real32_vec]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_real32_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name + real(real32),dimension(:),intent(in) :: val + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_real32_vec +!***************************************************************************************** +#endif + +#ifdef REAL128 +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real]] where `val` is `real64`. + + subroutine json_value_add_real64(json,p,name,val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name !! variable name + real(real64),intent(in) :: val !! real value + + call json%add(p,name,real(val,RK)) + + end subroutine json_value_add_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real64]] where `name` is kind=CDK. 
+ + subroutine wrap_json_value_add_real64(json,p,name,val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! variable name + real(real64),intent(in) :: val !! real value + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real_vec]] where `val` is `real64`. + + subroutine json_value_add_real64_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name + real(real64),dimension(:),intent(in) :: val + + call json%add(p, name, real(val,RK)) + + end subroutine json_value_add_real64_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_real64_vec]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_real64_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name + real(real64),dimension(:),intent(in) :: val + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_real64_vec +!***************************************************************************************** +#endif + +!***************************************************************************************** +!> +! Add a NULL value child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! used to build a JSON structure using [[json_value]] pointers. 
+ + subroutine json_value_add_null(json, p, name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name + + type(json_value),pointer :: var + + !create the variable: + call json%create_null(var,name) + + !add it: + call json%add(p, var) + + end subroutine json_value_add_null +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_null]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_null(json, p, name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! name of the variable + + call json%add(p, to_unicode(name)) + + end subroutine wrap_json_value_add_null +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/20/2014 +! +! Add an integer value child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! used to build a JSON structure using [[json_value]] pointers. + + subroutine json_value_add_integer(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name + integer(IK),intent(in) :: val + + type(json_value),pointer :: var + + !create the variable: + call json%create_integer(var,val,name) + + !add it: + call json%add(p, var) + + end subroutine json_value_add_integer +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_value_add_integer]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_integer(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! name of the variable + integer(IK),intent(in) :: val !! value + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/20/2014 +! +! Add a integer vector child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! used to build a JSON structure using [[json_value]] pointers. + + subroutine json_value_add_integer_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name !! name of the variable + integer(IK),dimension(:),intent(in) :: val !! value + + type(json_value),pointer :: var + integer(IK) :: i !! counter + + !create a variable as an array: + call json%create_array(var,name) + + !populate the array: + do i=1,size(val) + call json%add(var, CK_'', val(i)) + end do + + !add it: + call json%add(p, var) + + end subroutine json_value_add_integer_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_integer_vec]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_integer_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! name of the variable + integer(IK),dimension(:),intent(in) :: val !! 
value + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_integer_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/20/2014 +! +! Add a logical value child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! used to build a JSON structure using [[json_value]] pointers. + + subroutine json_value_add_logical(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name !! name of the variable + logical(LK),intent(in) :: val !! value + + type(json_value),pointer :: var + + !create the variable: + call json%create_logical(var,val,name) + + !add it: + call json%add(p, var) + + end subroutine json_value_add_logical +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_logical]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_logical(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! name of the variable + logical(LK),intent(in) :: val !! value + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/20/2014 +! +! Add a logical vector child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! 
used to build a JSON structure using [[json_value]] pointers. + + subroutine json_value_add_logical_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name !! name of the vector + logical(LK),dimension(:),intent(in) :: val !! value + + type(json_value),pointer :: var + integer(IK) :: i !! counter + + !create the variable as an array: + call json%create_array(var,name) + + !populate the array: + do i=1,size(val) + call json%add(var, CK_'', val(i)) + end do + + !add it: + call json%add(p, var) + + end subroutine json_value_add_logical_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_logical_vec]] where `name` is kind=CDK. + + subroutine wrap_json_value_add_logical_vec(json, p, name, val) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! name of the variable + logical(LK),dimension(:),intent(in) :: val !! value + + call json%add(p, to_unicode(name), val) + + end subroutine wrap_json_value_add_logical_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/19/2014 +! +! Add a character string child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! used to build a JSON structure using [[json_value]] pointers. + + subroutine json_value_add_string(json, p, name, val, trim_str, adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name !! 
name of the variable + character(kind=CK,len=*),intent(in) :: val !! value + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + + type(json_value),pointer :: var + + !create the variable: + call json%create_string(var,val,name,trim_str,adjustl_str) + + !add it: + call json%add(p, var) + + end subroutine json_value_add_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_string]] where `name` and `val` are kind=CDK. + + subroutine wrap_json_value_add_string(json, p, name, val, trim_str, adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! name of the variable + character(kind=CDK,len=*),intent(in) :: val !! value + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + + call json%add(p, to_unicode(name), to_unicode(val), trim_str, adjustl_str) + + end subroutine wrap_json_value_add_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_string]] where `name` is kind=CDK. + + subroutine json_value_add_string_name_ascii(json, p, name, val, trim_str, adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name !! name of the variable + character(kind=CK, len=*),intent(in) :: val !! value + logical(LK),intent(in),optional :: trim_str !! 
if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + + call json%add(p, to_unicode(name), val, trim_str, adjustl_str) + + end subroutine json_value_add_string_name_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_string]] where `val` is kind=CDK. + + subroutine json_value_add_string_val_ascii(json, p, name, val, trim_str, adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK, len=*),intent(in) :: name !! name of the variable + character(kind=CDK,len=*),intent(in) :: val !! value + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + + call json%add(p, name, to_unicode(val), trim_str, adjustl_str) + + end subroutine json_value_add_string_val_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/19/2014 +! +! Add a character string vector child to the [[json_value]] variable. +! +!@note This routine is part of the public API that can be +! used to build a JSON structure using [[json_value]] pointers. + + subroutine json_value_add_string_vec(json, p, name, val, trim_str, adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name !! variable name + character(kind=CK,len=*),dimension(:),intent(in) :: val !! array of strings + logical(LK),intent(in),optional :: trim_str !! 
if TRIM() should be called for each element + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for each element + + type(json_value),pointer :: var + integer(IK) :: i !! counter + + !create the variable as an array: + call json%create_array(var,name) + + !populate the array: + do i=1,size(val) + call json%add(var, CK_'', val(i), trim_str, adjustl_str) + end do + + !add it: + call json%add(p, var) + + end subroutine json_value_add_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_string_vec]] where `name` and `val` are kind=CDK. + + subroutine wrap_json_value_add_string_vec(json, p, name, val, trim_str, adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name + character(kind=CDK,len=*),dimension(:),intent(in) :: val + logical(LK),intent(in),optional :: trim_str + logical(LK),intent(in),optional :: adjustl_str + + call json%add(p, to_unicode(name), to_unicode(val), trim_str, adjustl_str) + + end subroutine wrap_json_value_add_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_string_vec]] where `name` is kind=CDK. 
+ + subroutine json_value_add_string_vec_name_ascii(json, p, name, val, trim_str, adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name + character(kind=CK, len=*),dimension(:),intent(in) :: val + logical(LK),intent(in),optional :: trim_str + logical(LK),intent(in),optional :: adjustl_str + + call json%add(p, to_unicode(name), val, trim_str, adjustl_str) + + end subroutine json_value_add_string_vec_name_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_add_string_vec]] where `val` is kind=CDK. + + subroutine json_value_add_string_vec_val_ascii(json, p, name, val, trim_str, adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK, len=*),intent(in) :: name + character(kind=CDK,len=*),dimension(:),intent(in) :: val + logical(LK),intent(in),optional :: trim_str + logical(LK),intent(in),optional :: adjustl_str + + call json%add(p, name, to_unicode(val), trim_str, adjustl_str) + + end subroutine json_value_add_string_vec_val_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Count the number of children in the object or array. +! +!### History +! * JW : 1/4/2014 : Original routine removed. +! Now using `n_children` variable. +! Renamed from `json_value_count`. + + function json_count(json,p) result(count) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! this should normally be a `json_object` + !! or a `json_array`. For any other + !! variable type this will return 0. + integer(IK) :: count !! number of children in `p`. 
+ + if (associated(p)) then + count = p%n_children + else + call json%throw_exception('Error in json_count: '//& + 'pointer is not associated.') + end if + + end function json_count +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 10/16/2015 +! +! Returns a pointer to the parent of a [[json_value]]. +! If there is no parent, then a `null()` pointer is returned. + + subroutine json_get_parent(json,p,parent) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! JSON object + type(json_value),pointer,intent(out) :: parent !! pointer to `parent` + + if (associated(p)) then + parent => p%parent + else + nullify(parent) + call json%throw_exception('Error in json_get_parent: '//& + 'pointer is not associated.') + end if + + end subroutine json_get_parent +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 10/31/2015 +! +! Returns a pointer to the next of a [[json_value]]. +! If there is no next, then a `null()` pointer is returned. + + subroutine json_get_next(json,p,next) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! JSON object + type(json_value),pointer,intent(out) :: next !! pointer to `next` + + if (associated(p)) then + next => p%next + else + nullify(next) + call json%throw_exception('Error in json_get_next: '//& + 'pointer is not associated.') + end if + + end subroutine json_get_next +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 10/31/2015 +! 
+! Returns a pointer to the previous of a [[json_value]]. +! If there is no previous, then a `null()` pointer is returned. + + subroutine json_get_previous(json,p,previous) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! JSON object + type(json_value),pointer,intent(out) :: previous !! pointer to `previous` + + if (associated(p)) then + previous => p%previous + else + nullify(previous) + call json%throw_exception('Error in json_get_previous: '//& + 'pointer is not associated.') + end if + + end subroutine json_get_previous +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 10/31/2015 +! +! Returns a pointer to the tail of a [[json_value]] +! (the last child of an array of object). +! If there is no tail, then a `null()` pointer is returned. + + subroutine json_get_tail(json,p,tail) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! JSON object + type(json_value),pointer,intent(out) :: tail !! pointer to `tail` + + if (associated(p)) then + tail => p%tail + else + nullify(tail) + call json%throw_exception('Error in json_get_tail: '//& + 'pointer is not associated.') + end if + + end subroutine json_get_tail +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns a child in the object or array given the index. + + subroutine json_value_get_child_by_index(json, p, idx, child, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! object or array JSON data + integer(IK),intent(in) :: idx !! index of the child + !! (this is a 1-based Fortran + !! style array index). 
+ type(json_value),pointer :: child !! pointer to the child + logical(LK),intent(out),optional :: found !! true if the value was found + !! (if not present, an exception + !! will be thrown if it was not + !! found. If present and not + !! found, no exception will be + !! thrown). + + integer(IK) :: i !! counter + + nullify(child) + + if (.not. json%exception_thrown) then + + if (associated(p%children)) then + + ! If getting first or last child, we can do this quickly. + ! Otherwise, traverse the list. + if (idx==1) then + + child => p%children ! first one + + elseif (idx==p%n_children) then + + if (associated(p%tail)) then + child => p%tail ! last one + else + call json%throw_exception('Error in json_value_get_child_by_index:'//& + ' child%tail is not associated.',found) + end if + + elseif (idx<1 .or. idx>p%n_children) then + + call json%throw_exception('Error in json_value_get_child_by_index:'//& + ' idx is out of range.',found) + + else + + ! if idx is closer to the end, we traverse the list backward from tail, + ! otherwise we traverse it forward from children: + + if (p%n_children-idx < idx) then ! traverse backward + + child => p%tail + + do i = 1, p%n_children - idx + + if (associated(child%previous)) then + child => child%previous + else + call json%throw_exception('Error in json_value_get_child_by_index:'//& + ' child%previous is not associated.',found) + nullify(child) + exit + end if + + end do + + else ! traverse forward + + child => p%children + + do i = 1, idx - 1 + + if (associated(child%next)) then + child => child%next + else + call json%throw_exception('Error in json_value_get_child_by_index:'//& + ' child%next is not associated.',found) + nullify(child) + exit + end if + + end do + + end if + + end if + + else + + call json%throw_exception('Error in json_value_get_child_by_index:'//& + ' p%children is not associated.',found) + + end if + + ! 
found output: + if (json%exception_thrown) then + if (present(found)) then + call json%clear_exceptions() + found = .false. + end if + else + if (present(found)) found = .true. + end if + + else + if (present(found)) found = .false. + end if + + end subroutine json_value_get_child_by_index +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns pointer to the first child of the object +! (or `null()` if it is not associated). + + subroutine json_value_get_child(json, p, child) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! object or array JSON data + type(json_value),pointer :: child !! pointer to the child + + if (associated(p)) then + child => p%children + else + nullify(child) + call json%throw_exception('Error in json_value_get_child: '//& + 'pointer is not associated.') + end if + + end subroutine json_value_get_child +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns a child in the object or array given the name string. +! +! The name search can be case-sensitive or not, and can have significant trailing +! whitespace or not, depending on the settings in the [[json_core(type)]] class. +! +!@note The `name` input is not a path, and is not parsed like it is in [[json_get_by_path]]. + + subroutine json_value_get_child_by_name(json, p, name, child, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + character(kind=CK,len=*),intent(in) :: name !! the name of a child of `p` + type(json_value),pointer :: child !! pointer to the child + logical(LK),intent(out),optional :: found !! true if the value was found + !! (if not present, an exception + !! 
will be thrown if it was not + !! found. If present and not + !! found, no exception will be + !! thrown). + + integer(IK) :: i,n_children + logical :: error + + nullify(child) + + if (.not. json%exception_thrown) then + + if (associated(p)) then + + error = .true. ! will be false if it is found + if (p%var_type==json_object) then + n_children = json%count(p) + child => p%children !start with first one + do i=1, n_children + if (.not. associated(child)) then + call json%throw_exception(& + 'Error in json_value_get_child_by_name: '//& + 'Malformed JSON linked list',found) + exit + end if + if (allocated(child%name)) then + !name string matching routine: + if (json%name_equal(child,name)) then + error = .false. + exit + end if + end if + child => child%next + end do + end if + + if (error) then + !did not find anything: + call json%throw_exception(& + 'Error in json_value_get_child_by_name: '//& + 'child variable '//trim(name)//' was not found.',found) + nullify(child) + end if + + else + call json%throw_exception(& + 'Error in json_value_get_child_by_name: '//& + 'pointer is not associated.',found) + end if + + ! found output: + if (json%exception_thrown) then + if (present(found)) then + call json%clear_exceptions() + found = .false. + end if + else + if (present(found)) found = .true. + end if + + else + if (present(found)) found = .false. + end if + + end subroutine json_value_get_child_by_name +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 8/25/2017 +! +! Checks a JSON object for duplicate child names. +! +! It uses the specified settings for name matching (see [[name_strings_equal]]). +! +!@note This will only check for one duplicate, +! it will return the first one that it finds. 
+ + subroutine json_check_children_for_duplicate_keys(json,p,has_duplicate,name,path) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! the object to search. If `p` is + !! not a `json_object`, then `has_duplicate` + !! will be false. + logical(LK),intent(out) :: has_duplicate !! true if there is at least + !! two children have duplicate + !! `name` values. + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! the duplicate name + !! (unallocated if no + !! duplicate was found) + character(kind=CK,len=:),allocatable,intent(out),optional :: path !! the full path to the + !! duplicate name + !! (unallocated if no + !! duplicate was found) + + integer(IK) :: i !! counter + integer(IK) :: j !! counter + type(json_value),pointer :: child !! pointer to a child of `p` + integer(IK) :: n_children !! number of children of `p` + logical(LK) :: found !! flag for `get_child` + + type :: alloc_str + !! so we can have an array of allocatable strings + character(kind=CK,len=:),allocatable :: str !! name string + end type alloc_str + type(alloc_str),dimension(:),allocatable :: names !! array of all the + !! child name strings + + ! initialize: + has_duplicate =.false. + + if (.not. json%exception_thrown) then + + if (associated(p)) then + + if (p%var_type==json_object) then + + ! number of items to check: + n_children = json%count(p) + allocate(names(n_children)) + + ! first get a list of all the name keys: + do i=1, n_children + call json%get_child(p,i,child,found) ! get by index + if (.not. found) then + call json%throw_exception(& + 'Error in json_check_children_for_duplicate_keys: '//& + 'Malformed JSON linked list') + exit + end if + if (allocated(child%name)) then + names(i)%str = child%name + else + call json%throw_exception(& + 'Error in json_check_children_for_duplicate_keys: '//& + 'Object child name is not allocated') + exit + end if + end do + + if (.not. json%exception_thrown) then + ! 
now check the list for duplicates: + main: do i=1,n_children + do j=1,i-1 + if (json%name_strings_equal(names(i)%str,names(j)%str)) then + has_duplicate = .true. + if (present(name)) then + name = names(i)%str + end if + if (present(path)) then + call json%get_child(p,names(i)%str,child,found) ! get by name + if (found) then + call json%get_path(child,path,found) + if (.not. found) then + ! should never happen since we know it is there + call json%throw_exception(& + 'Error in json_check_children_for_duplicate_keys: '//& + 'Could not get path') + end if + else + ! should never happen since we know it is there + call json%throw_exception(& + 'Error in json_check_children_for_duplicate_keys: '//& + 'Could not get child: '//trim(names(i)%str)) + end if + end if + exit main + end if + end do + end do main + end if + + ! cleanup + do i=1,n_children + if (allocated(names(i)%str)) deallocate(names(i)%str) + end do + if (allocated(names)) deallocate(names) + + end if + + end if + + end if + + end subroutine json_check_children_for_duplicate_keys +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 8/25/2017 +! +! Checks a JSON structure for duplicate child names. +! This one recursively traverses the entire structure +! (calling [[json_check_children_for_duplicate_keys]] +! recursively for each element). +! +!@note This will only check for one duplicate, +! it will return the first one that it finds. + + subroutine json_check_all_for_duplicate_keys(json,p,has_duplicate,name,path) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! the object to search. If `p` is + !! not a `json_object`, then `has_duplicate` + !! will be false. + logical(LK),intent(out) :: has_duplicate !! true if there is at least + !! one duplicate `name` key anywhere + !! 
in the structure. + character(kind=CK,len=:),allocatable,intent(out),optional :: name !! the duplicate name + !! (unallocated if no + !! duplicates were found) + character(kind=CK,len=:),allocatable,intent(out),optional :: path !! the full path to the + !! duplicate name + !! (unallocated if no + !! duplicate was found) + + has_duplicate = .false. + if (.not. json%exception_thrown) then + call json%traverse(p,duplicate_key_func) + end if + + contains + + subroutine duplicate_key_func(json,p,finished) + + !! Callback function to check each element + !! for duplicate child names. + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + logical(LK),intent(out) :: finished + +#if defined __GFORTRAN__ + + ! this is a workaround for a gfortran bug (6 and 7), + + character(kind=CK,len=:),allocatable :: tmp_name !! temp variable for `name` string + character(kind=CK,len=:),allocatable :: tmp_path !! temp variable for `path` string + + if (present(name) .and. present(path)) then + call json%check_children_for_duplicate_keys(p,has_duplicate,name=tmp_name,path=tmp_path) + else if (present(name) .and. .not. present(path)) then + call json%check_children_for_duplicate_keys(p,has_duplicate,name=tmp_name) + else if (.not. present(name) .and. present(path)) then + call json%check_children_for_duplicate_keys(p,has_duplicate,path=tmp_path) + else + call json%check_children_for_duplicate_keys(p,has_duplicate) + end if + + if (has_duplicate) then + if (present(name)) name = tmp_name + if (present(path)) path = tmp_path + end if + +#else + call json%check_children_for_duplicate_keys(p,has_duplicate,name,path) +#endif + + finished = has_duplicate .or. 
json%exception_thrown + + end subroutine duplicate_key_func + + end subroutine json_check_all_for_duplicate_keys +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_get_child_by_name]] where `name` is kind=CDK. + + subroutine wrap_json_value_get_child_by_name(json, p, name, child, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + character(kind=CDK,len=*),intent(in) :: name + type(json_value),pointer :: child + logical(LK),intent(out),optional :: found + + call json%get(p,to_unicode(name),child,found) + + end subroutine wrap_json_value_get_child_by_name +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 2/12/2014 +! +! Print the [[json_value]] structure to an allocatable string. + + subroutine json_value_to_string(json,p,str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + character(kind=CK,len=:),intent(out),allocatable :: str !! prints structure to this string + + integer(IK) :: iloc !! used to keep track of size of str + !! since it is being allocated in chunks. + + str = repeat(space, print_str_chunk_size) + iloc = 0_IK + call json%json_value_print(p, iunit=unit2str, str=str, iloc=iloc, indent=1_IK, colon=.true.) + + ! trim the string if necessary: + if (len(str)>iloc) str = str(1:iloc) + + end subroutine json_value_to_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Print the [[json_value]] structure to the console (`output_unit`). +! +!### Note +! 
* Just a wrapper for [[json_print_to_unit]]. + + subroutine json_print_to_console(json,p) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + + call json%print(p,int(output_unit,IK)) + + end subroutine json_print_to_console +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 6/20/2014 +! +! Print the [[json_value]] structure to a file. + + subroutine json_print_to_unit(json,p,iunit) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + integer(IK),intent(in) :: iunit !! the file unit (the file must + !! already have been opened, can't be -1). + + character(kind=CK,len=:),allocatable :: dummy !! dummy for `str` argument + !! to [[json_value_print]] + integer(IK) :: idummy !! dummy for `iloc` argument + !! to [[json_value_print]] + + if (iunit/=unit2str) then + idummy = 0_IK + call json%json_value_print(p,iunit,str=dummy,iloc=idummy,indent=1_IK,colon=.true.) + else + call json%throw_exception('Error in json_print_to_unit: iunit must not be -1.') + end if + + end subroutine json_print_to_unit +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/23/2014 +! +! Print the [[json_value]] structure to a file. + + subroutine json_print_to_filename(json,p,filename) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + character(kind=CDK,len=*),intent(in) :: filename !! the filename to print to + !! (should not already be open) + + integer(IK) :: iunit !! file unit for `open` statement + integer(IK) :: istat !! 
`iostat` code for `open` statement + + open(newunit=iunit,file=filename,status='REPLACE',iostat=istat FILE_ENCODING ) + if (istat==0) then + call json%print(p,iunit) + close(iunit,iostat=istat) + else + call json%throw_exception('Error in json_print_to_filename: could not open file: '//& + trim(filename)) + end if + + end subroutine json_print_to_filename +!***************************************************************************************** + +!***************************************************************************************** +!> +! Print the JSON structure to a string or a file. +! +!### Notes +! * This is an internal routine called by the various wrapper routines. +! * The reason the `str` argument is non-optional is because of a +! bug in v4.9 of the gfortran compiler. + + recursive subroutine json_value_print(json,p,iunit,str,indent,& + need_comma,colon,is_array_element,& + is_compressed_vector,iloc) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + integer(IK),intent(in) :: iunit !! file unit to write to (the + !! file is assumed to be open) + integer(IK),intent(in),optional :: indent !! indention level + logical(LK),intent(in),optional :: is_array_element !! if this is an array element + logical(LK),intent(in),optional :: need_comma !! if it needs a comma after it + logical(LK),intent(in),optional :: colon !! if the colon was just written + character(kind=CK,len=:),intent(inout),allocatable :: str + !! if `iunit==unit2str` (-1) then + !! the structure is printed to this + !! string rather than a file. This mode + !! is used by [[json_value_to_string]]. + integer(IK),intent(inout) :: iloc !! current index in `str`. should be set to 0 initially. + !! [only used when `str` is used.] + logical(LK),intent(in),optional :: is_compressed_vector !! if True, this is an element + !! from an array being printed + !! on one line [default is False] + + character(kind=CK,len=max_numeric_str_len) :: tmp !! 
for value to string conversions + character(kind=CK,len=:),allocatable :: s_indent !! the string of spaces for + !! indenting (see `tab` and `spaces`) + character(kind=CK,len=:),allocatable :: s !! the string appended to `str` + type(json_value),pointer :: element !! for getting children + integer(IK) :: tab !! number of `tabs` for indenting + integer(IK) :: spaces !! number of spaces for indenting + integer(IK) :: i !! counter + integer(IK) :: count !! number of children + logical(LK) :: print_comma !! if the comma will be printed after the value + logical(LK) :: write_file !! if we are writing to a file + logical(LK) :: write_string !! if we are writing to a string + logical(LK) :: is_array !! if this is an element in an array + logical(LK) :: is_vector !! if all elements of a vector + !! are scalars of the same type + character(kind=CK,len=:),allocatable :: str_escaped !! escaped version of + !! `name` or `str_value` + + if (.not. json%exception_thrown) then + + if (.not. associated(p)) then + ! note: a null() pointer will trigger this error. + ! However, if the pointer is undefined, then this will + ! crash (if this wasn't here it would crash below when + ! we try to access the contents) + call json%throw_exception('Error in json_value_print: '//& + 'the pointer is not associated') + return + end if + + if (present(is_compressed_vector)) then + is_vector = is_compressed_vector + else + is_vector = .false. + end if + + !whether to write a string or a file (one or the other): + write_string = (iunit==unit2str) + write_file = .not. write_string + + !if the comma will be printed after the value + ! [comma not printed for the last elements] + if (present(need_comma)) then + print_comma = need_comma + else + print_comma = .false. + end if + + !number of ""tabs"" to indent: + if (present(indent) .and. .not. 
json%no_whitespace) then + tab = indent + else + tab = 0 + end if + !convert to number of spaces: + spaces = tab*json%spaces_per_tab + + !if this is an element in an array: + if (present(is_array_element)) then + is_array = is_array_element + else + is_array = .false. + end if + + !if the colon was the last thing written + if (present(colon)) then + s_indent = CK_'' + else + s_indent = repeat(space, spaces) + end if + + select case (p%var_type) + + case (json_object) + + count = json%count(p) + + if (count==0) then !special case for empty object + + s = s_indent//start_object//end_object + call write_it( comma=print_comma ) + + else + + s = s_indent//start_object + call write_it() + + !if an object is in an array, there is an extra tab: + if (is_array) then + if ( .not. json%no_whitespace) tab = tab+1 + spaces = tab*json%spaces_per_tab + end if + + nullify(element) + element => p%children + do i = 1, count + + if (.not. associated(element)) then + call json%throw_exception('Error in json_value_print: '//& + 'Malformed JSON linked list') + return + end if + + ! print the name + if (allocated(element%name)) then + call escape_string(element%name,str_escaped,json%escape_solidus) + if (json%no_whitespace) then + !compact printing - no extra space + s = repeat(space, spaces)//quotation_mark//& + str_escaped//quotation_mark//colon_char + call write_it(advance=.false.) + else + s = repeat(space, spaces)//quotation_mark//& + str_escaped//quotation_mark//colon_char//space + call write_it(advance=.false.) + end if + else + call json%throw_exception('Error in json_value_print:'//& + ' element%name not allocated') + nullify(element) + return + end if + + ! recursive print of the element + call json%json_value_print(element, iunit=iunit, indent=tab + 1_IK, & + need_comma=i element%next + + end do + + ! [one fewer tab if it isn't an array element] + if (.not. 
is_array) then + s = repeat(space, max(0_IK,spaces-json%spaces_per_tab))//end_object + else + s = s_indent//end_object + end if + call write_it( comma=print_comma ) + nullify(element) + + end if + + case (json_array) + + count = json%count(p) + + if (count==0) then ! special case for empty array + + s = s_indent//start_array//end_array + call write_it( comma=print_comma ) + + else + + ! if every child is the same type & a scalar: + is_vector = json%is_vector(p) + if (json%failed()) return + + s = s_indent//start_array + call write_it( advance=(.not. is_vector) ) + + !if an array is in an array, there is an extra tab: + if (is_array) then + if ( .not. json%no_whitespace) tab = tab+1 + spaces = tab*json%spaces_per_tab + end if + + nullify(element) + element => p%children + do i = 1, count + + if (.not. associated(element)) then + call json%throw_exception('Error in json_value_print: '//& + 'Malformed JSON linked list') + return + end if + + ! recursive print of the element + if (is_vector) then + call json%json_value_print(element, iunit=iunit, indent=0_IK,& + need_comma=i element%next + + end do + + !indent the closing array character: + if (is_vector) then + s = end_array + call write_it( comma=print_comma ) + else + s = repeat(space, max(0_IK,spaces-json%spaces_per_tab))//end_array + call write_it( comma=print_comma ) + end if + nullify(element) + + end if + + case (json_null) + + s = s_indent//null_str + call write_it( comma=print_comma, & + advance=(.not. is_vector),& + space_after_comma=is_vector ) + + case (json_string) + + if (allocated(p%str_value)) then + ! have to escape the string for printing: + call escape_string(p%str_value,str_escaped,json%escape_solidus) + s = s_indent//quotation_mark//str_escaped//quotation_mark + call write_it( comma=print_comma, & + advance=(.not. 
is_vector),& + space_after_comma=is_vector ) + else + call json%throw_exception('Error in json_value_print:'//& + ' p%value_string not allocated') + return + end if + + case (json_logical) + + if (p%log_value) then + s = s_indent//true_str + call write_it( comma=print_comma, & + advance=(.not. is_vector),& + space_after_comma=is_vector ) + else + s = s_indent//false_str + call write_it( comma=print_comma, & + advance=(.not. is_vector),& + space_after_comma=is_vector ) + end if + + case (json_integer) + + call integer_to_string(p%int_value,int_fmt,tmp) + + s = s_indent//trim(tmp) + call write_it( comma=print_comma, & + advance=(.not. is_vector),& + space_after_comma=is_vector ) + + case (json_real) + + if (allocated(json%real_fmt)) then + call real_to_string(p%dbl_value,json%real_fmt,json%compact_real,json%non_normals_to_null,tmp) + else + !use the default format (user has not called initialize() or specified one): + call real_to_string(p%dbl_value,default_real_fmt,json%compact_real,json%non_normals_to_null,tmp) + end if + + s = s_indent//trim(tmp) + call write_it( comma=print_comma, & + advance=(.not. is_vector),& + space_after_comma=is_vector ) + + case default + + call integer_to_string(p%var_type,int_fmt,tmp) + call json%throw_exception('Error in json_value_print: '//& + 'unknown data type: '//trim(tmp)) + + end select + + end if + + contains + + subroutine write_it(advance,comma,space_after_comma) + + !! write the string `s` to the file (or the output string) + + implicit none + + logical(LK),intent(in),optional :: advance !! to add line break or not + logical(LK),intent(in),optional :: comma !! print comma after the string + logical(LK),intent(in),optional :: space_after_comma !! print a space after the comma + + logical(LK) :: add_comma !! if a delimiter is to be added after string + logical(LK) :: add_line_break !! if a line break is to be added after string + logical(LK) :: add_space !! if a space is to be added after the comma + integer(IK) :: n !! 
length of actual string `s` appended to `str` + integer(IK) :: room_left !! number of characters left in `str` + integer(IK) :: n_chunks_to_add !! number of chunks to add to `str` for appending `s` + + if (present(comma)) then + add_comma = comma + else + add_comma = .false. !default is not to add comma + end if + if (json%no_whitespace) then + add_space = .false. + else + if (present(space_after_comma)) then + add_space = space_after_comma + else + add_space = .false. !default is not to add space + end if + end if + if (present(advance)) then + if (json%no_whitespace) then + ! overrides input value: + add_line_break = .false. + else + add_line_break = advance + end if + else + add_line_break = .not. json%no_whitespace ! default is to advance if + ! we are printing whitespace + end if + + ! string to print: + if (add_comma) then + if (add_space) then + s = s // delimiter // space + else + s = s // delimiter + end if + end if + + if (write_file) then + + if (add_line_break) then + write(iunit,fmt='(A)') s + else + write(iunit,fmt='(A)',advance='NO') s + end if + + else !write string + + if (add_line_break) s = s // newline + + n = len(s) + room_left = len(str)-iloc + if (room_left < n) then + ! need to add another chunk to fit this string: + n_chunks_to_add = max(1_IK, ceiling( real(len(s)-room_left,RK) / real(chunk_size,RK), IK ) ) + str = str // repeat(space, print_str_chunk_size*n_chunks_to_add) + end if + ! append s to str: + str(iloc+1:iloc+n) = s + iloc = iloc + n + + end if + + end subroutine write_it + + end subroutine json_value_print +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns true if all the children are the same type (and a scalar). +! Note that integers and reals are considered the same type for this purpose. +! This routine is used for the `compress_vectors` option. 
+ + function json_is_vector(json, p) result(is_vector) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + logical(LK) :: is_vector !! if all elements of a vector + !! are scalars of the same type + + integer(IK) :: var_type_prev !! for getting the variable type of children + integer(IK) :: var_type !! for getting the variable type of children + type(json_value),pointer :: element !! for getting children + integer(IK) :: i !! counter + integer(IK) :: count !! number of children + + integer(IK),parameter :: json_invalid = -1_IK !! to initialize the flag. an invalid value + integer(IK),parameter :: json_numeric = -2_IK !! indicates `json_integer` or `json_real` + + if (json%compress_vectors) then + ! check to see if every child is the same type, + ! and a scalar: + is_vector = .true. + var_type_prev = json_invalid + count = json%count(p) + element => p%children + do i = 1_IK, count + if (.not. associated(element)) then + call json%throw_exception('Error in json_is_vector: '//& + 'Malformed JSON linked list') + return + end if + ! check variable type of all the children. + ! They must all be the same, and a scalar. + call json%info(element,var_type=var_type) + ! special check for numeric values: + if (var_type==json_integer .or. var_type==json_real) var_type = json_numeric + if (var_type==json_object .or. & + var_type==json_array .or. & + (i>1_IK .and. var_type/=var_type_prev)) then + is_vector = .false. + exit + end if + var_type_prev = var_type + ! get the next child the list: + element => element%next + end do + else + is_vector = .false. + end if + + end function json_is_vector +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns true if the `path` is present in the `p` JSON structure. +! +!@note Just a wrapper for [[json_get_by_path]], so it uses the +! 
specified `path_mode` and other settings. + + function json_valid_path(json, p, path) result(found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! a JSON linked list + character(kind=CK,len=*),intent(in) :: path !! path to the variable + logical(LK) :: found !! true if it was found + + type(json_value),pointer :: tmp !! pointer to the variable specified by `path` + + call json%get(p, path, tmp, found) + + end function json_valid_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_valid_path]] where ""path"" is kind=CDK. + + function wrap_json_valid_path(json, p, path) result(found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! a JSON linked list + character(kind=CDK,len=*),intent(in) :: path !! path to the variable + logical(LK) :: found !! true if it was found + + found = json%valid_path(p, to_unicode(path)) + + end function wrap_json_valid_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns the [[json_value]] pointer given the path string. +! +! It uses one of three methods: +! +! * The original JSON-Fortran defaults +! * [RFC 6901](https://tools.ietf.org/html/rfc6901) +! * [JSONPath](http://goessner.net/articles/JsonPath/) ""bracket-notation"" + + subroutine json_get_by_path(json, me, path, p, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me !! a JSON linked list + character(kind=CK,len=*),intent(in) :: path !! path to the variable + type(json_value),pointer,intent(out) :: p !! pointer to the variable + !! 
specified by `path` + logical(LK),intent(out),optional :: found !! true if it was found + + character(kind=CK,len=max_integer_str_len),allocatable :: path_mode_str !! string version + !! of `json%path_mode` + + nullify(p) + + if (.not. json%exception_thrown) then + + select case (json%path_mode) + case(1_IK) + call json%json_get_by_path_default(me, path, p, found) + case(2_IK) + call json%json_get_by_path_rfc6901(me, path, p, found) + case(3_IK) + call json%json_get_by_path_jsonpath_bracket(me, path, p, found) + case default + call integer_to_string(json%path_mode,int_fmt,path_mode_str) + call json%throw_exception('Error in json_get_by_path: Unsupported path_mode: '//& + trim(path_mode_str)) + if (present(found)) found = .false. + end select + + if (present(found)) then + if (.not. found) call json%clear_exceptions() + end if + + else + if (present(found)) found = .false. + end if + + end subroutine json_get_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns the [[json_value]] pointer given the path string, +! If necessary, by creating the variables as needed. +! +! By default, the leaf node and any empty array elements +! are created as `json_null` values. +! +! It only works for `path_mode=1` or `path_mode=3`. +! An error will be thrown for `path_mode=2` (RFC 6901). +! +!### See also +! * [[json_get_by_path]] + + subroutine json_create_by_path(json,me,path,p,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me !! a JSON linked list + character(kind=CK,len=*),intent(in) :: path !! path to the variable + type(json_value),pointer,intent(out),optional :: p !! pointer to the variable + !! specify by `path` + logical(LK),intent(out),optional :: found !! true if there were no errors + !! 
(variable found or created) + logical(LK),intent(out),optional :: was_created !! true if it was actually created + !! (as opposed to already being there) + + type(json_value),pointer :: tmp + character(kind=CK,len=max_integer_str_len) :: path_mode_str !! string version + !! of `json%path_mode` + + if (present(p)) nullify(p) + + if (.not. json%exception_thrown) then + + select case (json%path_mode) + case(1_IK) + call json%json_get_by_path_default(me,path,tmp,found,& + create_it=.true.,& + was_created=was_created) + if (present(p)) p => tmp + case(3_IK) + call json%json_get_by_path_jsonpath_bracket(me,path,tmp,found,& + create_it=.true.,& + was_created=was_created) + if (present(p)) p => tmp + + case default + + if (json%path_mode==2_IK) then + ! the problem here is there isn't really a way to disambiguate + ! the array elements, so '/a/0' could be 'a(1)' or 'a.0'. + call json%throw_exception('Error in json_create_by_path: '//& + 'Create by path not supported in RFC 6901 path mode.') + else + call integer_to_string(json%path_mode,int_fmt,path_mode_str) + call json%throw_exception('Error in json_create_by_path: Unsupported path_mode: '//& + trim(path_mode_str)) + end if + if (present(found)) then + call json%clear_exceptions() + found = .false. + end if + if (present(was_created)) was_created = .false. + end select + + else + if (present(was_created)) was_created = .false. + if (present(found)) found = .false. + end if + + end subroutine json_create_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_create_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_create_by_path(json,me,path,p,found,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me !! a JSON linked list + character(kind=CDK,len=*),intent(in) :: path !! 
path to the variable + type(json_value),pointer,intent(out),optional :: p !! pointer to the variable + !! specify by `path` + logical(LK),intent(out),optional :: found !! true if there were no errors + !! (variable found or created) + logical(LK),intent(out),optional :: was_created !! true if it was actually created + !! (as opposed to already being there) + + call json%create(me,to_unicode(path),p,found,was_created) + + end subroutine wrap_json_create_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Rename a [[json_value]], given the path. +! +!@note this is a wrapper for [[json_value_rename]]. + + subroutine json_rename_by_path(json, me, path, name, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path !! path to the variable to rename + character(kind=CK,len=*),intent(in) :: name !! the new name + logical(LK),intent(out),optional :: found !! if there were no errors + + type(json_value),pointer :: p + + if ( json%exception_thrown ) then + if ( present(found) ) found = .false. + return + end if + + nullify(p) + call json%get(me=me, path=path, p=p) + + if (.not. associated(p)) then + call json%throw_exception('Error in json_rename_by_path:'//& + ' Unable to resolve path: '//trim(path),found) + else + call json%rename(p,name) + nullify(p) + end if + + if (json%exception_thrown) then + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + else + if (present(found)) found = .true. + end if + + end subroutine json_rename_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_rename_by_path]], where ""path"" and ""name"" are kind=CDK + + subroutine wrap_json_rename_by_path(json, me, path, name, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + character(kind=CDK,len=*),intent(in) :: name + logical(LK),intent(out),optional :: found + + call json%rename(me,to_unicode(path),to_unicode(name),found) + + end subroutine wrap_json_rename_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_rename_by_path]], where ""name"" is kind=CDK + + subroutine json_rename_by_path_name_ascii(json, me, path, name, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + character(kind=CDK,len=*),intent(in) :: name + logical(LK),intent(out),optional :: found + + call json%rename(me,path,to_unicode(name),found) + + end subroutine json_rename_by_path_name_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_rename_by_path]], where ""path"" is kind=CDK + + subroutine json_rename_by_path_path_ascii(json, me, path, name, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + character(kind=CK,len=*),intent(in) :: name + logical(LK),intent(out),optional :: found + + call json%rename(me,to_unicode(path),name,found) + + end subroutine json_rename_by_path_path_ascii +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns the [[json_value]] pointer given the path string. +! +!### Example +! +!````fortran +! type(json_core) :: json +! type(json_value),pointer :: dat,p +! logical :: found +! !... +! call json%initialize(path_mode=1) ! this is the default so not strictly necessary. +! call json%get(dat,'data(2).version',p,found) +!```` +! +!### Notes +! The syntax used here is a subset of the +! [http://goessner.net/articles/JsonPath/](JSONPath) ""dot–notation"". +! The following special characters are used to denote paths: +! +! * `$` - root +! * `@` - this +! * `.` - child object member (note this can be changed using `json%path_separator`) +! * `[]` or `()` - child array element (note that indices are 1-based) +! +! Thus, if any of these characters are present in the name key, +! this routine cannot be used to get the value. +! In that case, the `get_child` methods would need to be used. +! Or, the alternate [[json_get_by_path_rfc6901]] could be used. +! +!### See also +! * [[json_get_by_path_rfc6901]] +! * [[json_get_by_path_jsonpath_bracket]] +! +!@note The syntax is inherited from FSON, and is basically a subset +! of JSONPath ""dot-notation"", with the additional allowance of +! () for array elements. +! +!@note JSON `null` values are used here for unknown variables when `create_it` is True. +! 
So, it is possible that an existing null variable can be converted to another +! type (object or array) if a child is specified in the path. Doing it this way +! to avoid having to use another type (say `json_unknown`) that would have to be +! converted to null once all the variables have been created (user would have +! had to do this). +! +!@warning See (**) in code. I think we need to protect for memory leaks when +! changing the type of a variable that already exists. + + subroutine json_get_by_path_default(json,me,path,p,found,create_it,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me !! a JSON linked list + character(kind=CK,len=*),intent(in) :: path !! path to the variable + type(json_value),pointer,intent(out) :: p !! pointer to the variable + !! specify by `path` + logical(LK),intent(out),optional :: found !! true if it was found + logical(LK),intent(in),optional :: create_it !! if a variable is not present + !! in the path, then it is created. + !! the leaf node is returned as + !! a `null` json type and can be + !! changed by the caller. + logical(LK),intent(out),optional :: was_created !! if `create_it` is true, this + !! will be true if the variable + !! was actually created. Otherwise + !! it will be false. + + integer(IK) :: i !! counter of characters in `path` + integer(IK) :: length !! significant length of `path` + integer(IK) :: child_i !! index for getting children + character(kind=CK,len=1) :: c !! a character in the `path` + logical(LK) :: array !! flag when searching for array index in `path` + type(json_value),pointer :: tmp !! temp variables for getting child objects + logical(LK) :: child_found !! if the child value was found + logical(LK) :: create !! if the object is to be created + logical(LK) :: created !! if `create` is true, then this will be + !! true if the leaf object had to be created + integer(IK) :: j !! 
counter of children when creating object + logical(LK) :: status_ok !! integer to string conversion flag + + nullify(p) + + if (.not. json%exception_thrown) then + + if (present(create_it)) then + create = create_it + else + create = .false. + end if + + ! default to assuming relative to me + p => me + + child_i = 1 + array = .false. + created = .false. + + !keep trailing space or not: + if (json%trailing_spaces_significant) then + length = len(path) + else + length = len_trim(path) + end if + + do i=1, length + + c = path(i:i) + + select case (c) + case (root) + + ! root + do while (associated (p%parent)) + p => p%parent + end do + child_i = i + 1 + if (create) created = .false. ! should always exist + + case (this) + + ! this + p => me + child_i = i + 1 + if (create) created = .false. ! should always exist + + case (start_array,start_array_alt) + + ! start looking for the array element index + array = .true. + + ! get child member from p + if (child_i < i) then + nullify(tmp) + if (create) then + + ! Example: + ! 'aaa.bbb(1)' + ! -> and aaa is a null, need to make it an object + ! + ! What about the case: aaa.bbb(1)(3) ? + ! Is that already handled? + + if (p%var_type==json_null) then ! (**) + ! if p was also created, then we need to + ! convert it into an object here: + p%var_type = json_object + end if + + ! don't want to throw exceptions in this case + call json%get_child(p, path(child_i:i-1), tmp, child_found) + if (.not. child_found) then + ! have to create this child + ! [make it an array] + call json_value_create(tmp) + call json%to_array(tmp,path(child_i:i-1)) + call json%add(p,tmp) + created = .true. + else + created = .false. + end if + else + ! call the normal way + call json%get_child(p, path(child_i:i-1), tmp) + end if + p => tmp + else + child_i = i + 1 ! say, '@(' + cycle + end if + if (.not. 
associated(p)) then + call json%throw_exception('Error in json_get_by_path_default:'//& + ' Error getting array element',found) + exit + end if + child_i = i + 1 + + case (end_array,end_array_alt) + + if (.not. array) then + call json%throw_exception('Error in json_get_by_path_default:'//& + ' Unexpected '//c,found) + exit + end if + array = .false. + call string_to_integer(path(child_i:i-1),child_i,status_ok) + if (.not. status_ok) then + call json%throw_exception('Error in json_get_by_path_default:'//& + ' Could not convert array index to integer: '//& + trim(path(child_i:i-1)),found) + exit + end if + + nullify(tmp) + if (create) then + ! don't want to throw exceptions in this case + call json%get_child(p, child_i, tmp, child_found) + if (.not. child_found) then + + if (p%var_type==json_null) then ! (**) + ! if p was also created, then we need to + ! convert it into an array here: + p%var_type = json_array + end if + + ! have to create this element + ! [make it a null] + ! (and any missing ones before it) + do j = 1, child_i + nullify(tmp) + call json%get_child(p, j, tmp, child_found) + if (.not. child_found) then + call json_value_create(tmp) + call json%to_null(tmp) ! array element doesn't need a name + call json%add(p,tmp) + if (j==child_i) created = .true. + else + if (j==child_i) created = .false. + end if + end do + + else + created = .false. + end if + + else + ! call the normal way: + call json%get_child(p, child_i, tmp) + end if + + p => tmp + + child_i = i + 1 + + case default + + if (c==json%path_separator) then + + ! get child member from p + if (child_i < i) then + nullify(tmp) + if (create) then + if (p%var_type==json_null) then ! (**) + ! if p was also created, then we need to + ! convert it into an object here: + p%var_type = json_object + end if + + ! don't want to throw exceptions in this case + call json%get_child(p, path(child_i:i-1), tmp, child_found) + if (.not. child_found) then + ! have to create this child + ! 
[make it an object] + call json_value_create(tmp) + call json%to_object(tmp,path(child_i:i-1)) + call json%add(p,tmp) + created = .true. + else + created = .false. + end if + else + ! call the normal way + call json%get_child(p, path(child_i:i-1), tmp) + end if + p => tmp + else + child_i = i + 1 ! say '$.', '@.', or ').' + cycle + end if + + if (.not. associated(p)) then + call json%throw_exception('Error in json_get_by_path_default:'//& + ' Error getting child member.',found) + exit + end if + + child_i = i + 1 + + end if + + end select + + end do + + if (json%exception_thrown) then + + if (present(found)) then + nullify(p) ! just in case + found = .false. + call json%clear_exceptions() + end if + + else + + ! grab the last child if present in the path + if (child_i <= length) then + nullify(tmp) + if (create) then + if (p%var_type==json_null) then ! (**) + ! if p was also created, then we need to + ! convert it into an object here: + p%var_type = json_object + end if + + call json%get_child(p, path(child_i:i-1), tmp, child_found) + if (.not. child_found) then + ! have to create this child + ! (make it a null since it is the leaf) + call json_value_create(tmp) + call json%to_null(tmp,path(child_i:i-1)) + call json%add(p,tmp) + created = .true. + else + created = .false. + end if + else + ! call the normal way + call json%get_child(p, path(child_i:i-1), tmp) + end if + p => tmp + else + ! we already have p + if (create .and. created) then + ! make leaf p a null, but only + ! if it wasn't there + call json%to_null(p) + end if + end if + + ! error checking + if (associated(p)) then + if (present(found)) found = .true. !everything seems to be ok + else + call json%throw_exception('Error in json_get_by_path_default:'//& + ' variable not found: '//trim(path),found) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + end if + + end if + + ! 
if it had to be created: + if (present(was_created)) was_created = created + + else + if (present(found)) found = .false. + if (present(was_created)) was_created = .false. + end if + + end subroutine json_get_by_path_default +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 2/4/2017 +! +! Returns the [[json_value]] pointer given the path string, +! using the ""JSON Pointer"" path specification defined by RFC 6901. +! +! Note that trailing whitespace significance and case sensitivity +! are user-specified. To fully conform to the RFC 6901 standard, +! should probably set (via `initialize`): +! +! * `case_sensitive_keys = .true.` [this is the default setting] +! * `trailing_spaces_significant = .true.` [this is *not* the default setting] +! * `allow_duplicate_keys = .false.` [this is *not* the default setting] +! +!### Example +! +!````fortran +! type(json_core) :: json +! type(json_value),pointer :: dat,p +! logical :: found +! !... +! call json%initialize(path_mode=2) +! call json%get(dat,'/data/2/version',p,found) +!```` +! +!### See also +! * [[json_get_by_path_default]] +! * [[json_get_by_path_jsonpath_bracket]] +! +!### Reference +! * [JavaScript Object Notation (JSON) Pointer](https://tools.ietf.org/html/rfc6901) +! +!@note Not doing anything special about the `-` character to index an array. +! This is considered a normal error. +! +!@note Unlike in the default path mode, the array indices here are 0-based +! (in accordance with the RFC 6901 standard) +! +!@warning Not checking if the member that is referenced is unique. +! (according to the standard, evaluation of non-unique references +! should fail). Like [[json_get_by_path_default]], this one will just return +! the first instance it encounters. This might be changed in the future. +! 
+!@warning I think the standard indicates that the input paths should use +! escaped JSON strings (currently we are assuming they are not escaped). + + subroutine json_get_by_path_rfc6901(json, me, path, p, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me !! a JSON linked list + character(kind=CK,len=*),intent(in) :: path !! path to the variable + !! (an RFC 6901 ""JSON Pointer"") + type(json_value),pointer,intent(out) :: p !! pointer to the variable + !! specify by `path` + logical(LK),intent(out),optional :: found !! true if it was found + + character(kind=CK,len=:),allocatable :: token !! a token in the path (between the `/` characters) + integer(IK) :: i !! counter + integer(IK) :: islash_curr !! location of current '/' character in the path + integer(IK) :: islash_next !! location of next '/' character in the path + integer(IK) :: ilen !! length of `path` string + type(json_value),pointer :: tmp !! temporary variable for traversing the structure + integer(IK) :: ival !! integer array index value (0-based) + logical(LK) :: status_ok !! error flag + logical(LK) :: child_found !! for getting child values + + nullify(p) + + if (.not. json%exception_thrown) then + + p => me ! initialize + + if (path/=CK_'') then + + if (path(1:1)==slash) then ! the first character must be a slash + + islash_curr = 1 ! initialize current slash index + + !keep trailing space or not: + if (json%trailing_spaces_significant) then + ilen = len(path) + else + ilen = len_trim(path) + end if + + do + + ! get the next token by finding the slashes + ! + ! 1 2 3 + ! /abc/d/efg + + if (islash_curr==ilen) then + !the last token is an empty string + token = CK_'' + islash_next = 0 ! will signal to stop + else + + ! . + ! '/123/567/' + + ! index in remaining string: + islash_next = index(path(islash_curr+1:ilen),slash) + if (islash_next<=0) then + !last token: + token = path(islash_curr+1:ilen) + else + ! 
convert to actual index in path: + islash_next = islash_curr + index(path(islash_curr+1:ilen),slash) + if (islash_next>islash_curr+1) then + token = path(islash_curr+1:islash_next-1) + else + !empty token: + token = CK_'' + end if + end if + + end if + + ! remove trailing spaces in the token here if necessary: + if (.not. json%trailing_spaces_significant) & + token = trim(token) + + ! decode the token: + token = decode_rfc6901(token) + + ! now, parse the token: + + ! first see if there is a child with this name + call json%get_child(p,token,tmp,child_found) + if (child_found) then + ! it was found + p => tmp + else + ! No key with this name. + ! Is it an integer? If so, + ! it might be an array index. + status_ok = (len(token)>0) + if (status_ok) then + do i=1,len(token) + ! It must only contain (0..9) characters + ! (it must be unsigned) + if (scan(token(i:i),CK_'0123456789')<1) then + status_ok = .false. + exit + end if + end do + if (status_ok) then + if (len(token)>1 .and. token(1:1)==CK_'0') then + ! leading zeros not allowed for some reason + status_ok = .false. + end if + end if + if (status_ok) then + ! if we make it this far, it should be + ! convertible to an integer, so do it. + call string_to_integer(token,ival,status_ok) + end if + end if + if (status_ok) then + ! ival is an array index (0-based) + call json%get_child(p,ival+1_IK,tmp,child_found) + if (child_found) then + p => tmp + else + ! not found + status_ok = .false. + end if + end if + if (.not. status_ok) then + call json%throw_exception('Error in json_get_by_path_rfc6901: '//& + 'invalid path specification: '//trim(path),found) + exit + end if + end if + + if (islash_next<=0) exit ! finished + + ! 
set up for next token: + islash_curr = islash_next + + end do + + else + call json%throw_exception('Error in json_get_by_path_rfc6901: '//& + 'invalid path specification: '//trim(path),found) + end if + end if + + if (json%exception_thrown) then + nullify(p) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + else + if (present(found)) found = .true. + end if + + else + if (present(found)) found = .false. + end if + + end subroutine json_get_by_path_rfc6901 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 9/2/2017 +! +! Returns the [[json_value]] pointer given the path string, +! using the ""JSON Pointer"" path specification defined by the +! JSONPath ""bracket-notation"". +! +! The first character `$` is optional, and signifies the root +! of the structure. If it is not present, then the first key +! is taken to be in the `me` object. +! +! Single or real quotes may be used. +! +!### Example +! +!````fortran +! type(json_core) :: json +! type(json_value),pointer :: dat,p +! logical :: found +! !... +! call json%initialize(path_mode=3) +! call json%get(dat,""$['store']['book'][1]['title']"",p,found) +!```` +! +!### See also +! * [[json_get_by_path_default]] +! * [[json_get_by_path_rfc6901]] +! +!### Reference +! * [JSONPath](http://goessner.net/articles/JsonPath/) +! +!@note Uses 1-based array indices (same as [[json_get_by_path_default]], +! but unlike [[json_get_by_path_rfc6901]] which uses 0-based indices). +! +!@note When `create_it=True`, if the variable already exists and is a type +! that is not compatible with the usage in the `path`, then it is +! destroyed and replaced with what is specified in the `path`. Note that +! this applies the all variables in the path as it is created. Currently, +! 
this behavior is different from [[json_get_by_path_default]]. +! +!@note JSON `null` values are used here for unknown variables +! when `create_it` is True. +! +!@warning Note that if using single quotes, this routine cannot parse +! a key containing `']`. If using real quotes, this routine +! cannot parse a key containing `""]`. If the key contains both +! `']` and `""]`, there is no way to parse it using this routine. + + subroutine json_get_by_path_jsonpath_bracket(json,me,path,p,found,create_it,was_created) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me !! a JSON linked list + character(kind=CK,len=*),intent(in) :: path !! path to the variable + !! (using JSONPath + !! ""bracket-notation"") + type(json_value),pointer,intent(out) :: p !! pointer to the variable + !! specify by `path` + logical(LK),intent(out),optional :: found !! true if it was found + logical(LK),intent(in),optional :: create_it !! if a variable is not present + !! in the path, then it is created. + !! the leaf node is returned as + !! a `null` json type and can be + !! changed by the caller. + logical(LK),intent(out),optional :: was_created !! if `create_it` is true, this + !! will be true if the variable + !! was actually created. Otherwise + !! it will be false. + + character(kind=CK,len=:),allocatable :: token !! a token in the path + !! (between the `['']` or + !! `[]` characters) + integer(IK) :: istart !! location of current '[' + !! character in the path + integer(IK) :: iend !! location of current ']' + !! character in the path + integer(IK) :: ival !! integer array index value + logical(LK) :: status_ok !! error flag + type(json_value),pointer :: tmp !! temporary variable for + !! traversing the structure + integer(IK) :: i !! counter + integer(IK) :: ilen !! length of `path` string + logical(LK) :: real_quotes !! if the keys are enclosed in `""`, + !! rather than `'` tokens. + logical(LK) :: create !! 
if the object is to be created + logical(LK) :: created !! if `create` is true, then this will be + !! true if the leaf object had to be created + integer(IK) :: j !! counter of children when creating object + + !TODO instead of reallocating `token` all the time, just + ! allocate a big size and keep track of the length, + ! then just reallocate only if necessary. + ! [would probably be inefficient if there was a very large token, + ! and then a bunch of small ones... but for similarly-sized ones + ! it should be way more efficient since it would avoid most + ! reallocations.] + + nullify(p) + + if (.not. json%exception_thrown) then + + if (present(create_it)) then + create = create_it + else + create = .false. + end if + + p => me ! initialize + created = .false. + + if (path==CK_'') then + call json%throw_exception('Error in json_get_by_path_jsonpath_bracket: '//& + 'invalid path specification: '//trim(path),found) + else + + if (path(1:1)==root .or. path(1:1)==start_array) then ! the first character must be + ! a `$` (root) or a `[` + ! (element of `me`) + + if (path(1:1)==root) then + ! go to the root + do while (associated (p%parent)) + p => p%parent + end do + if (create) created = .false. ! should always exist + end if + + !path length (don't need trailing spaces:) + ilen = len_trim(path) + + if (ilen>1) then + + istart = 2 ! initialize first '[' location index + + do + + if (istart>ilen) exit ! finished + + ! must be the next start bracket: + if (path(istart:istart) /= start_array) then + call json%throw_exception(& + 'Error in json_get_by_path_jsonpath_bracket: '//& + 'expecting ""["", found: ""'//trim(path(istart:istart))//& + '"" in path: '//trim(path),found) + exit + end if + + ! get the next token by checking: + ! + ! * [''] -- is the token after istart a quote? + ! if so, then search for the next `']` + ! + ! * [1] -- if not, then maybe it is a number, + ! so search for the next `]` + + ! 
verify length of remaining string + if (istart+2<=ilen) then + + real_quotes = path(istart+1:istart+1) == quotation_mark ! ["" + + if (real_quotes .or. path(istart+1:istart+1)==single_quote) then ! [' + + ! it might be a key value: ['abc'] + + istart = istart + 1 ! move counter to ' index + if (real_quotes) then + iend = istart + index(path(istart+1:ilen),& + quotation_mark//end_array) ! ""] + else + iend = istart + index(path(istart+1:ilen),& + single_quote//end_array) ! '] + end if + if (iend>istart) then + + ! istart iend + ! | | + ! ['p']['abcdefg'] + + if (iend>istart+1) then + token = path(istart+1:iend-1) + else + token = CK_'' ! blank string + end if + ! remove trailing spaces in + ! the token here if necessary: + if (.not. json%trailing_spaces_significant) & + token = trim(token) + + if (create) then + ! have a token, create it if necessary + + ! we need to convert it into an object here + ! (e.g., if p was also just created) + ! and destroy its data to prevent a memory leak + call json%convert(p,json_object) + + ! don't want to throw exceptions in this case + call json%get_child(p,token,tmp,status_ok) + if (.not. status_ok) then + ! have to create this child + ! [make it a null since we don't + ! know what it is yet] + call json_value_create(tmp) + call json%to_null(tmp,token) + call json%add(p,tmp) + status_ok = .true. + created = .true. + else + ! it was already there. + created = .false. + end if + else + ! have a token, see if it is valid: + call json%get_child(p,token,tmp,status_ok) + end if + + if (status_ok) then + ! it was found + p => tmp + else + call json%throw_exception(& + 'Error in json_get_by_path_jsonpath_bracket: '//& + 'invalid token found: ""'//token//& + '"" in path: '//trim(path),found) + exit + end if + iend = iend + 1 ! move counter to ] index + else + call json%throw_exception(& + 'Error in json_get_by_path_jsonpath_bracket: '//& + 'invalid path: '//trim(path),found) + exit + end if + + else + + ! 
it might be an integer value: [123] + + iend = istart + index(path(istart+1:ilen),end_array) ! ] + if (iend>istart+1) then + + ! this should be an integer: + token = path(istart+1:iend-1) + + ! verify that there are no spaces or other + ! characters in the string: + status_ok = .true. + do i=1,len(token) + ! It must only contain (0..9) characters + ! (it must be unsigned) + if (scan(token(i:i),CK_'0123456789')<1) then + status_ok = .false. + exit + end if + end do + if (status_ok) then + call string_to_integer(token,ival,status_ok) + if (status_ok) status_ok = ival>0 ! assuming 1-based array indices + end if + + if (status_ok) then + + ! have a valid integer to use as an index + ! see if this element is really there: + call json%get_child(p,ival,tmp,status_ok) + + if (create .and. .not. status_ok) then + + ! have to create it: + + if (.not.(p%var_type==json_object .or. p%var_type==json_array)) then + ! we need to convert it into an array here + ! (e.g., if p was also just created) + ! and destroy its data to prevent a memory leak + call json%convert(p,json_array) + end if + + ! have to create this element + ! [make it a null] + ! (and any missing ones before it) + do j = 1, ival + nullify(tmp) + call json%get_child(p, j, tmp, status_ok) + if (.not. status_ok) then + call json_value_create(tmp) + call json%to_null(tmp) ! array element doesn't need a name + call json%add(p,tmp) + if (j==ival) created = .true. + else + if (j==ival) created = .false. + end if + end do + status_ok = .true. + + else + created = .false. + end if + + if (status_ok) then + ! found it + p => tmp + else + ! 
not found + call json%throw_exception(& + 'Error in json_get_by_path_jsonpath_bracket: '//& + 'invalid array index found: ""'//token//& + '"" in path: '//trim(path),found) + exit + end if + else + call json%throw_exception(& + 'Error in json_get_by_path_jsonpath_bracket: '//& + 'invalid token: ""'//token//& + '"" in path: '//trim(path),found) + exit + end if + + else + call json%throw_exception(& + 'Error in json_get_by_path_jsonpath_bracket: '//& + 'invalid path: '//trim(path),found) + exit + end if + + end if + + else + call json%throw_exception(& + 'Error in json_get_by_path_jsonpath_bracket: '//& + 'invalid path: '//trim(path),found) + exit + end if + + ! set up for next token: + istart = iend + 1 + + end do + + end if + + else + call json%throw_exception(& + 'Error in json_get_by_path_jsonpath_bracket: '//& + 'expecting ""'//root//'"", found: ""'//path(1:1)//& + '"" in path: '//trim(path),found) + end if + + end if + + if (json%exception_thrown) then + nullify(p) + if (present(found)) then + found = .false. + call json%clear_exceptions() + end if + else + if (present(found)) found = .true. + end if + + ! if it had to be created: + if (present(was_created)) was_created = created + + else + if (present(found)) found = .false. + if (present(was_created)) was_created = .false. + end if + + end subroutine json_get_by_path_jsonpath_bracket +!***************************************************************************************** + +!***************************************************************************************** +!> +! Convert an existing JSON variable `p` to a different variable type. +! The existing variable (and its children) is destroyed. It is replaced +! in the structure by a new variable of type `var_type` +! (which can be a `json_null`, `json_object` or `json_array`). +! +!@note This is an internal routine used when creating variables by path. 
+ + subroutine convert(json,p,var_type) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! the variable to convert + integer(IK),intent(in) :: var_type !! the variable type to convert `p` to + + type(json_value),pointer :: tmp !! temporary variable + character(kind=CK,len=:),allocatable :: name !! the name of a JSON variable + + logical :: convert_it !! if `p` needs to be converted + + convert_it = p%var_type /= var_type + + if (convert_it) then + + call json%info(p,name=name) ! get existing name + + select case (var_type) + case(json_object) + call json%create_object(tmp,name) + case(json_array) + call json%create_array(tmp,name) + case(json_null) + call json%create_null(tmp,name) + case default + call json%throw_exception('Error in convert: invalid var_type value.') + return + end select + + call json%replace(p,tmp,destroy=.true.) + p => tmp + nullify(tmp) + + end if + + end subroutine convert +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_by_path]] where ""path"" is kind=CDK. + + subroutine wrap_json_get_by_path(json, me, path, p, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + type(json_value),pointer,intent(out) :: p + logical(LK),intent(out),optional :: found + + call json%get(me, to_unicode(path), p, found) + + end subroutine wrap_json_get_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Returns the path to a JSON object that is part +! of a linked list structure. +! +! The path returned would be suitable for input to +! [[json_get_by_path]] and related routines. +! 
+!@note If an error occurs (which in this case means a malformed +! JSON structure) then an exception will be thrown, unless +! `found` is present, which will be set to `false`. `path` +! will be a blank string. +! +!@note If `json%path_mode/=1`, then the `use_alt_array_tokens` +! and `path_sep` inputs are ignored if present. +! +!@note [http://goessner.net/articles/JsonPath/](JSONPath) (`path_mode=3`) +! does not specify whether or not the keys should be escaped (this routine +! assumes not, as does http://jsonpath.com). +! Also, we are using Fortran-style 1-based array indices, +! not 0-based, to agree with the assumption in `path_mode=1` + + subroutine json_get_path(json, p, path, found, use_alt_array_tokens, path_sep) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! a JSON linked list object + character(kind=CK,len=:),allocatable,intent(out) :: path !! path to the variable + logical(LK),intent(out),optional :: found !! true if there were no problems + logical(LK),intent(in),optional :: use_alt_array_tokens !! if true, then '()' are used for array elements + !! otherwise, '[]' are used [default] + !! (only used if `path_mode=1`) + character(kind=CK,len=1),intent(in),optional :: path_sep !! character to use for path separator + !! (otherwise use `json%path_separator`) + !! (only used if `path_mode=1`) + + character(kind=CK,len=:),allocatable :: name !! variable name + character(kind=CK,len=:),allocatable :: parent_name !! variable's parent name + character(kind=CK,len=max_integer_str_len) :: istr !! for integer to string conversion + !! (array indices) + type(json_value),pointer :: tmp !! for traversing the structure + type(json_value),pointer :: element !! for traversing the structure + integer(IK) :: var_type !! JSON variable type flag + integer(IK) :: i !! counter + integer(IK) :: n_children !! number of children for parent + logical(LK) :: use_brackets !! 
to use '[]' characters for arrays + logical(LK) :: parent_is_root !! if the parent is the root + character(kind=CK,len=1) :: array_start !! for `path_mode=1`, the character to start arrays + character(kind=CK,len=1) :: array_end !! for `path_mode=1`, the character to end arrays + logical :: consecutive_arrays !! check for array of array case + integer(IK) :: parents_parent_var_type !! `var_type` for parent's parent + + !optional input: + if (present(use_alt_array_tokens)) then + use_brackets = .not. use_alt_array_tokens + else + use_brackets = .true. + end if + + if (json%path_mode==1_IK) then + if (use_brackets) then + array_start = start_array + array_end = end_array + else + array_start = start_array_alt + array_end = end_array_alt + end if + end if + + ! initialize: + consecutive_arrays = .false. + + if (associated(p)) then + + !traverse the structure via parents up to the root + tmp => p + do + + if (.not. associated(tmp)) exit !finished + + !get info about the current variable: + call json%info(tmp,name=name) + if (json%path_mode==2_IK) then + name = encode_rfc6901(name) + end if + + ! if tmp a child of an object, or an element of an array + if (associated(tmp%parent)) then + + !get info about the parent: + call json%info(tmp%parent,var_type=var_type,& + n_children=n_children,name=parent_name) + if (json%path_mode==2_IK) then + parent_name = encode_rfc6901(parent_name) + end if + if (associated(tmp%parent%parent)) then + call json%info(tmp%parent%parent,var_type=parents_parent_var_type) + consecutive_arrays = parents_parent_var_type == json_array .and. & + var_type == json_array + else + consecutive_arrays = .false. + end if + + select case (var_type) + case (json_array) + + !get array index of this element: + element => tmp%parent%children + do i = 1, n_children + if (.not. associated(element)) then + call json%throw_exception('Error in json_get_path: '//& + 'malformed JSON structure. 
',found) + exit + end if + if (associated(element,tmp)) then + exit + else + element => element%next + end if + if (i==n_children) then ! it wasn't found (should never happen) + call json%throw_exception('Error in json_get_path: '//& + 'malformed JSON structure. ',found) + exit + end if + end do + select case(json%path_mode) + case(3_IK) + ! JSONPath ""bracket-notation"" + ! example: `$['key'][1]` + ! [note: this uses 1-based indices] + call integer_to_string(i,int_fmt,istr) + if (consecutive_arrays) then + call add_to_path(start_array//trim(adjustl(istr))//end_array,CK_'') + else + call add_to_path(start_array//single_quote//parent_name//& + single_quote//end_array//& + start_array//trim(adjustl(istr))//end_array,CK_'') + end if + case(2_IK) + ! rfc6901 + ! Example: '/key/0' + call integer_to_string(i-1_IK,int_fmt,istr) ! 0-based index + if (consecutive_arrays) then + call add_to_path(trim(adjustl(istr))) + else + call add_to_path(parent_name//slash//trim(adjustl(istr))) + end if + case(1_IK) + ! default + ! Example: `key[1]` + call integer_to_string(i,int_fmt,istr) + if (consecutive_arrays) then + call add_to_path(array_start//trim(adjustl(istr))//array_end,path_sep) + else + call add_to_path(parent_name//array_start//& + trim(adjustl(istr))//array_end,path_sep) + end if + end select + + if (.not. consecutive_arrays) tmp => tmp%parent ! already added parent name + + case (json_object) + + if (.not. consecutive_arrays) then + ! idea is not to print the array name if + ! it was already printed with the array + + !process parent on the next pass + select case(json%path_mode) + case(3_IK) + call add_to_path(start_array//single_quote//name//& + single_quote//end_array,CK_'') + case default + call add_to_path(name,path_sep) + end select + + end if + + case default + + call json%throw_exception('Error in json_get_path: '//& + 'malformed JSON structure. 
'//& + 'A variable that is not an object '//& + 'or array should not have a child.',found) + exit + + end select + + else + !the last one: + select case(json%path_mode) + case(3_IK) + call add_to_path(start_array//single_quote//name//& + single_quote//end_array,CK_'') + case default + call add_to_path(name,path_sep) + end select + end if + + if (associated(tmp%parent)) then + !check if the parent is the root: + parent_is_root = (.not. associated(tmp%parent%parent)) + if (parent_is_root) exit + end if + + !go to parent: + tmp => tmp%parent + + end do + + else + call json%throw_exception('Error in json_get_path: '//& + 'input pointer is not associated',found) + end if + + !for errors, return blank string: + if (json%exception_thrown .or. .not. allocated(path)) then + path = CK_'' + else + select case (json%path_mode) + case(3_IK) + ! add the outer level object identifier: + path = root//path + case(2_IK) + ! add the root slash: + path = slash//path + end select + end if + + !optional output: + if (present(found)) then + if (json%exception_thrown) then + found = .false. + call json%clear_exceptions() + else + found = .true. + end if + end if + + contains + + subroutine add_to_path(str,path_sep) + !! prepend the string to the path + implicit none + character(kind=CK,len=*),intent(in) :: str !! string to prepend to `path` + character(kind=CK,len=1),intent(in),optional :: path_sep + !! path separator (default is '.'). + !! (ignored if `json%path_mode/=1`) + + select case (json%path_mode) + case(3_IK) + ! in this case, the options are ignored + if (.not. allocated(path)) then + path = str + else + path = str//path + end if + case(2_IK) + ! in this case, the options are ignored + if (.not. allocated(path)) then + path = str + else + path = str//slash//path + end if + case(1_IK) + ! default path format + if (.not. allocated(path)) then + path = str + else + ! shouldn't add the path_sep for cases like x[1][2] + ! [if current is an array element, and the previous was + ! 
also an array element] so check for that here: + if (.not. ( str(len(str):len(str))==array_end .and. & + path(1:1)==array_start )) then + if (present(path_sep)) then + ! use user specified: + path = str//path_sep//path + else + ! use the default: + path = str//json%path_separator//path + end if + else + path = str//path + end if + end if + end select + + end subroutine add_to_path + + end subroutine json_get_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Wrapper for [[json_get_path]] where ""path"" and ""path_sep"" are kind=CDK. + + subroutine wrap_json_get_path(json, p, path, found, use_alt_array_tokens, path_sep) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p !! a JSON linked list object + character(kind=CDK,len=:),allocatable,intent(out) :: path !! path to the variable + logical(LK),intent(out),optional :: found !! true if there were no problems + logical(LK),intent(in),optional :: use_alt_array_tokens !! if true, then '()' are used + !! for array elements otherwise, + !! '[]' are used [default] + character(kind=CDK,len=1),intent(in),optional :: path_sep !! character to use for path + !! separator (default is '.') + + character(kind=CK,len=:),allocatable :: ck_path !! path to the variable + + ! call the main routine: + if (present(path_sep)) then + call json%get_path(p,ck_path,found,use_alt_array_tokens,to_unicode(path_sep)) + else + call json%get_path(p,ck_path,found,use_alt_array_tokens) + end if + + ! from unicode: + path = ck_path + + end subroutine wrap_json_get_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Convert a string into an integer. +! 
+!@note Replacement for the `parse_integer` function in the original code. + + function string_to_int(json,str) result(ival) + + implicit none + + class(json_core),intent(inout) :: json + character(kind=CK,len=*),intent(in) :: str !! a string + integer(IK) :: ival !! `str` converted to an integer + + logical(LK) :: status_ok !! error flag for [[string_to_integer]] + + ! call the core routine: + call string_to_integer(str,ival,status_ok) + + if (.not. status_ok) then + ival = 0 + call json%throw_exception('Error in string_to_int: '//& + 'string cannot be converted to an integer: '//& + trim(str)) + end if + + end function string_to_int +!***************************************************************************************** + +!***************************************************************************************** +!> +! Convert a string into a `real(RK)` value. + + function string_to_dble(json,str) result(rval) + + implicit none + + class(json_core),intent(inout) :: json + character(kind=CK,len=*),intent(in) :: str !! a string + real(RK) :: rval !! `str` converted to a `real(RK)` + + logical(LK) :: status_ok !! error flag for [[string_to_real]] + + call string_to_real(str,json%use_quiet_nan,rval,status_ok) + + if (.not. status_ok) then !if there was an error + rval = 0.0_RK + call json%throw_exception('Error in string_to_dble: '//& + 'string cannot be converted to a real: '//& + trim(str)) + end if + + end function string_to_dble +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get an integer value from a [[json_value]]. + + subroutine json_get_integer(json, me, value) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + integer(IK),intent(out) :: value !! the integer value + + logical(LK) :: status_ok !! 
for [[string_to_integer]] + + value = 0_IK + if ( json%exception_thrown ) return + + if (me%var_type == json_integer) then + value = me%int_value + else + if (json%strict_type_checking) then + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_integer:'//& + ' Unable to resolve value to integer: '//me%name) + else + call json%throw_exception('Error in json_get_integer:'//& + ' Unable to resolve value to integer') + end if + else + !type conversions + select case(me%var_type) + case (json_real) + value = int(me%dbl_value, IK) + case (json_logical) + if (me%log_value) then + value = 1_IK + else + value = 0_IK + end if + case (json_string) + call string_to_integer(me%str_value,value,status_ok) + if (.not. status_ok) then + value = 0_IK + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_integer:'//& + ' Unable to convert string value to integer: '//& + me%name//' = '//trim(me%str_value)) + else + call json%throw_exception('Error in json_get_integer:'//& + ' Unable to convert string value to integer: '//& + trim(me%str_value)) + end if + end if + case default + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_integer:'//& + ' Unable to resolve value to integer: '//me%name) + else + call json%throw_exception('Error in json_get_integer:'//& + ' Unable to resolve value to integer') + end if + end select + end if + end if + + end subroutine json_get_integer +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get an integer value from a [[json_value]], given the path string. 
+ + subroutine json_get_integer_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + integer(IK),intent(out) :: value + logical(LK),intent(out),optional :: found + integer(IK),intent(in),optional :: default !! default value if not found + + integer(IK),parameter :: default_if_not_specified = 0_IK + character(kind=CK,len=*),parameter :: routine = CK_'json_get_integer_by_path' + +#include ""json_get_scalar_by_path.inc"" + + end subroutine json_get_integer_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_integer_by_path]], where ""path"" is kind=CDK. + + subroutine wrap_json_get_integer_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + integer(IK),intent(out) :: value + logical(LK),intent(out),optional :: found + integer(IK),intent(in),optional :: default !! default value if not found + + call json%get(me, to_unicode(path), value, found, default) + + end subroutine wrap_json_get_integer_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 5/14/2014 +! +! Get an integer vector from a [[json_value]]. + + subroutine json_get_integer_vec(json, me, vec) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + integer(IK),dimension(:),allocatable,intent(out) :: vec + + logical(LK) :: initialized + + if ( json%exception_thrown ) return + + ! 
check for 0-length arrays first: + select case (me%var_type) + case (json_array) + if (json%count(me)==0) then + allocate(vec(0)) + return + end if + end select + + initialized = .false. + + !the callback function is called for each element of the array: + call json%get(me, array_callback=get_int_from_array) + + if (json%exception_thrown .and. allocated(vec)) deallocate(vec) + + contains + + subroutine get_int_from_array(json, element, i, count) + + !! callback function for integer + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: element + integer(IK),intent(in) :: i !! index + integer(IK),intent(in) :: count !! size of array + + !size the output array: + if (.not. initialized) then + allocate(vec(count)) + initialized = .true. + end if + + !populate the elements: + call json%get(element, value=vec(i)) + + end subroutine get_int_from_array + + end subroutine json_get_integer_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! If `found` is present, set it it false. + + subroutine flag_not_found(found) + + implicit none + + logical(LK),intent(out),optional :: found + + if (present(found)) found = .false. + + end subroutine flag_not_found +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get an integer vector from a [[json_value]], given the path string. 
+ + subroutine json_get_integer_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + integer(IK),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + integer(IK),dimension(:),intent(in),optional :: default !! default value if not found + + character(kind=CK,len=*),parameter :: routine = CK_'json_get_integer_vec_by_path' + +#include ""json_get_vec_by_path.inc"" + + end subroutine json_get_integer_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_integer_vec_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_integer_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CDK,len=*),intent(in) :: path + integer(IK),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + integer(IK),dimension(:),intent(in),optional :: default !! default value if not found + + call json%get(me,path=to_unicode(path),vec=vec,found=found,default=default) + + end subroutine wrap_json_get_integer_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get a real value from a [[json_value]]. + + subroutine json_get_real(json, me, value) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + real(RK),intent(out) :: value + + logical(LK) :: status_ok !! 
for [[string_to_real]] + + value = 0.0_RK + if ( json%exception_thrown ) return + + if (me%var_type == json_real) then + value = me%dbl_value + else + if (json%strict_type_checking) then + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_real:'//& + ' Unable to resolve value to real: '//me%name) + else + call json%throw_exception('Error in json_get_real:'//& + ' Unable to resolve value to real') + end if + else + !type conversions + select case (me%var_type) + case (json_integer) + value = real(me%int_value, RK) + case (json_logical) + if (me%log_value) then + value = 1.0_RK + else + value = 0.0_RK + end if + case (json_string) + call string_to_real(me%str_value,json%use_quiet_nan,value,status_ok) + if (.not. status_ok) then + value = 0.0_RK + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_real:'//& + ' Unable to convert string value to real: '//& + me%name//' = '//trim(me%str_value)) + else + call json%throw_exception('Error in json_get_real:'//& + ' Unable to convert string value to real: '//& + trim(me%str_value)) + end if + end if + case (json_null) + if (ieee_support_nan(value) .and. 
json%null_to_real_mode/=1_IK) then + select case (json%null_to_real_mode) + case(2_IK) + if (json%use_quiet_nan) then + value = ieee_value(value,ieee_quiet_nan) + else + value = ieee_value(value,ieee_signaling_nan) + end if + case(3_IK) + value = 0.0_RK + end select + else + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_real:'//& + ' Cannot convert null to NaN: '//me%name) + else + call json%throw_exception('Error in json_get_real:'//& + ' Cannot convert null to NaN') + end if + end if + case default + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_real:'//& + ' Unable to resolve value to real: '//me%name) + else + call json%throw_exception('Error in json_get_real:'//& + ' Unable to resolve value to real') + end if + end select + end if + end if + + end subroutine json_get_real +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get a real value from a [[json_value]], given the path. + + subroutine json_get_real_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CK,len=*),intent(in) :: path + real(RK),intent(out) :: value + logical(LK),intent(out),optional :: found + real(RK),intent(in),optional :: default !! default value if not found + + real(RK),parameter :: default_if_not_specified = 0.0_RK + character(kind=CK,len=*),parameter :: routine = CK_'json_get_real_by_path' + +#include ""json_get_scalar_by_path.inc"" + + end subroutine json_get_real_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_get_real_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_real_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CDK,len=*),intent(in) :: path + real(RK),intent(out) :: value + logical(LK),intent(out),optional :: found + real(RK),intent(in),optional :: default !! default value if not found + + call json%get(me,to_unicode(path),value,found,default) + + end subroutine wrap_json_get_real_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 5/14/2014 +! +! Get a real vector from a [[json_value]]. + + subroutine json_get_real_vec(json, me, vec) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + real(RK),dimension(:),allocatable,intent(out) :: vec + + logical(LK) :: initialized + + if ( json%exception_thrown ) return + + ! check for 0-length arrays first: + select case (me%var_type) + case (json_array) + if (json%count(me)==0) then + allocate(vec(0)) + return + end if + end select + + initialized = .false. + + !the callback function is called for each element of the array: + call json%get(me, array_callback=get_real_from_array) + + if (json%exception_thrown .and. allocated(vec)) deallocate(vec) + + contains + + subroutine get_real_from_array(json, element, i, count) + + !! callback function for real + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: element + integer(IK),intent(in) :: i !! index + integer(IK),intent(in) :: count !! size of array + + !size the output array: + if (.not. initialized) then + allocate(vec(count)) + initialized = .true. 
+ end if + + !populate the elements: + call json%get(element, value=vec(i)) + + end subroutine get_real_from_array + + end subroutine json_get_real_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get a real vector from a [[json_value]], given the path. + + subroutine json_get_real_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + real(RK),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + real(RK),dimension(:),intent(in),optional :: default !! default value if not found + + character(kind=CK,len=*),parameter :: routine = CK_'json_get_real_vec_by_path' + +#include ""json_get_vec_by_path.inc"" + + end subroutine json_get_real_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real_vec_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_real_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CDK,len=*),intent(in) :: path + real(RK),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + real(RK),dimension(:),intent(in),optional :: default !! default value if not found + + call json%get(me, to_unicode(path), vec, found, default) + + end subroutine wrap_json_get_real_vec_by_path +!***************************************************************************************** + +#ifndef REAL32 +!***************************************************************************************** +!> +! 
Alternate version of [[json_get_real]] where value=real32. + + subroutine json_get_real32(json, me, value) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + real(real32),intent(out) :: value + + real(RK) :: tmp + + call json%get(me, tmp) + value = real(tmp,real32) + + end subroutine json_get_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real_by_path]] where value=real32. + + subroutine json_get_real32_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CK,len=*),intent(in) :: path + real(real32),intent(out) :: value + logical(LK),intent(out),optional :: found + real(real32),intent(in),optional :: default !! default value if not found + + real(RK) :: tmp + real(RK) :: tmp_default + + if (present(default)) then + tmp_default = real(default,RK) + call json%get(me, path, tmp, found, tmp_default) + else + call json%get(me, path, tmp, found) + end if + + value = real(tmp,real32) + + end subroutine json_get_real32_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real32_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_real32_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CDK,len=*),intent(in) :: path + real(real32),intent(out) :: value + logical(LK),intent(out),optional :: found + real(real32),intent(in),optional :: default !! 
default value if not found + + call json%get(me,to_unicode(path),value,found,default) + + end subroutine wrap_json_get_real32_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real_vec]] where `vec` is `real32`. + + subroutine json_get_real32_vec(json, me, vec) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + real(real32),dimension(:),allocatable,intent(out) :: vec + + real(RK),dimension(:),allocatable :: tmp + + call json%get(me, tmp) + if (allocated(tmp)) vec = real(tmp,real32) + + end subroutine json_get_real32_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real_vec_by_path]] where `vec` is `real32`. + + subroutine json_get_real32_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + real(real32),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + real(real32),dimension(:),intent(in),optional :: default !! default value if not found + + real(RK),dimension(:),allocatable :: tmp + real(RK),dimension(:),allocatable :: tmp_default + + if (present(default)) then + tmp_default = real(default,RK) + call json%get(me, path, tmp, found, tmp_default) + else + call json%get(me, path, tmp, found) + end if + + if (allocated(tmp)) vec = real(tmp,real32) + + end subroutine json_get_real32_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_get_real32_vec_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_real32_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CDK,len=*),intent(in) :: path + real(real32),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + real(real32),dimension(:),intent(in),optional :: default !! default value if not found + + call json%get(me, to_unicode(path), vec, found, default) + + end subroutine wrap_json_get_real32_vec_by_path +!***************************************************************************************** +#endif + +#ifdef REAL128 +!***************************************************************************************** +!> +! Alternate version of [[json_get_real]] where `value` is `real64`. + + subroutine json_get_real64(json, me, value) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + real(real64),intent(out) :: value + + real(RK) :: tmp + + call json%get(me, tmp) + value = real(tmp,real64) + + end subroutine json_get_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real_by_path]] where `value` is `real64`. + + subroutine json_get_real64_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CK,len=*),intent(in) :: path + real(real64),intent(out) :: value + logical(LK),intent(out),optional :: found + real(real64),intent(in),optional :: default !! 
default value if not found + + real(RK) :: tmp + + call json%get(me, path, tmp, found, default) + value = real(tmp,real64) + + end subroutine json_get_real64_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real64_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_real64_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CDK,len=*),intent(in) :: path + real(real64),intent(out) :: value + logical(LK),intent(out),optional :: found + real(real64),intent(in),optional :: default !! default value if not found + + call json%get(me,to_unicode(path),value,found, default) + + end subroutine wrap_json_get_real64_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real_vec]] where `vec` is `real64`. + + subroutine json_get_real64_vec(json, me, vec) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + real(real64),dimension(:),allocatable,intent(out) :: vec + + real(RK),dimension(:),allocatable :: tmp + + call json%get(me, tmp) + if (allocated(tmp)) vec = real(tmp,real64) + + end subroutine json_get_real64_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real_vec_by_path]] where `vec` is `real64`. 
+ + subroutine json_get_real64_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + real(real64),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + real(real64),dimension(:),intent(in),optional :: default !! default value if not found + + real(RK),dimension(:),allocatable :: tmp + + call json%get(me, path, tmp, found, default) + if (allocated(tmp)) vec = real(tmp,real64) + + end subroutine json_get_real64_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_real64_vec_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_real64_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: me + character(kind=CDK,len=*),intent(in) :: path + real(real64),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + real(real64),dimension(:),intent(in),optional :: default !! default value if not found + + call json%get(me, to_unicode(path), vec, found, default) + + end subroutine wrap_json_get_real64_vec_by_path +!***************************************************************************************** +#endif + +!***************************************************************************************** +!> +! Get a logical value from a [[json_value]]. +! +!### Note +! If `strict_type_checking` is False, then the following assumptions are made: +! +! * For integers: a value > 0 is True +! * For reals: a value > 0 is True +! * For strings: 'true' is True, and everything else is false. 
[case sensitive match] + + subroutine json_get_logical(json, me, value) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + logical(LK),intent(out) :: value + + value = .false. + if ( json%exception_thrown ) return + + if (me%var_type == json_logical) then + value = me%log_value + else + if (json%strict_type_checking) then + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_logical: '//& + 'Unable to resolve value to logical: '//& + me%name) + else + call json%throw_exception('Error in json_get_logical: '//& + 'Unable to resolve value to logical') + end if + else + !type conversions + select case (me%var_type) + case (json_integer) + value = (me%int_value > 0_IK) + case (json_real) + value = (me%dbl_value > 0.0_RK) + case (json_string) + value = (me%str_value == true_str) + case default + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_logical: '//& + 'Unable to resolve value to logical: '//& + me%name) + else + call json%throw_exception('Error in json_get_logical: '//& + 'Unable to resolve value to logical') + end if + end select + end if + end if + + end subroutine json_get_logical +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get a logical value from a [[json_value]], given the path. + + subroutine json_get_logical_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + logical(LK),intent(out) :: value + logical(LK),intent(out),optional :: found + logical(LK),intent(in),optional :: default !! default value if not found + + logical(LK),parameter :: default_if_not_specified = .false. 
+ character(kind=CK,len=*),parameter :: routine = CK_'json_get_logical_by_path' + +#include ""json_get_scalar_by_path.inc"" + + end subroutine json_get_logical_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_logical_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_logical_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + logical(LK),intent(out) :: value + logical(LK),intent(out),optional :: found + logical(LK),intent(in),optional :: default !! default value if not found + + call json%get(me,to_unicode(path),value,found,default) + + end subroutine wrap_json_get_logical_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 5/14/2014 +! +! Get a logical vector from [[json_value]]. + + subroutine json_get_logical_vec(json, me, vec) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + logical(LK),dimension(:),allocatable,intent(out) :: vec + + logical(LK) :: initialized + + if ( json%exception_thrown ) return + + ! check for 0-length arrays first: + select case (me%var_type) + case (json_array) + if (json%count(me)==0) then + allocate(vec(0)) + return + end if + end select + + initialized = .false. + + !the callback function is called for each element of the array: + call json%get(me, array_callback=get_logical_from_array) + + if (json%exception_thrown .and. allocated(vec)) deallocate(vec) + + contains + + subroutine get_logical_from_array(json, element, i, count) + + !! 
callback function for logical + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: element + integer(IK),intent(in) :: i !! index + integer(IK),intent(in) :: count !! size of array + + !size the output array: + if (.not. initialized) then + allocate(vec(count)) + initialized = .true. + end if + + !populate the elements: + call json%get(element, value=vec(i)) + + end subroutine get_logical_from_array + + end subroutine json_get_logical_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get a logical vector from a [[json_value]], given the path. + + subroutine json_get_logical_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + logical(LK),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + logical(LK),dimension(:),intent(in),optional :: default + + character(kind=CK,len=*),parameter :: routine = CK_'json_get_logical_vec_by_path' + +#include ""json_get_vec_by_path.inc"" + + end subroutine json_get_logical_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! 
Alternate version of [[json_get_logical_vec_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_logical_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + logical(LK),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + logical(LK),dimension(:),intent(in),optional :: default + + call json%get(me,to_unicode(path),vec,found,default) + + end subroutine wrap_json_get_logical_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get a character string from a [[json_value]]. + + subroutine json_get_string(json, me, value) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=:),allocatable,intent(out) :: value + + value = CK_'' + if (.not. json%exception_thrown) then + + if (me%var_type == json_string) then + + if (allocated(me%str_value)) then + if (json%unescaped_strings) then + ! default: it is stored already unescaped: + value = me%str_value + else + ! 
return the escaped version: + call escape_string(me%str_value, value, json%escape_solidus) + end if + else + call json%throw_exception('Error in json_get_string: '//& + 'me%str_value not allocated') + end if + + else + + if (json%strict_type_checking) then + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_string:'//& + ' Unable to resolve value to string: '//me%name) + else + call json%throw_exception('Error in json_get_string:'//& + ' Unable to resolve value to string') + end if + else + + select case (me%var_type) + + case (json_integer) + + if (allocated(me%int_value)) then + value = repeat(space, max_integer_str_len) + call integer_to_string(me%int_value,int_fmt,value) + value = trim(value) + else + call json%throw_exception('Error in json_get_string: '//& + 'me%int_value not allocated') + end if + + case (json_real) + + if (allocated(me%dbl_value)) then + value = repeat(space, max_numeric_str_len) + call real_to_string(me%dbl_value,json%real_fmt,& + json%non_normals_to_null,& + json%compact_real,value) + value = trim(value) + else + call json%throw_exception('Error in json_get_string: '//& + 'me%int_value not allocated') + end if + + case (json_logical) + + if (allocated(me%log_value)) then + if (me%log_value) then + value = true_str + else + value = false_str + end if + else + call json%throw_exception('Error in json_get_string: '//& + 'me%log_value not allocated') + end if + + case (json_null) + + value = null_str + + case default + if (allocated(me%name)) then + call json%throw_exception('Error in json_get_string: '//& + 'Unable to resolve value to characters: '//& + me%name) + else + call json%throw_exception('Error in json_get_string: '//& + 'Unable to resolve value to characters') + end if + end select + + end if + end if + + end if + + end subroutine json_get_string +!***************************************************************************************** + 
+!***************************************************************************************** +!> +! Get a character string from a [[json_value]], given the path. + + subroutine json_get_string_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + character(kind=CK,len=:),allocatable,intent(out) :: value + logical(LK),intent(out),optional :: found + character(kind=CK,len=*),intent(in),optional :: default + + character(kind=CK,len=*),parameter :: default_if_not_specified = CK_'' + character(kind=CK,len=*),parameter :: routine = CK_'json_get_string_by_path' + +#include ""json_get_scalar_by_path.inc"" + + end subroutine json_get_string_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_string_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_string_by_path(json, me, path, value, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + character(kind=CK,len=:),allocatable,intent(out) :: value + logical(LK),intent(out),optional :: found + character(kind=CK,len=*),intent(in),optional :: default + + call json%get(me,to_unicode(path),value,found,default) + + end subroutine wrap_json_get_string_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 5/14/2014 +! +! Get a string vector from a [[json_value(type)]]. 
+ + subroutine json_get_string_vec(json, me, vec) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),dimension(:),allocatable,intent(out) :: vec + + logical(LK) :: initialized + + if ( json%exception_thrown ) return + + ! check for 0-length arrays first: + select case (me%var_type) + case (json_array) + if (json%count(me)==0) then + allocate(vec(0)) + return + end if + end select + + initialized = .false. + + !the callback function is called for each element of the array: + call json%get(me, array_callback=get_chars_from_array) + + if (json%exception_thrown .and. allocated(vec)) deallocate(vec) + + contains + + subroutine get_chars_from_array(json, element, i, count) + + !! callback function for chars + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: element + integer(IK),intent(in) :: i !! index + integer(IK),intent(in) :: count !! size of array + + character(kind=CK,len=:),allocatable :: cval + + !size the output array: + if (.not. initialized) then + allocate(vec(count)) + initialized = .true. + end if + + !populate the elements: + call json%get(element, value=cval) + if (allocated(cval)) then + vec(i) = cval + deallocate(cval) + else + vec(i) = CK_'' + end if + + end subroutine get_chars_from_array + + end subroutine json_get_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get a string vector from a [[json_value(type)]], given the path. 
+ + subroutine json_get_string_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + character(kind=CK,len=*),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + character(kind=CK,len=*),dimension(:),intent(in),optional :: default + + character(kind=CK,len=*),parameter :: routine = CK_'json_get_string_vec_by_path' + +#include ""json_get_vec_by_path.inc"" + + end subroutine json_get_string_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_string_vec_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_string_vec_by_path(json, me, path, vec, found, default) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + character(kind=CK,len=*),dimension(:),allocatable,intent(out) :: vec + logical(LK),intent(out),optional :: found + character(kind=CK,len=*),dimension(:),intent(in),optional :: default + + call json%get(me,to_unicode(path),vec,found,default) + + end subroutine wrap_json_get_string_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/16/2016 +! +! Get a string vector from a [[json_value(type)]]. This is an alternate +! version of [[json_get_string_vec]]. This one returns an allocatable +! length character (where the string length is the maximum length of +! any element in the array). It also returns an integer array of the +! actual sizes of the strings in the JSON structure. +! 
+!@note This is somewhat inefficient since it does +! cycle through the array twice. +! +!@warning The allocation of `vec` doesn't work with +! gfortran 4.9 or 5 due to compiler bugs + + subroutine json_get_alloc_string_vec(json, me, vec, ilen) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=:),dimension(:),allocatable,intent(out) :: vec + integer(IK),dimension(:),allocatable,intent(out) :: ilen !! the actual length + !! of each character + !! string in the array + + logical(LK) :: initialized !! if the output array has been sized + integer(IK) :: max_len !! the length of the longest string in the array + + if ( json%exception_thrown ) return + + ! check for 0-length arrays first: + select case (me%var_type) + case (json_array) + if (json%count(me)==0) then + allocate(character(kind=CK,len=0) :: vec(0)) + allocate(ilen(0)) + return + end if + end select + + initialized = .false. + + call json%string_info(me,ilen=ilen,max_str_len=max_len) + if (.not. json%exception_thrown) then + ! now get each string using the callback function: + call json%get(me, array_callback=get_chars_from_array) + end if + + if (json%exception_thrown) then + if (allocated(vec)) deallocate(vec) + if (allocated(ilen)) deallocate(ilen) + end if + + contains + + subroutine get_chars_from_array(json, element, i, count) + + !! callback function for chars + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: element + integer(IK),intent(in) :: i !! index + integer(IK),intent(in) :: count !! size of array + + character(kind=CK,len=:),allocatable :: cval !! for getting string + + !size the output array: + if (.not. initialized) then + ! string length long enough to hold the longest one + ! Note that this doesn't work with gfortran 4.9 or 5. + allocate( character(kind=CK,len=max_len) :: vec(count) ) + initialized = .true. 
+ end if + + !populate the elements: + call json%get(element, value=cval) + if (allocated(cval)) then + vec(i) = cval + ilen(i) = len(cval) ! return the actual length + deallocate(cval) + else + vec(i) = CK_'' + ilen(i) = 0 + end if + + end subroutine get_chars_from_array + + end subroutine json_get_alloc_string_vec +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_alloc_string_vec]] where input is the path. +! +! This is an alternate version of [[json_get_string_vec_by_path]]. +! This one returns an allocatable length character (where the string +! length is the maximum length of any element in the array). It also +! returns an integer array of the actual sizes of the strings in the +! JSON structure. +! +!@note An alternative to using this routine is to use [[json_get_array]] with +! a callback function that gets the string from each element and populates +! a user-defined string type. +! +!@note If the `default` argument is used, and `default_ilen` is not present, +! then `ilen` will just be returned as the length of the `default` dummy +! argument (all elements with the same length). + + subroutine json_get_alloc_string_vec_by_path(json,me,path,vec,ilen,found,default,default_ilen) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + character(kind=CK,len=:),dimension(:),allocatable,intent(out) :: vec + integer(IK),dimension(:),allocatable,intent(out) :: ilen !! the actual length + !! of each character + !! string in the array + logical(LK),intent(out),optional :: found + character(kind=CK,len=*),dimension(:),intent(in),optional :: default + integer(IK),dimension(:),intent(in),optional :: default_ilen !! the actual + !! 
length of `default` + + character(kind=CK,len=*),parameter :: routine = CK_'json_get_alloc_string_vec_by_path' + +#include ""json_get_vec_by_path_alloc.inc"" + + end subroutine json_get_alloc_string_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_alloc_string_vec_by_path]], where ""path"" is kind=CDK + + subroutine wrap_json_get_alloc_string_vec_by_path(json,me,path,vec,ilen,found,default,default_ilen) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + character(kind=CK,len=:),dimension(:),allocatable,intent(out) :: vec + integer(IK),dimension(:),allocatable,intent(out) :: ilen !! the actual length + !! of each character + !! string in the array + logical(LK),intent(out),optional :: found + character(kind=CK,len=*),dimension(:),intent(in),optional :: default + integer(IK),dimension(:),intent(in),optional :: default_ilen !! the actual + !! length of `default` + + call json%get(me,to_unicode(path),vec,ilen,found,default,default_ilen) + + end subroutine wrap_json_get_alloc_string_vec_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! This routine calls the user-supplied [[json_array_callback_func]] +! subroutine for each element in the array. +! +!@note For integer, real, logical, and character arrays, +! higher-level routines are provided (see `get` methods), so +! this routine does not have to be used for those cases. 
+ + recursive subroutine json_get_array(json, me, array_callback) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + procedure(json_array_callback_func) :: array_callback + + type(json_value),pointer :: element !! temp variable for getting elements + integer(IK) :: i !! counter + integer(IK) :: count !! number of elements in the array + + if ( json%exception_thrown ) return + + select case (me%var_type) + case (json_array) + count = json%count(me) + element => me%children + do i = 1, count ! callback for each child + if (.not. associated(element)) then + call json%throw_exception('Error in json_get_array: '//& + 'Malformed JSON linked list') + return + end if + call array_callback(json, element, i, count) + if (json%exception_thrown) exit + element => element%next + end do + case default + call json%throw_exception('Error in json_get_array:'//& + ' Resolved value is not an array ') + end select + + end subroutine json_get_array +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 4/28/2016 +! +! Traverse a JSON structure. +! This routine calls the user-specified [[json_traverse_callback_func]] +! for each element of the structure. + + subroutine json_traverse(json,p,traverse_callback) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: p + procedure(json_traverse_callback_func) :: traverse_callback + + logical(LK) :: finished !! can be used to stop the process + + if (.not. json%exception_thrown) call traverse(p) + + contains + + recursive subroutine traverse(p) + + !! recursive [[json_value]] traversal. + + implicit none + + type(json_value),pointer,intent(in) :: p + + type(json_value),pointer :: element !! a child element + integer(IK) :: i !! counter + integer(IK) :: icount !! 
number of children + + if (json%exception_thrown) return + call traverse_callback(json,p,finished) ! first call for this object + if (finished) return + + !for arrays and objects, have to also call for all children: + if (p%var_type==json_array .or. p%var_type==json_object) then + + icount = json%count(p) ! number of children + if (icount>0) then + element => p%children ! first one + do i = 1, icount ! call for each child + if (.not. associated(element)) then + call json%throw_exception('Error in json_traverse: '//& + 'Malformed JSON linked list') + return + end if + call traverse(element) + if (finished .or. json%exception_thrown) exit + element => element%next + end do + end if + nullify(element) + + end if + + end subroutine traverse + + end subroutine json_traverse +!***************************************************************************************** + +!***************************************************************************************** +!> +! This routine calls the user-supplied array_callback subroutine +! for each element in the array (specified by the path). + + recursive subroutine json_get_array_by_path(json, me, path, array_callback, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CK,len=*),intent(in) :: path + procedure(json_array_callback_func) :: array_callback + logical(LK),intent(out),optional :: found + + type(json_value),pointer :: p + + if ( json%exception_thrown ) then + if ( present(found) ) found = .false. + return + end if + + nullify(p) + + ! resolve the path to the value + call json%get(me=me, path=path, p=p) + + if (.not. associated(p)) then + call json%throw_exception('Error in json_get_array:'//& + ' Unable to resolve path: '//trim(path),found) + else + call json%get(me=p,array_callback=array_callback) + nullify(p) + end if + if ( json%exception_thrown ) then + if ( present(found) ) then + found = .false. 
+ call json%clear_exceptions() + end if + else + if ( present(found) ) found = .true. + end if + + end subroutine json_get_array_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_get_array_by_path]], where ""path"" is kind=CDK + + recursive subroutine wrap_json_get_array_by_path(json, me, path, array_callback, found) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer,intent(in) :: me + character(kind=CDK,len=*),intent(in) :: path + procedure(json_array_callback_func) :: array_callback + logical(LK),intent(out),optional :: found + + call json%get(me, to_unicode(path), array_callback, found) + + end subroutine wrap_json_get_array_by_path +!***************************************************************************************** + +!***************************************************************************************** +!> +! Internal routine to be called before parsing JSON. +! Currently, all this does it allocate the `comment_char` if none was specified. + + subroutine json_prepare_parser(json) + + implicit none + + class(json_core),intent(inout) :: json + + if (json%allow_comments .and. .not. allocated(json%comment_char)) then + ! comments are enabled, but user hasn't set the comment char, + ! so in this case use the default: + json%comment_char = CK_'/!#' + end if + + end subroutine json_prepare_parser +!***************************************************************************************** + +!***************************************************************************************** +!> +! Parse the JSON file and populate the [[json_value]] tree. +! +!### Inputs +! +! The inputs can be: +! +! * `file` & `unit` : the specified unit is used to read JSON from file. +! [note if unit is already open, then the filename is ignored] +! 
* `file` : JSON is read from file using internal unit number +! +!### Example +! +!````fortran +! type(json_core) :: json +! type(json_value),pointer :: p +! call json%load(file='myfile.json', p=p) +!```` +! +!### History +! * Jacob Williams : 01/13/2015 : added read from string option. +! * Izaak Beekman : 03/08/2015 : moved read from string to separate +! subroutine, and error annotation to separate subroutine. +! +!@note When calling this routine, any exceptions thrown from previous +! calls will automatically be cleared. + + subroutine json_parse_file(json, file, p, unit) + + implicit none + + class(json_core),intent(inout) :: json + character(kind=CDK,len=*),intent(in) :: file !! JSON file name + type(json_value),pointer :: p !! output structure + integer(IK),intent(in),optional :: unit !! file unit number (/= 0) + + integer(IK) :: iunit !! file unit actually used + integer(IK) :: istat !! iostat flag + logical(LK) :: is_open !! if the file is already open + logical(LK) :: has_duplicate !! if checking for duplicate keys + character(kind=CK,len=:),allocatable :: path !! path to any duplicate key + + ! clear any exceptions and initialize: + call json%initialize() + call json%prepare_parser() + + if ( present(unit) ) then + + if (unit==0) then + call json%throw_exception('Error in json_parse_file: unit number must not be 0.') + return + end if + + iunit = unit + + ! check to see if the file is already open + ! if it is, then use it, otherwise open the file with the name given. + inquire(unit=iunit, opened=is_open, iostat=istat) + if (istat==0 .and. .not. is_open) then + ! open the file + open ( unit = iunit, & + file = file, & + status = 'OLD', & + action = 'READ', & + form = form_spec, & + access = access_spec, & + iostat = istat & + FILE_ENCODING ) + else + ! if the file is already open, then we need to make sure + ! that it is open with the correct form/access/etc... + end if + + else + + ! 
open the file with a new unit number: + open ( newunit = iunit, & + file = file, & + status = 'OLD', & + action = 'READ', & + form = form_spec, & + access = access_spec, & + iostat = istat & + FILE_ENCODING ) + + end if + + if (istat==0) then + + if (use_unformatted_stream) then + ! save the file size to be read: + inquire(unit=iunit, size=json%filesize, iostat=istat) + end if + + ! create the value and associate the pointer + call json_value_create(p) + + ! Note: the name of the root json_value doesn't really matter, + ! but we'll allocate something here just in case. + p%name = trim(file) !use the file name + + ! parse as a value + call json%parse_value(unit=iunit, str=CK_'', value=p) + call json%parse_end(unit=iunit, str=CK_'') + + ! check for errors: + if (json%exception_thrown) then + call json%annotate_invalid_json(iunit,CK_'') + else + if (.not. json%allow_duplicate_keys) then + call json%check_for_duplicate_keys(p,has_duplicate,path=path) + if (.not. json%exception_thrown) then + if (has_duplicate) then + call json%throw_exception('Error in json_parse_file: '//& + 'Duplicate key found: '//path) + end if + end if + end if + end if + + ! close the file: + close(unit=iunit, iostat=istat) + + else + + call json%throw_exception('Error in json_parse_file: Error opening file: '//trim(file)) + nullify(p) + + end if + + end subroutine json_parse_file +!***************************************************************************************** + +!***************************************************************************************** +!> +! Parse the JSON string and populate the [[json_value]] tree. +! +!### See also +! * [[json_parse_file]] + + subroutine json_parse_string(json, p, str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! output structure + character(kind=CK,len=*),intent(in) :: str !! string with JSON data + + integer(IK),parameter :: iunit = 0 !! 
indicates that json data will be read from buffer + + logical(LK) :: has_duplicate !! if checking for duplicate keys + character(kind=CK,len=:),allocatable :: path !! path to any duplicate key + + ! clear any exceptions and initialize: + call json%initialize() + call json%prepare_parser() + + ! create the value and associate the pointer + call json_value_create(p) + + ! Note: the name of the root json_value doesn't really matter, + ! but we'll allocate something here just in case. + p%name = CK_'' + + ! parse as a value + call json%parse_value(unit=iunit, str=str, value=p) + call json%parse_end(unit=iunit, str=str) + + if (json%exception_thrown) then + call json%annotate_invalid_json(iunit,str) + else + if (.not. json%allow_duplicate_keys) then + call json%check_for_duplicate_keys(p,has_duplicate,path=path) + if (.not. json%exception_thrown) then + if (has_duplicate) then + call json%throw_exception('Error in json_parse_string: '//& + 'Duplicate key found: '//path) + end if + end if + end if + end if + + end subroutine json_parse_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! An error checking routine to call after a file (or string) has been parsed. +! It will throw an exception if there are any other non-whitespace characters +! in the file. + + subroutine json_parse_end(json, unit, str) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: unit !! file unit number + character(kind=CK,len=*),intent(in) :: str !! string containing JSON + !! data (only used if `unit=0`) + + logical(LK) :: eof !! end-of-file flag + character(kind=CK,len=1) :: c !! character read from file + !! (or string) by [[pop_char]] + + ! first check for exceptions: + if (json%exception_thrown) return + + ! 
pop the next non whitespace character off the file + call json%pop_char(unit, str=str, eof=eof, skip_ws=.true., & + skip_comments=json%allow_comments, popped=c) + + if (.not. eof) then + call json%throw_exception('Error in json_parse_end:'//& + ' Unexpected character found after parsing value. ""'//& + c//'""') + end if + + end subroutine json_parse_end +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_parse_string]], where `str` is kind=CDK. + + subroutine wrap_json_parse_string(json, p, str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p !! output structure + character(kind=CDK,len=*),intent(in) :: str !! string with JSON data + + call json%deserialize(p,to_unicode(str)) + + end subroutine wrap_json_parse_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Generate a warning message if there was an error parsing a JSON +! file or string. + + subroutine annotate_invalid_json(json,iunit,str) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: iunit !! file unit number + character(kind=CK,len=*),intent(in) :: str !! string with JSON data + + character(kind=CK,len=:),allocatable :: line !! line containing the error + character(kind=CK,len=:),allocatable :: arrow_str !! arrow string that points + !! to the current character + character(kind=CK,len=max_integer_str_len) :: line_str !! current line number string + character(kind=CK,len=max_integer_str_len) :: char_str !! current character count string + integer(IK) :: i !! line number counter + integer(IK) :: i_nl_prev !! index of previous newline character + integer(IK) :: i_nl !! index of current newline character + + ! 
If there was an error reading the file, then + ! print the line where the error occurred: + if (json%exception_thrown) then + + !the counters for the current line and the last character read: + call integer_to_string(json%line_count, int_fmt, line_str) + call integer_to_string(json%char_count, int_fmt, char_str) + + !draw the arrow string that points to the current character: + arrow_str = repeat('-',max( 0_IK, json%char_count - 1_IK) )//'^' + + if (json%line_count>0 .and. json%char_count>0) then + + if (iunit/=0) then + + if (use_unformatted_stream) then + call json%get_current_line_from_file_stream(iunit,line) + else + call json%get_current_line_from_file_sequential(iunit,line) + end if + + else + + !get the current line from the string: + ! [this is done by counting the newline characters] + i_nl_prev = 0 !index of previous newline character + i_nl = 2 !just in case line_count = 0 + do i=1,json%line_count + i_nl = index(str(i_nl_prev+1:),newline) + if (i_nl==0) then !last line - no newline character + i_nl = len(str)+1 + exit + end if + i_nl = i_nl + i_nl_prev !index of current newline character + i_nl_prev = i_nl !update for next iteration + end do + line = str(i_nl_prev+1 : i_nl-1) !extract current line + + end if + + else + !in this case, it was an empty line or file + line = CK_'' + end if + + ! 
add a newline for the error display if necessary: + line = trim(line) + if (len(line)>0) then + i = len(line) + if (line(i:i)/=newline) line = line//newline + else + line = line//newline + end if + + !create the error message: + if (allocated(json%err_message)) then + json%err_message = json%err_message//newline + else + json%err_message = '' + end if + json%err_message = json%err_message//& + 'line: '//trim(adjustl(line_str))//', '//& + 'character: '//trim(adjustl(char_str))//newline//& + line//arrow_str + + if (allocated(line)) deallocate(line) + + end if + + end subroutine annotate_invalid_json +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Rewind the file to the beginning of the current line, and return this line. +! The file is assumed to be opened. +! This is the SEQUENTIAL version (see also [[get_current_line_from_file_stream]]). + + subroutine get_current_line_from_file_sequential(iunit,line) + + implicit none + + integer(IK),intent(in) :: iunit !! file unit number + character(kind=CK,len=:),allocatable,intent(out) :: line !! current line + + character(kind=CK,len=seq_chunk_size) :: chunk !! for reading line in chunks + integer(IK) :: istat !! iostat flag + integer(IK) :: isize !! number of characters read in read statement + + !initialize: + line = CK_'' + + !rewind to beginning of the current record: + backspace(iunit, iostat=istat) + + !loop to read in all the characters in the current record. + ![the line is read in chunks until the end of the line is reached] + if (istat==0) then + do + isize = 0 + read(iunit,fmt='(A)',advance='NO',size=isize,iostat=istat) chunk + if (istat==0) then + line = line//chunk + else + if (isize>0 .and. 
isize<=seq_chunk_size) line = line//chunk(1:isize) + exit + end if + end do + end if + + end subroutine get_current_line_from_file_sequential +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Rewind the file to the beginning of the current line, and return this line. +! The file is assumed to be opened. +! This is the STREAM version (see also [[get_current_line_from_file_sequential]]). + + subroutine get_current_line_from_file_stream(json,iunit,line) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: iunit !! file unit number + character(kind=CK,len=:),allocatable,intent(out) :: line !! current line + + integer(IK) :: istart !! start position of current line + integer(IK) :: iend !! end position of current line + integer(IK) :: ios !! file read `iostat` code + character(kind=CK,len=1) :: c !! a character read from the file + logical :: done !! flag to exit the loop + + istart = json%ipos + do + if (istart<=1) then + istart = 1 + exit + end if + read(iunit,pos=istart,iostat=ios) c + done = ios /= 0_IK + if (.not. done) done = c==newline + if (done) then + if (istart/=1) istart = istart - 1 + exit + end if + istart = istart-1 !rewind until the beginning of the line + end do + iend = json%ipos + do + read(iunit,pos=iend,iostat=ios) c + if (IS_IOSTAT_END(ios)) then + ! account for end of file without linebreak + iend=iend-1 + exit + end if + if (c==newline .or. ios/=0) exit + iend=iend+1 + end do + allocate( character(kind=CK,len=iend-istart+1) :: line ) + read(iunit,pos=istart,iostat=ios) line + + end subroutine get_current_line_from_file_stream +!***************************************************************************************** + +!***************************************************************************************** +!> +! Core parsing routine. 
+ + recursive subroutine parse_value(json, unit, str, value) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: unit !! file unit number + character(kind=CK,len=*),intent(in) :: str !! string containing JSON + !! data (only used if `unit=0`) + type(json_value),pointer :: value !! JSON data that is extracted + + logical(LK) :: eof !! end-of-file flag + character(kind=CK,len=1) :: c !! character read from file + !! (or string) by [[pop_char]] +#if defined __GFORTRAN__ + character(kind=CK,len=:),allocatable :: tmp !! this is a work-around for a bug + !! in the gfortran 4.9 compiler. +#endif + + if (.not. json%exception_thrown) then + + !the routine is being called incorrectly. + if (.not. associated(value)) then + call json%throw_exception('Error in parse_value: value pointer not associated.') + return + end if + + ! pop the next non whitespace character off the file + call json%pop_char(unit, str=str, eof=eof, skip_ws=.true., & + skip_comments=json%allow_comments, popped=c) + + if (eof) then + return + else + + select case (c) + + case (start_object) + + ! start object + call json%to_object(value) !allocate class + call json%parse_object(unit, str, value) + + case (start_array) + + ! start array + call json%to_array(value) !allocate class + call json%parse_array(unit, str, value) + + case (end_array) + + ! end an empty array + call json%push_char(c) + if (associated(value)) then + deallocate(value) + nullify(value) + end if + + case (quotation_mark) + + ! string + call json%to_string(value) !allocate class + + select case (value%var_type) + case (json_string) +#if defined __GFORTRAN__ + ! write to a tmp variable because of + ! a bug in 4.9 gfortran compiler. 
+ call json%parse_string(unit,str,tmp) + value%str_value = tmp + if (allocated(tmp)) deallocate(tmp) +#else + call json%parse_string(unit,str,value%str_value) +#endif + end select + + case (CK_'t') !true_str(1:1) gfortran bug work around + + !true + call json%parse_for_chars(unit, str, true_str(2:)) + !allocate class and set value: + if (.not. json%exception_thrown) call json%to_logical(value,.true.) + + case (CK_'f') !false_str(1:1) gfortran bug work around + + !false + call json%parse_for_chars(unit, str, false_str(2:)) + !allocate class and set value: + if (.not. json%exception_thrown) call json%to_logical(value,.false.) + + case (CK_'n') !null_str(1:1) gfortran bug work around + + !null + call json%parse_for_chars(unit, str, null_str(2:)) + if (.not. json%exception_thrown) call json%to_null(value) ! allocate class + + case(CK_'-', CK_'0': CK_'9', CK_'.', CK_'+') + + call json%push_char(c) + call json%parse_number(unit, str, value) + + case default + + call json%throw_exception('Error in parse_value:'//& + ' Unexpected character while parsing value. ""'//& + c//'""') + + end select + end if + + end if + + end subroutine parse_value +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Allocate a [[json_value]] pointer and make it a logical(LK) variable. +! The pointer should not already be allocated. +! +!### Example +!````fortran +! type(json_value),pointer :: p +! type(json_core) :: json +! call json%create_logical(p,'value',.true.) +!```` + + subroutine json_value_create_logical(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + logical(LK),intent(in) :: val !! variable value + character(kind=CK,len=*),intent(in) :: name !! 
variable name + + call json_value_create(p) + call json%to_logical(p,val,name) + + end subroutine json_value_create_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! Wrapper for [[json_value_create_logical]] so `create_logical` method can +! be called with name of character kind 'DEFAULT' or 'ISO_10646' + + subroutine wrap_json_value_create_logical(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + logical(LK),intent(in) :: val + character(kind=CDK,len=*),intent(in) :: name + + call json%create_logical(p,val,to_unicode(name)) + + end subroutine wrap_json_value_create_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Allocate a [[json_value]] pointer and make it an integer(IK) variable. +! The pointer should not already be allocated. +! +!### Example +!````fortran +! type(json_value),pointer :: p +! type(json_core) :: json +! call json%create_integer(p,'value',1) +!```` + + subroutine json_value_create_integer(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + integer(IK),intent(in) :: val + character(kind=CK,len=*),intent(in) :: name + + call json_value_create(p) + call json%to_integer(p,val,name) + + end subroutine json_value_create_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! A wrapper procedure for [[json_value_create_integer]] so that `create_integer` +! 
method may be called with either a 'DEFAULT' or 'ISO_10646' character kind +! `name` actual argument. + + subroutine wrap_json_value_create_integer(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + integer(IK),intent(in) :: val + character(kind=CDK,len=*),intent(in) :: name + + call json%create_integer(p,val,to_unicode(name)) + + end subroutine wrap_json_value_create_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Allocate a [[json_value]] pointer and make it a real(RK) variable. +! The pointer should not already be allocated. +! +!### Example +!````fortran +! type(json_value),pointer :: p +! type(json_core) :: json +! call json%create_real(p,'value',1.0_RK) +!```` + + subroutine json_value_create_real(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + real(RK),intent(in) :: val + character(kind=CK,len=*),intent(in) :: name + + call json_value_create(p) + call json%to_real(p,val,name) + + end subroutine json_value_create_real +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! A wrapper for [[json_value_create_real]] so that `create_real` method +! may be called with an actual argument corresponding to the dummy argument, +! `name` that may be of 'DEFAULT' or 'ISO_10646' character kind. 
+ + subroutine wrap_json_value_create_real(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + real(RK),intent(in) :: val + character(kind=CDK,len=*),intent(in) :: name + + call json%create_real(p,val,to_unicode(name)) + + end subroutine wrap_json_value_create_real +!***************************************************************************************** + +#ifndef REAL32 +!***************************************************************************************** +!> +! Alternate version of [[json_value_create_real]] where val=real32. +! +!@note The value is converted into a `real(RK)` variable internally. + + subroutine json_value_create_real32(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + real(real32),intent(in) :: val + character(kind=CK,len=*),intent(in) :: name + + call json%create_real(p,real(val,RK),name) + + end subroutine json_value_create_real32 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_create_real32]] where ""name"" is kind(CDK). + + subroutine wrap_json_value_create_real32(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + real(real32),intent(in) :: val + character(kind=CDK,len=*),intent(in) :: name + + call json%create_real(p,val,to_unicode(name)) + + end subroutine wrap_json_value_create_real32 +!***************************************************************************************** +#endif + +#ifdef REAL128 +!***************************************************************************************** +!> +! Alternate version of [[json_value_create_real]] where val=real64. +! +!@note The value is converted into a `real(RK)` variable internally. 
+ + subroutine json_value_create_real64(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + real(real64),intent(in) :: val + character(kind=CK,len=*),intent(in) :: name + + call json%create_real(p,real(val,RK),name) + + end subroutine json_value_create_real64 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Alternate version of [[json_value_create_real64]] where ""name"" is kind(CDK). + + subroutine wrap_json_value_create_real64(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + real(real64),intent(in) :: val + character(kind=CDK,len=*),intent(in) :: name + + call json%create_real(p,val,to_unicode(name)) + + end subroutine wrap_json_value_create_real64 +!***************************************************************************************** +#endif + +!***************************************************************************************** +!> author: Jacob Williams +! +! Allocate a json_value pointer and make it a string variable. +! The pointer should not already be allocated. +! +!### Example +!````fortran +! type(json_value),pointer :: p +! type(json_core) :: json +! call json%create_string(p,'value','hello') +!```` + + subroutine json_value_create_string(json,p,val,name,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: val + character(kind=CK,len=*),intent(in) :: name + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! 
if ADJUSTL() should be called for the `val` + + call json_value_create(p) + call json%to_string(p,val,name,trim_str,adjustl_str) + + end subroutine json_value_create_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! Wrap [[json_value_create_string]] so that `create_string` method may be called +! with actual character string arguments for `name` and `val` that are BOTH of +! 'DEFAULT' or 'ISO_10646' character kind. + + subroutine wrap_json_value_create_string(json,p,val,name,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: val + character(kind=CDK,len=*),intent(in) :: name + logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + + call json%create_string(p,to_unicode(val),to_unicode(name),trim_str,adjustl_str) + + end subroutine wrap_json_value_create_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Allocate a json_value pointer and make it a null variable. +! The pointer should not already be allocated. +! +!### Example +!````fortran +! type(json_value),pointer :: p +! type(json_core) :: json +! 
call json%create_null(p,'value') +!```` + + subroutine json_value_create_null(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name + + call json_value_create(p) + call json%to_null(p,name) + + end subroutine json_value_create_null +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! Wrap [[json_value_create_null]] so that `create_null` method may be called with +! an actual argument corresponding to the dummy argument `name` that is either +! of 'DEFAULT' or 'ISO_10646' character kind. + + subroutine wrap_json_value_create_null(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name + + call json%create_null(p,to_unicode(name)) + + end subroutine wrap_json_value_create_null +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Allocate a [[json_value]] pointer and make it an object variable. +! The pointer should not already be allocated. +! +!### Example +!````fortran +! type(json_value),pointer :: p +! type(json_core) :: json +! call json%create_object(p,'objectname') +!```` +! +!@note The name is not significant for the root structure or an array element. +! In those cases, an empty string can be used. 
+ + subroutine json_value_create_object(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name + + call json_value_create(p) + call json%to_object(p,name) + + end subroutine json_value_create_object +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! Wrap [[json_value_create_object]] so that `create_object` method may be called +! with an actual argument corresponding to the dummy argument `name` that is of +! either 'DEFAULT' or 'ISO_10646' character kind. + + subroutine wrap_json_value_create_object(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name + + call json%create_object(p,to_unicode(name)) + + end subroutine wrap_json_value_create_object +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Allocate a [[json_value]] pointer and make it an array variable. +! The pointer should not already be allocated. +! +!### Example +!````fortran +! type(json_value),pointer :: p +! type(json_core) :: json +! call json%create_array(p,'arrayname') +!```` + + subroutine json_value_create_array(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in) :: name + + call json_value_create(p) + call json%to_array(p,name) + + end subroutine json_value_create_array +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! 
+! A wrapper for [[json_value_create_array]] so that `create_array` method may be +! called with an actual argument, corresponding to the dummy argument `name`, +! that is either of 'DEFAULT' or 'ISO_10646' character kind. + + subroutine wrap_json_value_create_array(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CDK,len=*),intent(in) :: name + + call json%create_array(p,to_unicode(name)) + + end subroutine wrap_json_value_create_array +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Change the [[json_value]] variable to a logical. + + subroutine to_logical(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + logical(LK),intent(in),optional :: val !! if the value is also to be set + !! (if not present, then .false. is used). + character(kind=CK,len=*),intent(in),optional :: name !! if the name is also to be changed. + + !set type and value: + call destroy_json_data(p) + p%var_type = json_logical + allocate(p%log_value) + if (present(val)) then + p%log_value = val + else + p%log_value = .false. !default value + end if + + !name: + if (present(name)) call json%rename(p,name) + + end subroutine to_logical +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Change the [[json_value]] variable to an integer. + + subroutine to_integer(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + integer(IK),intent(in),optional :: val !! if the value is also to be set + !! (if not present, then 0 is used). 
+ character(kind=CK,len=*),intent(in),optional :: name !! if the name is also to be changed. + + !set type and value: + call destroy_json_data(p) + p%var_type = json_integer + allocate(p%int_value) + if (present(val)) then + p%int_value = val + else + p%int_value = 0_IK !default value + end if + + !name: + if (present(name)) call json%rename(p,name) + + end subroutine to_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Change the [[json_value]] variable to a real. + + subroutine to_real(json,p,val,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + real(RK),intent(in),optional :: val !! if the value is also to be set + !! (if not present, then 0.0_rk is used). + character(kind=CK,len=*),intent(in),optional :: name !! if the name is also to be changed. + + !set type and value: + call destroy_json_data(p) + p%var_type = json_real + allocate(p%dbl_value) + if (present(val)) then + p%dbl_value = val + else + p%dbl_value = 0.0_RK !default value + end if + + !name: + if (present(name)) call json%rename(p,name) + + end subroutine to_real +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Change the [[json_value]] variable to a string. +! +!### Modified +! * Izaak Beekman : 02/24/2015 + + subroutine to_string(json,p,val,name,trim_str,adjustl_str) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in),optional :: val !! if the value is also to be set + !! (if not present, then '' is used). + character(kind=CK,len=*),intent(in),optional :: name !! if the name is also to be changed. 
+ logical(LK),intent(in),optional :: trim_str !! if TRIM() should be called for the `val` + !! (only used if `val` is present) + logical(LK),intent(in),optional :: adjustl_str !! if ADJUSTL() should be called for the `val` + !! (only used if `val` is present) + !! (note that ADJUSTL is done before TRIM) + + character(kind=CK,len=:),allocatable :: str !! temp string for `trim()` and/or `adjustl()` + logical :: trim_string !! if the string is to be trimmed + logical :: adjustl_string !! if the string is to be adjusted left + + !set type and value: + call destroy_json_data(p) + p%var_type = json_string + if (present(val)) then + + if (present(trim_str)) then + trim_string = trim_str + else + trim_string = .false. + end if + if (present(adjustl_str)) then + adjustl_string = adjustl_str + else + adjustl_string = .false. + end if + + if (trim_string .or. adjustl_string) then + str = val + if (adjustl_string) str = adjustl(str) + if (trim_string) str = trim(str) + p%str_value = str + else + p%str_value = val + end if + + else + p%str_value = CK_'' ! default value + end if + + !name: + if (present(name)) call json%rename(p,name) + + end subroutine to_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Change the [[json_value]] variable to a null. + + subroutine to_null(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in),optional :: name !! if the name is also to be changed. 
+ + !set type and value: + call destroy_json_data(p) + p%var_type = json_null + + !name: + if (present(name)) call json%rename(p,name) + + end subroutine to_null +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Change the [[json_value]] variable to an object. + + subroutine to_object(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in),optional :: name !! if the name is also to be changed. + + !set type and value: + call destroy_json_data(p) + p%var_type = json_object + + !name: + if (present(name)) call json%rename(p,name) + + end subroutine to_object +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Change the [[json_value]] variable to an array. + + subroutine to_array(json,p,name) + + implicit none + + class(json_core),intent(inout) :: json + type(json_value),pointer :: p + character(kind=CK,len=*),intent(in),optional :: name !! if the name is also to be changed. + + !set type and value: + call destroy_json_data(p) + p%var_type = json_array + + !name: + if (present(name)) call json%rename(p,name) + + end subroutine to_array +!***************************************************************************************** + +!***************************************************************************************** +!> +! Core parsing routine. + + recursive subroutine parse_object(json, unit, str, parent) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: unit !! file unit number (if parsing from a file) + character(kind=CK,len=*),intent(in) :: str !! 
JSON string (if parsing from a string) + type(json_value),pointer :: parent !! the parsed object will be added as a child of this + + type(json_value),pointer :: pair !! temp variable + logical(LK) :: eof !! end of file flag + character(kind=CK,len=1) :: c !! character returned by [[pop_char]] +#if defined __GFORTRAN__ + character(kind=CK,len=:),allocatable :: tmp !! this is a work-around for a bug + !! in the gfortran 4.9 compiler. +#endif + + if (.not. json%exception_thrown) then + + !the routine is being called incorrectly. + if (.not. associated(parent)) then + call json%throw_exception('Error in parse_object: parent pointer not associated.') + end if + + nullify(pair) !probably not necessary + + ! pair name + call json%pop_char(unit, str=str, eof=eof, skip_ws=.true., & + skip_comments=json%allow_comments, popped=c) + if (eof) then + call json%throw_exception('Error in parse_object:'//& + ' Unexpected end of file while parsing start of object.') + return + else if (end_object == c) then + ! end of an empty object + return + else if (quotation_mark == c) then + call json_value_create(pair) +#if defined __GFORTRAN__ + call json%parse_string(unit,str,tmp) ! write to a tmp variable because of + pair%name = tmp ! a bug in 4.9 gfortran compiler. + deallocate(tmp) +#else + call json%parse_string(unit,str,pair%name) +#endif + if (json%exception_thrown) then + call json%destroy(pair) + return + end if + else + call json%throw_exception('Error in parse_object: Expecting string: ""'//c//'""') + return + end if + + ! pair value + call json%pop_char(unit, str=str, eof=eof, skip_ws=.true., & + skip_comments=json%allow_comments, popped=c) + if (eof) then + call json%destroy(pair) + call json%throw_exception('Error in parse_object:'//& + ' Unexpected end of file while parsing object member.') + return + else if (colon_char == c) then + ! 
parse the value + call json%parse_value(unit, str, pair) + if (json%exception_thrown) then + call json%destroy(pair) + return + else + call json%add(parent, pair) + end if + else + call json%destroy(pair) + call json%throw_exception('Error in parse_object:'//& + ' Expecting : and then a value: '//c) + return + end if + + ! another possible pair + call json%pop_char(unit, str=str, eof=eof, skip_ws=.true., & + skip_comments=json%allow_comments, popped=c) + if (eof) then + call json%throw_exception('Error in parse_object: '//& + 'End of file encountered when parsing an object') + return + else if (delimiter == c) then + ! read the next member + call json%parse_object(unit = unit, str=str, parent = parent) + else if (end_object == c) then + ! end of object + return + else + call json%throw_exception('Error in parse_object: Expecting end of object: '//c) + return + end if + + end if + + end subroutine parse_object +!***************************************************************************************** + +!***************************************************************************************** +!> +! Core parsing routine. + + recursive subroutine parse_array(json, unit, str, array) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: unit !! file unit number (if parsing from a file) + character(kind=CK,len=*),intent(in) :: str !! JSON string (if parsing from a string) + type(json_value),pointer :: array + + type(json_value),pointer :: element !! temp variable for array element + logical(LK) :: eof !! end of file flag + character(kind=CK,len=1) :: c !! character returned by [[pop_char]] + + do + + if (json%exception_thrown) exit + + ! try to parse an element value + nullify(element) + call json_value_create(element) + call json%parse_value(unit, str, element) + if (json%exception_thrown) then + if (associated(element)) call json%destroy(element) + exit + end if + + ! 
parse value will deallocate an empty array value + if (associated(element)) call json%add(array, element) + + ! popped the next character + call json%pop_char(unit, str=str, eof=eof, skip_ws=.true., & + skip_comments=json%allow_comments, popped=c) + + if (eof) then + ! The file ended before array was finished: + call json%throw_exception('Error in parse_array: '//& + 'End of file encountered when parsing an array.') + exit + else if (delimiter == c) then + ! parse the next element + cycle + else if (end_array == c) then + ! end of array + exit + else + call json%throw_exception('Error in parse_array: '//& + 'Unexpected character encountered when parsing array.') + exit + end if + + end do + + end subroutine parse_array +!***************************************************************************************** + +!***************************************************************************************** +!> +! Parses a string while reading a JSON file. +! +!### History +! * Jacob Williams : 6/16/2014 : Added hex validation. +! * Jacob Williams : 12/3/2015 : Fixed some bugs. +! * Jacob Williams : 8/23/2015 : `string` is now returned unescaped. +! * Jacob Williams : 7/21/2018 : moved hex validate to [[unescape_string]]. + + subroutine parse_string(json, unit, str, string) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: unit !! file unit number (if + !! parsing from a file) + character(kind=CK,len=*),intent(in) :: str !! JSON string (if parsing + !! from a string) + character(kind=CK,len=:),allocatable,intent(out) :: string !! the string (unescaped + !! if necessary) + + logical(LK) :: eof !! end of file flag + logical(LK) :: escape !! for escape string parsing + character(kind=CK,len=1) :: c !! character returned by [[pop_char]] + integer(IK) :: ip !! index to put next character, + !! to speed up by reducing the number + !! of character string reallocations. + character(kind=CK,len=:),allocatable :: error_message !! 
for string unescaping + + !at least return a blank string if there is a problem: + string = blank_chunk + + if (.not. json%exception_thrown) then + + !initialize: + escape = .false. + ip = 1 + + do + + !get the next character from the file: + call json%pop_char(unit, str=str, eof=eof, skip_ws=.false., popped=c) + + if (eof) then + + call json%throw_exception('Error in parse_string: Expecting end of string') + return + + else if (c==quotation_mark .and. .not. escape) then !end of string + + exit + + else + + !if the string is not big enough, then add another chunk: + if (ip>len(string)) string = string // blank_chunk + + !append to string: + string(ip:ip) = c + ip = ip + 1 + + ! check for escape character, so we don't + ! exit prematurely if escaping a quotation + ! character: + if (escape) then + escape = .false. + else + escape = (c==backslash) + end if + + end if + + end do + + !trim the string if necessary: + if (ip +! Core parsing routine. +! +! This is used to verify the strings `true`, `false`, and `null` during parsing. + + subroutine parse_for_chars(json, unit, str, chars) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: unit !! file unit number (if parsing from a file) + character(kind=CK,len=*),intent(in) :: str !! JSON string (if parsing from a string) + character(kind=CK,len=*),intent(in) :: chars !! the string to check for. + + integer(IK) :: i !! counter + integer(IK) :: length !! trimmed length of `chars` + logical(LK) :: eof !! end of file flag + character(kind=CK,len=1) :: c !! character returned by [[pop_char]] + + if (.not. 
json%exception_thrown) then + + length = len_trim(chars) + + do i = 1, length + call json%pop_char(unit, str=str, eof=eof, skip_ws=.false., popped=c) + if (eof) then + call json%throw_exception('Error in parse_for_chars:'//& + ' Unexpected end of file while parsing.') + return + else if (c /= chars(i:i)) then + call json%throw_exception('Error in parse_for_chars:'//& + ' Unexpected character: ""'//c//'"" (expecting ""'//& + chars(i:i)//'"")') + return + end if + end do + + end if + + end subroutine parse_for_chars +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/20/2014 +! +! Read a numerical value from the file (or string). +! The routine will determine if it is an integer or a real, and +! allocate the type accordingly. +! +!@note Complete rewrite of the original FSON routine, which had some problems. + + subroutine parse_number(json, unit, str, value) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: unit !! file unit number (if parsing from a file) + character(kind=CK,len=*),intent(in) :: str !! JSON string (if parsing from a string) + type(json_value),pointer :: value + + character(kind=CK,len=:),allocatable :: tmp !! temp string + character(kind=CK,len=:),allocatable :: saved_err_message !! temp error message for + !! string to int conversion + character(kind=CK,len=1) :: c !! character returned by [[pop_char]] + logical(LK) :: eof !! end of file flag + real(RK) :: rval !! real value + integer(IK) :: ival !! integer value + logical(LK) :: first !! first character + logical(LK) :: is_integer !! it is an integer + integer(IK) :: ip !! index to put next character + !! [to speed up by reducing the number + !! of character string reallocations] + + if (.not. json%exception_thrown) then + + tmp = blank_chunk + ip = 1 + first = .true. + is_integer = .true. 
!assume it may be an integer, unless otherwise determined + + !read one character at a time and accumulate the string: + do + + !get the next character: + call json%pop_char(unit, str=str, eof=eof, skip_ws=.true., popped=c) + + select case (c) + case(CK_'-',CK_'+') !note: allowing a '+' as the first character here. + + if (is_integer .and. (.not. first)) is_integer = .false. + + !add it to the string: + !tmp = tmp // c !...original + if (ip>len(tmp)) tmp = tmp // blank_chunk + tmp(ip:ip) = c + ip = ip + 1 + + case(CK_'.',CK_'E',CK_'e',CK_'D',CK_'d') !can be present in real numbers + + if (is_integer) is_integer = .false. + + !add it to the string: + !tmp = tmp // c !...original + if (ip>len(tmp)) tmp = tmp // blank_chunk + tmp(ip:ip) = c + ip = ip + 1 + + case(CK_'0':CK_'9') !valid characters for numbers + + !add it to the string: + !tmp = tmp // c !...original + if (ip>len(tmp)) tmp = tmp // blank_chunk + tmp(ip:ip) = c + ip = ip + 1 + + case default + + !push back the last character read: + call json%push_char(c) + + !string to value: + if (is_integer) then + ! it is an integer: + ival = json%string_to_int(tmp) + + if (json%exception_thrown .and. .not. json%strict_integer_type_checking) then + ! if it couldn't be converted to an integer, + ! then try to convert it to a real value and see if that works + + saved_err_message = json%err_message ! keep the original error message + call json%clear_exceptions() ! clear exceptions + rval = json%string_to_dble(tmp) + if (json%exception_thrown) then + ! restore original error message and continue + json%err_message = saved_err_message + call json%to_integer(value,ival) ! just so we have something + else + ! in this case, we return a real + call json%to_real(value,rval) + end if + + else + call json%to_integer(value,ival) + end if + + else + ! it is a real: + rval = json%string_to_dble(tmp) + call json%to_real(value,rval) + end if + + exit !finished + + end select + + if (first) first = .false. 
+ + end do + + !cleanup: + if (allocated(tmp)) deallocate(tmp) + + end if + + end subroutine parse_number +!***************************************************************************************** + +!***************************************************************************************** +!> +! Get the next character from the file (or string). +! +!### See also +! * [[push_char]] +! +!@note This routine ignores non-printing ASCII characters +! (`iachar<=31`) that are in strings. + + subroutine pop_char(json,unit,str,skip_ws,skip_comments,eof,popped) + + implicit none + + class(json_core),intent(inout) :: json + integer(IK),intent(in) :: unit !! file unit number (if parsing + !! from a file) + character(kind=CK,len=*),intent(in) :: str !! JSON string (if parsing from a + !! string) -- only used if `unit=0` + logical(LK),intent(in),optional :: skip_ws !! to ignore whitespace [default False] + logical(LK),intent(in),optional :: skip_comments !! to ignore comment lines [default False] + logical(LK),intent(out) :: eof !! true if the end of the file has + !! been reached. + character(kind=CK,len=1),intent(out) :: popped !! the popped character returned + + integer(IK) :: ios !! `iostat` flag + integer(IK) :: str_len !! length of `str` + character(kind=CK,len=1) :: c !! a character read from the file (or string) + logical(LK) :: ignore !! if whitespace is to be ignored + logical(LK) :: ignore_comments !! if comment lines are to be ignored + logical(LK) :: parsing_comment !! if we are in the process + !! of parsing a comment line + + if (.not. json%exception_thrown) then + + eof = .false. + if (.not. present(skip_ws)) then + ignore = .false. + else + ignore = skip_ws + end if + parsing_comment = .false. + if (.not. present(skip_comments)) then + ignore_comments = .false. + else + ignore_comments = skip_comments + end if + + do + + if (json%pushed_index > 0) then + + ! there is a character pushed back on, most likely + ! from the number parsing. 
Note: this can only occur if + ! reading from a file when use_unformatted_stream=.false. + c = json%pushed_char(json%pushed_index:json%pushed_index) + json%pushed_index = json%pushed_index - 1 + + else + + if (unit/=0) then !read from the file + + !read the next character: + if (use_unformatted_stream) then + + ! in this case, we read the file in chunks. + ! if we already have the character we need, + ! then get it from the chunk. Otherwise, + ! read in another chunk. + if (json%ichunk<1) then + ! read in a chunk: + json%ichunk = 0 + if (json%filesizelen(json%chunk)) then + ! check this just in case + ios = IOSTAT_END + else + ! get the next character from the chunk: + c = json%chunk(json%ichunk:json%ichunk) + if (json%ichunk==len(json%chunk)) then + json%ichunk = 0 ! reset for next chunk + end if + end if + + else + ! a formatted read: + read(unit=unit,fmt='(A1)',advance='NO',iostat=ios) c + end if + json%ipos = json%ipos + 1 + + else !read from the string + + str_len = len(str) !length of the string + if (json%ipos<=str_len) then + c = str(json%ipos:json%ipos) + ios = 0 + else + ios = IOSTAT_END !end of the string + end if + json%ipos = json%ipos + 1 + + end if + + json%char_count = json%char_count + 1 !character count in the current line + + if (IS_IOSTAT_END(ios)) then !end of file + + ! no character to return + json%char_count = 0 + eof = .true. + popped = space ! just to set a value + exit + + else if (IS_IOSTAT_EOR(ios) .or. c==newline) then !end of record + + json%char_count = 0 + json%line_count = json%line_count + 1 + if (ignore_comments) parsing_comment = .false. ! done parsing this comment line + cycle + + end if + + end if + + if (ignore_comments .and. (parsing_comment .or. scan(c,json%comment_char,kind=IK)>0_IK) ) then + + ! skipping the comment + parsing_comment = .true. + cycle + + else if (any(c == control_chars)) then + + ! non printing ascii characters + cycle + + else if (ignore .and. c == space) then + + ! 
ignoring whitespace + cycle + + else + + ! return the character + popped = c + exit + + end if + + end do + + end if + + end subroutine pop_char +!***************************************************************************************** + +!***************************************************************************************** +!> +! Core routine. +! +!### See also +! * [[pop_char]] +! +!### History +! * Jacob Williams : 5/3/2015 : replaced original version of this routine. + + subroutine push_char(json,c) + + implicit none + + class(json_core),intent(inout) :: json + character(kind=CK,len=1),intent(in) :: c !! to character to push + + character(kind=CK,len=max_numeric_str_len) :: istr !! for error printing + + if (.not. json%exception_thrown) then + + if (use_unformatted_stream) then + + !in this case, c is ignored, and we just + !decrement the stream position counter: + json%ipos = json%ipos - 1 + json%ichunk = json%ichunk - 1 + + else + + json%pushed_index = json%pushed_index + 1 + + if (json%pushed_index>0 .and. json%pushed_index<=len(json%pushed_char)) then + json%pushed_char(json%pushed_index:json%pushed_index) = c + else + call integer_to_string(json%pushed_index,int_fmt,istr) + call json%throw_exception('Error in push_char: '//& + 'invalid valid of pushed_index: '//trim(istr)) + end if + + end if + + !character count in the current line + json%char_count = json%char_count - 1 + + end if + + end subroutine push_char +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Print any error message, and then clear the exceptions. +! +!@note This routine is used by the unit tests. +! It was originally in json_example.f90, and was +! moved here 2/26/2015 by Izaak Beekman. 
+ + subroutine json_print_error_message(json,io_unit) + + implicit none + + class(json_core),intent(inout) :: json + integer, intent(in), optional :: io_unit !! unit number for + !! printing error message + + character(kind=CK,len=:),allocatable :: error_msg !! error message + logical :: status_ok !! false if there were any errors thrown + + !get error message: + call json%check_for_errors(status_ok, error_msg) + + !print it if there is one: + if (.not. status_ok) then + if (present(io_unit)) then + write(io_unit,'(A)') error_msg + else + write(output_unit,'(A)') error_msg + end if + deallocate(error_msg) + call json%clear_exceptions() + end if + + end subroutine json_print_error_message +!***************************************************************************************** + +!***************************************************************************************** + end module json_value_module +!***************************************************************************************** +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","src/lib/json-fortran/json_kinds.F90",".F90","5759","149","!***************************************************************************************** +!> author: Jacob Williams +! license: BSD +! +! JSON-Fortran kind definitions. +! +!### License +! * JSON-Fortran is released under a BSD-style license. +! See the [LICENSE](https://github.com/jacobwilliams/json-fortran/blob/master/LICENSE) +! file for details. +! +!@note ```-DUSE_UCS4``` is an optional preprocessor flag. +! When present, Unicode support is enabled. Note that this +! is currently only supported with the gfortran compiler. +! Example: ```gfortran -DUSE_UCS4 ... ``` +#ifdef USE_UCS4 +# pragma push_macro(""USE_UCS4"") +# undef USE_UCS4 +! The documentation given here assumes ```USE_UCS4``` **is** defined. +# pragma pop_macro(""USE_UCS4"") +#else +! The documentation given here assumes ```USE_UCS4``` **is not** defined. +#endif +! 
+!@warning ```CK``` and ```CDK``` are the JSON-Fortran character kind and JSON-Fortran default +! character kind respectively. Client code **MUST** ensure characters of ```kind=CK``` +! are used for all character variables and strings passed to the JSON-Fortran +! library *EXCEPT* for file names which must be of ```'DEFAULT'``` character kind, +! provided here as ```CDK```. In particular, any variable that is a: json path, string +! value or object name passed to the JSON-Fortran library **MUST** be of type ```CK```. +! +!@note Most string literal constants of default kind are fine to pass as arguments to +! JSON-Fortran procedures since they have been overloaded to accept ```intent(in)``` +! character arguments of the default (```CDK```) kind. If you find a procedure which does +! not accept an ```intent(in)``` literal string argument of default kind, please +! [file an issue](https://github.com/jacobwilliams/json-fortran/issues/new) on GitHub. +! +!@note The default real kind (`RK`) and the default integer kind (`IK`) can be +! changed using optional preprocessor flags. This library was built with kinds: +#ifdef REAL32 +! real(kind=real32) [4 bytes] +#elif REAL64 +! real(kind=real64) [8 bytes] +#elif REAL128 +! real(kind=real128) [16 bytes] +#else +! real(kind=real64) [8 bytes] +#endif +! and +#ifdef INT8 +! integer(kind=int8) [1 byte] +#elif INT16 +! integer(kind=int16) [2 bytes] +#elif INT32 +! integer(kind=int32) [4 bytes] +#elif INT64 +! integer(kind=int64) [8 bytes] +#else +! integer(kind=int32) [4 bytes] +#endif +! . +! +!@note In addition to the real kind specified by `RK`, interfaces for +! the real kinds with less precision are also provided in the library, +! but all are converted to `real(RK)` variables internally. + + module json_kinds + + use,intrinsic :: iso_fortran_env + + implicit none + + private + +! used for the reals with less precision +! 
than the default precision: +#ifndef REAL32 + public :: real32 +#endif +#ifdef REAL128 + public :: real64 +#endif + +#ifdef REAL32 + integer,parameter,public :: RK = real32 !! Default real kind [4 bytes] +#elif REAL64 + integer,parameter,public :: RK = real64 !! Default real kind [8 bytes] +#elif REAL128 + integer,parameter,public :: RK = real128 !! Default real kind [16 bytes] +#else + integer,parameter,public :: RK = real64 !! Default real kind if not specified [8 bytes] +#endif + +#ifdef INT8 + integer,parameter,public :: IK = int8 !! Default integer kind [1 byte] +#elif INT16 + integer,parameter,public :: IK = int16 !! Default integer kind [2 bytes] +#elif INT32 + integer,parameter,public :: IK = int32 !! Default integer kind [4 bytes] +#elif INT64 + integer,parameter,public :: IK = int64 !! Default integer kind [8 bytes] +#else + integer,parameter,public :: IK = int32 !! Default integer kind if not specified [4 bytes] +#endif + + !********************************************************* + !> + ! Processor dependent 'DEFAULT' character kind. + ! This is 1 byte for the Intel and Gfortran compilers. + integer,parameter,public :: CDK = selected_char_kind('DEFAULT') + !********************************************************* + + !********************************************************* + !> + ! Default logical kind. + ! This is 4 bytes for the Intel and Gfortran compilers + ! (and perhaps others). + ! The declaration ensures a valid kind + ! if the compiler doesn't have a logical_kinds(3). + integer,parameter,public :: LK = logical_kinds(min(3,size(logical_kinds))) + !********************************************************* + + !********************************************************* + !> + ! String kind preprocessor macro. +#if defined __GFORTRAN__ && defined USE_UCS4 + ! gfortran compiler AND UCS4 support requested: + character(kind=CDK,len=*),parameter :: json_fortran_string_kind = 'ISO_10646' +#else + ! 
this is the string kind to use unless compiling with GFortran AND + ! UCS4/ISO 10646 support is requested + character(kind=CDK,len=*),parameter :: json_fortran_string_kind = 'DEFAULT' +#endif + !********************************************************* + + !********************************************************* + !> + ! Default character kind used by JSON-Fortran. + ! If ISO 10646 (UCS4) support is available, use that, + ! otherwise, gracefully fall back on 'DEFAULT' characters. + ! Currently only gfortran >= 4.9.2 will correctly support + ! UCS4 which is stored in 4 bytes. + ! (and perhaps others). + integer,parameter,public :: CK = selected_char_kind(json_fortran_string_kind) + !********************************************************* + + end module json_kinds +!***************************************************************************************** +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","src/lib/json-fortran/json_module.F90",".F90","3146","80","!***************************************************************************************** +!> author: Jacob Williams +! license: BSD +! +! A Modern Fortran JSON (JavaScript Object Notation) API. +! +! This module provides access to [[json_value_module]] and +! [[json_file_module]]. For normal JSON-Fortran use, using this module +! is all that is necessary. +! +! Note that this module renames the kind definition variables from [[json_kinds]] +! from [`RK`, `IK`, `LK`, `CK`, and `CDK`] to [`json_RK`, `json_IK`, `json_LK`, +! `json_CK`, and `json_CDK`] so as to avoid namespace pollution with short +! variable names. +! +#ifdef USE_UCS4 +#pragma push_macro(""USE_UCS4"") +#undef USE_UCS4 +! Since ```USE_UCS4``` **is** defined, this module also exports the +! operators `==`, `/=`, and `//` from [[json_string_utilities]] for +! `CK` and `CDK` operations. +#pragma pop_macro(""USE_UCS4"") +#endif +! +!### License +! * JSON-Fortran is released under a BSD-style license. +! 
See the [LICENSE](https://github.com/jacobwilliams/json-fortran/blob/master/LICENSE) +! file for details. +! +!### History +! * Joseph A. Levin : March 2012 : Original [FSON](https://github.com/josephalevin/fson) +! code [retrieved on 12/2/2013]. +! * Jacob Williams : 2/8/2014 : Extensive modifications to the original FSON code. +! The original F95 code was split into four files: +! fson_path_m.f95, fson_string_m.f95, fson_value_m.f95, and fson.f95. +! The new code has been extensively updated, refactored and combined into this +! one module (json_module.f90). +! Various Fortran 2003/2008 features are now used +! (e.g., allocatable strings, newunit, generic, class, and abstract interface). +! * Development continues at: [Github](https://github.com/jacobwilliams/json-fortran) +! +!### See also +! * [json-fortran development site](https://github.com/jacobwilliams/json-fortran) +! * [json-fortran online documentation](https://jacobwilliams.github.io/json-fortran) +! * [JSON website](http://www.json.org/) +! * [JSON validator](http://jsonlint.com/) +! +!@note Originally JSON-Fortran was entirely contained within this module. 
+ + module json_module + + use json_kinds, only: json_RK => RK, & + json_IK => IK, & + json_LK => LK, & + json_CK => CK, & + json_CDK => CDK +#ifdef USE_UCS4 + use json_string_utilities, only: operator(==),& + operator(//),& + operator(/=) +#endif + use json_parameters, only: json_unknown,& + json_null, & + json_object, & + json_array, & + json_logical,& + json_integer,& + json_real, & + json_double, & + json_string + use json_value_module + use json_file_module + + implicit none + + public + + end module json_module +!***************************************************************************************** +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","src/lib/json-fortran/json_string_utilities.F90",".F90","31512","933","!***************************************************************************************** +!> author: Jacob Williams +! license: BSD +! +! JSON-Fortran support module for string manipulation. +! +!### License +! * JSON-Fortran is released under a BSD-style license. +! See the [LICENSE](https://github.com/jacobwilliams/json-fortran/blob/master/LICENSE) +! file for details. + + module json_string_utilities + + use,intrinsic :: ieee_arithmetic + use json_kinds + use json_parameters + + implicit none + + private + + !****************************************************** + !> + ! Convert a 'DEFAULT' kind character input to + ! 'ISO_10646' kind and return it + interface to_unicode + module procedure to_uni, to_uni_vec + end interface + !****************************************************** + +#ifdef USE_UCS4 + !****************************************************** + !> + ! Provide a means to convert to UCS4 while + ! concatenating UCS4 and default strings + interface operator(//) + module procedure ucs4_join_default, default_join_ucs4 + end interface + public :: operator(//) + !****************************************************** + + !****************************************************** + !> + ! 
Provide a string `==` operator that works + ! with mixed kinds + interface operator(==) + module procedure ucs4_comp_default, default_comp_ucs4 + end interface + public :: operator(==) + !****************************************************** + + !****************************************************** + !> + ! Provide a string `/=` operator that works + ! with mixed kinds + interface operator(/=) + module procedure ucs4_neq_default, default_neq_ucs4 + end interface + public :: operator(/=) + !****************************************************** +#endif + + public :: integer_to_string + public :: real_to_string + public :: string_to_integer + public :: string_to_real + public :: valid_json_hex + public :: to_unicode + public :: escape_string + public :: unescape_string + public :: lowercase_string + public :: replace_string + public :: decode_rfc6901 + public :: encode_rfc6901 + + contains +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/4/2013 +! +! Convert an integer to a string. + + pure subroutine integer_to_string(ival,int_fmt,str) + + implicit none + + integer(IK),intent(in) :: ival !! integer value. + character(kind=CDK,len=*),intent(in) :: int_fmt !! format for integers + character(kind=CK,len=*),intent(out) :: str !! `ival` converted to a string. + + integer(IK) :: istat + + write(str,fmt=int_fmt,iostat=istat) ival + + if (istat==0) then + str = adjustl(str) + else + str = repeat(star,len(str)) + end if + + end subroutine integer_to_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Convert a string into an integer. +! +!# History +! * Jacob Williams : 12/10/2013 : Rewrote original `parse_integer` routine. +! Added error checking. +! 
* Modified by Izaak Beekman +! * Jacob Williams : 2/4/2017 : moved core logic to this routine. + + subroutine string_to_integer(str,ival,status_ok) + + implicit none + + character(kind=CK,len=*),intent(in) :: str !! the string to convert to an integer + integer(IK),intent(out) :: ival !! the integer value + logical(LK),intent(out) :: status_ok !! true if there were no errors + + character(kind=CDK,len=:),allocatable :: digits + integer(IK) :: ndigits_digits,ndigits,ierr + + ! Compute how many digits we need to read + ndigits = 2*len_trim(str) + if (ndigits/=0) then + ndigits_digits = floor(log10(real(ndigits)))+1 + allocate(character(kind=CDK,len=ndigits_digits) :: digits) + write(digits,'(I0)') ndigits !gfortran will have a runtime error with * edit descriptor here + ! gfortran bug: '*' edit descriptor for ISO_10646 strings does bad stuff. + read(str,'(I'//trim(digits)//')',iostat=ierr) ival !string to integer + ! error check: + status_ok = (ierr==0) + else + status_ok = .false. + end if + if (.not. status_ok) ival = 0_IK + + end subroutine string_to_integer +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 12/4/2013 +! +! Convert a real value to a string. +! +!### Modified +! * Izaak Beekman : 02/24/2015 : added the compact option. +! * Jacob Williams : 10/27/2015 : added the star option. +! * Jacob Williams : 07/07/2019 : added null and ieee options. + + subroutine real_to_string(rval,real_fmt,compact_real,non_normals_to_null,str) + + implicit none + + real(RK),intent(in) :: rval !! real value. + character(kind=CDK,len=*),intent(in) :: real_fmt !! format for real numbers + logical(LK),intent(in) :: compact_real !! compact the string so that it is + !! displayed with fewer characters + logical(LK),intent(in) :: non_normals_to_null !! If True, NaN, Infinity, or -Infinity are returned as `null`. 
+ !! If False, the string value will be returned in quotes + !! (e.g., ""NaN"", ""Infinity"", or ""-Infinity"" ) + character(kind=CK,len=*),intent(out) :: str !! `rval` converted to a string. + + integer(IK) :: istat !! write `iostat` flag + + if (ieee_is_finite(rval) .and. .not. ieee_is_nan(rval)) then + + ! normal real numbers + + if (real_fmt==star) then + write(str,fmt=*,iostat=istat) rval + else + write(str,fmt=real_fmt,iostat=istat) rval + end if + + if (istat==0) then + !in this case, the default string will be compacted, + ! so that the same value is displayed with fewer characters. + if (compact_real) call compact_real_string(str) + else + str = repeat(star,len(str)) ! error + end if + + else + ! special cases for NaN, Infinity, and -Infinity + + if (non_normals_to_null) then + ! return it as a JSON null value + str = null_str + else + ! Let the compiler do the real to string conversion + ! like before, but put the result in quotes so it + ! gets printed as a string + write(str,fmt=*,iostat=istat) rval + if (istat==0) then + str = quotation_mark//trim(adjustl(str))//quotation_mark + else + str = repeat(star,len(str)) ! error + end if + end if + + end if + + end subroutine real_to_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/19/2014 +! +! Convert a string into a `real(RK)`. +! +!# History +! * Jacob Williams, 10/27/2015 : Now using `fmt=*`, rather than +! `fmt=real_fmt`, since it doesn't work for some unusual cases +! (e.g., when `str='1E-5'`). +! * Jacob Williams : 2/6/2017 : moved core logic to this routine. + + subroutine string_to_real(str,use_quiet_nan,rval,status_ok) + + implicit none + + character(kind=CK,len=*),intent(in) :: str !! the string to convert to a real + logical(LK),intent(in) :: use_quiet_nan !! if true, return NaN's as `ieee_quiet_nan`. + !! 
otherwise, use `ieee_signaling_nan`. + real(RK),intent(out) :: rval !! `str` converted to a real value + logical(LK),intent(out) :: status_ok !! true if there were no errors + + integer(IK) :: ierr !! read iostat error code + + read(str,fmt=*,iostat=ierr) rval + status_ok = (ierr==0) + if (.not. status_ok) then + rval = 0.0_RK + else + if (ieee_support_nan(rval)) then + if (ieee_is_nan(rval)) then + ! make sure to return the correct NaN + if (use_quiet_nan) then + rval = ieee_value(rval,ieee_quiet_nan) + else + rval = ieee_value(rval,ieee_signaling_nan) + end if + end if + end if + end if + + end subroutine string_to_real +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! date: 02/24/2015 +! +! Compact a string representing a real number, so that +! the same value is displayed with fewer characters. +! +!# See also +! * [[real_to_string]] + + subroutine compact_real_string(str) + + implicit none + + character(kind=CK,len=*),intent(inout) :: str !! string representation of a real number. + + character(kind=CK,len=len(str)) :: significand + character(kind=CK,len=len(str)) :: expnt + character(kind=CK,len=2) :: separator + integer(IK) :: exp_start + integer(IK) :: decimal_pos + integer(IK) :: sig_trim + integer(IK) :: exp_trim + integer(IK) :: i !! counter + + str = adjustl(str) + exp_start = scan(str,CK_'eEdD') + if (exp_start == 0) exp_start = scan(str,CK_'-+',back=.true.) 
+ decimal_pos = scan(str,CK_'.') + if (exp_start /= 0) separator = str(exp_start:exp_start) + + if ( exp_start < decimal_pos ) then !possibly signed, exponent-less float + + significand = str + sig_trim = len(trim(significand)) + do i = len(trim(significand)),decimal_pos+2,-1 !look from right to left at 0s + !but save one after the decimal place + if (significand(i:i) == '0') then + sig_trim = i-1 + else + exit + end if + end do + str = trim(significand(1:sig_trim)) + + else if (exp_start > decimal_pos) then !float has exponent + + significand = str(1:exp_start-1) + sig_trim = len(trim(significand)) + do i = len(trim(significand)),decimal_pos+2,-1 !look from right to left at 0s + if (significand(i:i) == '0') then + sig_trim = i-1 + else + exit + end if + end do + expnt = adjustl(str(exp_start+1:)) + if (expnt(1:1) == '+' .or. expnt(1:1) == '-') then + separator = trim(adjustl(separator))//expnt(1:1) + exp_start = exp_start + 1 + expnt = adjustl(str(exp_start+1:)) + end if + exp_trim = 1 + do i = 1,(len(trim(expnt))-1) !look at exponent leading zeros saving last + if (expnt(i:i) == '0') then + exp_trim = i+1 + else + exit + end if + end do + str = trim(adjustl(significand(1:sig_trim)))// & + trim(adjustl(separator))// & + trim(adjustl(expnt(exp_trim:))) + + !else ! mal-formed real, BUT this code should be unreachable + + end if + + end subroutine compact_real_string +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! date: 1/21/2014 +! +! Add the escape characters to a string for adding to JSON. + + subroutine escape_string(str_in, str_out, escape_solidus) + + implicit none + + character(kind=CK,len=*),intent(in) :: str_in + character(kind=CK,len=:),allocatable,intent(out) :: str_out + logical(LK),intent(in) :: escape_solidus !! if the solidus (forward slash) + !! 
is also to be escaped + + integer(IK) :: i !! counter + integer(IK) :: ipos !! accumulated string size + !! (so we can allocate it in chunks for + !! greater runtime efficiency) + character(kind=CK,len=1) :: c !! for reading `str_in` one character at a time. +#if defined __GFORTRAN__ + character(kind=CK,len=:),allocatable :: tmp !! workaround for bug in gfortran 6.1 +#endif + logical :: to_be_escaped !! if there are characters to be escaped + + character(kind=CK,len=*),parameter :: specials_no_slash = quotation_mark//& + backslash//& + bspace//& + formfeed//& + newline//& + carriage_return//& + horizontal_tab + + character(kind=CK,len=*),parameter :: specials = specials_no_slash//slash + + !Do a quick scan for the special characters, + ! if any are present, then process the string, + ! otherwise, return the string as is. + if (escape_solidus) then + to_be_escaped = scan(str_in,specials)>0 + else + to_be_escaped = scan(str_in,specials_no_slash)>0 + end if + + if (to_be_escaped) then + + str_out = repeat(space,chunk_size) + ipos = 1 + + !go through the string and look for special characters: + do i=1,len(str_in) + + c = str_in(i:i) !get next character in the input string + + !if the string is not big enough, then add another chunk: + if (ipos+3>len(str_out)) str_out = str_out // blank_chunk + + select case(c) + case(backslash) + + !test for unicode sequence: '\uXXXX' + ![don't add an extra '\' for those] + if (i+5<=len(str_in)) then + if (str_in(i+1:i+1)==CK_'u' .and. 
& + valid_json_hex(str_in(i+2:i+5))) then + str_out(ipos:ipos) = c + ipos = ipos + 1 + cycle + end if + end if + + str_out(ipos:ipos+1) = backslash//c + ipos = ipos + 2 + + case(quotation_mark) + str_out(ipos:ipos+1) = backslash//c + ipos = ipos + 2 + case(slash) + if (escape_solidus) then + str_out(ipos:ipos+1) = backslash//c + ipos = ipos + 2 + else + str_out(ipos:ipos) = c + ipos = ipos + 1 + end if + case(bspace) + str_out(ipos:ipos+1) = '\b' + ipos = ipos + 2 + case(formfeed) + str_out(ipos:ipos+1) = '\f' + ipos = ipos + 2 + case(newline) + str_out(ipos:ipos+1) = '\n' + ipos = ipos + 2 + case(carriage_return) + str_out(ipos:ipos+1) = '\r' + ipos = ipos + 2 + case(horizontal_tab) + str_out(ipos:ipos+1) = '\t' + ipos = ipos + 2 + case default + str_out(ipos:ipos) = c + ipos = ipos + 1 + end select + + end do + + !trim the string if necessary: + if (ipos +! Remove the escape characters from a JSON string and return it. +! +! The escaped characters are denoted by the `\` character: +! +! * `\""` - quotation mark +! * `\\` - reverse solidus +! * `\/` - solidus +! * `\b` - backspace +! * `\f` - formfeed +! * `\n` - newline (LF) +! * `\r` - carriage return (CR) +! * `\t` - horizontal tab +! * `\uXXXX` - 4 hexadecimal digits + + subroutine unescape_string(str, error_message) + + implicit none + + character(kind=CK,len=:),allocatable,intent(inout) :: str !! in: string as stored + !! in a [[json_value]]. + !! out: decoded string. + character(kind=CK,len=:),allocatable,intent(out) :: error_message !! will be allocated if + !! there was an error + + integer :: i !! counter + integer :: n !! length of `str` + integer :: m !! length of `str_tmp` + character(kind=CK,len=1) :: c !! for scanning each character in string + character(kind=CK,len=:),allocatable :: str_tmp !! temp decoded string (if the input + !! string contains an escape character + !! and needs to be decoded). 
+ + if (scan(str,backslash)>0) then + + !there is at least one escape character, so process this string: + + n = len(str) + str_tmp = repeat(space,n) !size the output string (will be trimmed later) + m = 0 !counter in str_tmp + i = 0 !counter in str + + do + + i = i + 1 + if (i>n) exit ! finished + c = str(i:i) ! get next character in the string + + if (c == backslash) then + + if (i author: Jacob Williams +! date:6/14/2014 +! +! Returns true if the string is a valid 4-digit hex string. +! +!# Examples +!```fortran +! valid_json_hex('0000') !returns true +! valid_json_hex('ABC4') !returns true +! valid_json_hex('AB') !returns false (< 4 characters) +! valid_json_hex('WXYZ') !returns false (invalid characters) +!``` + + pure function valid_json_hex(str) result(valid) + + implicit none + + logical(LK) :: valid !! is str a value 4-digit hex string + character(kind=CK,len=*),intent(in) :: str !! the string to check. + + integer(IK) :: n !! length of `str` + integer(IK) :: i !! counter + + !> an array of the valid hex characters + character(kind=CK,len=1),dimension(22),parameter :: valid_chars = & + [ (achar(i),i=48,57), & ! decimal digits + (achar(i),i=65,70), & ! capital A-F + (achar(i),i=97,102) ] ! lowercase a-f + + !initialize + valid = .false. + + !check all the characters in the string: + n = len(str) + if (n==4) then + do i=1,n + if (.not. any(str(i:i)==valid_chars)) return + end do + valid = .true. !all are in the set, so it is OK + end if + + end function valid_json_hex +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! Convert string to unicode (CDK to CK). 
+ + pure function to_uni(str) + + implicit none + + character(kind=CDK,len=*), intent(in) :: str + character(kind=CK,len=len(str)) :: to_uni + + to_uni = str + + end function to_uni +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! Convert array of strings to unicode (CDK to CK). +! +!@note JW: may be able to remove this by making [[to_uni]] PURE ELEMENTAL ? + + pure function to_uni_vec(str) + + implicit none + + character(kind=CDK,len=*), dimension(:), intent(in) :: str + character(kind=CK,len=len(str)), dimension(size(str)) :: to_uni_vec + + to_uni_vec = str + + end function to_uni_vec +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! `CK`//`CDK` operator. + + pure function ucs4_join_default(ucs4_str,def_str) result(res) + + implicit none + + character(kind=CK, len=*), intent(in) :: ucs4_str + character(kind=CDK,len=*), intent(in) :: def_str + character(kind=CK,len=(len(ucs4_str)+len(def_str))) :: res + + res = ucs4_str//to_unicode(def_str) + + end function ucs4_join_default +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! `CDK`//`CK` operator. 
+ + pure function default_join_ucs4(def_str,ucs4_str) result(res) + + implicit none + + character(kind=CDK,len=*), intent(in) :: def_str + character(kind=CK, len=*), intent(in) :: ucs4_str + character(kind=CK,len=(len(def_str)+len(ucs4_str))) :: res + + res = to_unicode(def_str)//ucs4_str + + end function default_join_ucs4 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! `CK`==`CDK` operator. + + pure elemental function ucs4_comp_default(ucs4_str,def_str) result(res) + + implicit none + + character(kind=CK, len=*), intent(in) :: ucs4_str + character(kind=CDK,len=*), intent(in) :: def_str + logical(LK) :: res + + res = ( ucs4_str == to_unicode(def_str) ) + + end function ucs4_comp_default +!***************************************************************************************** + +!***************************************************************************************** +!> author: Izaak Beekman +! +! `CDK`==`CK` operator. + + pure elemental function default_comp_ucs4(def_str,ucs4_str) result(res) + + implicit none + + character(kind=CDK,len=*), intent(in) :: def_str + character(kind=CK, len=*), intent(in) :: ucs4_str + logical(LK) :: res + + res = (to_unicode(def_str) == ucs4_str) + + end function default_comp_ucs4 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! `CK`/=`CDK` operator. 
+ + pure elemental function ucs4_neq_default(ucs4_str,def_str) result(res) + + implicit none + + character(kind=CK, len=*), intent(in) :: ucs4_str + character(kind=CDK,len=*), intent(in) :: def_str + logical(LK) :: res + + res = ( ucs4_str /= to_unicode(def_str) ) + + end function ucs4_neq_default +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! `CDK`/=`CK` operator. + + pure elemental function default_neq_ucs4(def_str,ucs4_str) result(res) + + implicit none + + character(kind=CDK,len=*), intent(in) :: def_str + character(kind=CK, len=*), intent(in) :: ucs4_str + logical(LK) :: res + + res = (to_unicode(def_str) /= ucs4_str) + + end function default_neq_ucs4 +!***************************************************************************************** + +!***************************************************************************************** +!> author: Jacob Williams +! +! Returns lowercase version of the `CK` string. + + pure function lowercase_string(str) result(s_lower) + + implicit none + + character(kind=CK,len=*),intent(in) :: str !! input string + character(kind=CK,len=(len(str))) :: s_lower !! lowercase version of the string + + integer :: i !! counter + integer :: j !! index of uppercase character + + s_lower = str + + do i = 1, len_trim(str) + j = index(upper,s_lower(i:i)) + if (j>0) s_lower(i:i) = lower(j:j) + end do + + end function lowercase_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Replace all occurrences of `s1` in `str` with `s2`. +! +! A case-sensitive match is used. +! +!@note `str` must be allocated. 
+ + pure subroutine replace_string(str,s1,s2) + + implicit none + + character(kind=CK,len=:),allocatable,intent(inout) :: str + character(kind=CK,len=*),intent(in) :: s1 + character(kind=CK,len=*),intent(in) :: s2 + + character(kind=CK,len=:),allocatable :: tmp !! temporary string for accumulating result + integer(IK) :: i !! counter + integer(IK) :: n !! for accumulating the string + integer(IK) :: ilen !! length of `str` string + integer(IK) :: ilen1 !! length of `s1` string + + if (len(str)>0) then + + tmp = CK_'' ! initialize + ilen1 = len(s1) + + ! . + ! '123ab789' + + do + ilen = len(str) + i = index(str,s1) + if (i>0) then + if (i>1) tmp = tmp//str(1:i-1) + tmp = tmp//s2 ! replace s1 with s2 in new string + n = i+ilen1 ! start of remainder of str to keep + if (n<=ilen) then + str = str(n:ilen) + else + ! done + exit + end if + else + ! done: get remainder of string + tmp = tmp//str + exit + end if + end do + + str = tmp + + end if + + end subroutine replace_string +!***************************************************************************************** + +!***************************************************************************************** +!> +! Decode a string from the ""JSON Pointer"" RFC 6901 format. +! +! It replaces `~1` with `/` and `~0` with `~`. + + pure function decode_rfc6901(str) result(str_out) + + implicit none + + character(kind=CK,len=*),intent(in) :: str + character(kind=CK,len=:),allocatable :: str_out + + str_out = str + + call replace_string(str_out,tilde//CK_'1',slash) + call replace_string(str_out,tilde//CK_'0',tilde) + + end function decode_rfc6901 +!***************************************************************************************** + +!***************************************************************************************** +!> +! Encode a string into the ""JSON Pointer"" RFC 6901 format. +! +! It replaces `~` with `~0` and `/` with `~1`. 
+ + pure function encode_rfc6901(str) result(str_out) + + implicit none + + character(kind=CK,len=*),intent(in) :: str + character(kind=CK,len=:),allocatable :: str_out + + str_out = str + + call replace_string(str_out,tilde,tilde//CK_'0') + call replace_string(str_out,slash,tilde//CK_'1') + + end function encode_rfc6901 +!***************************************************************************************** + + end module json_string_utilities +!***************************************************************************************** +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","src/lib/json-fortran/json_parameters.F90",".F90","9216","145","!***************************************************************************************** +!> author: Jacob Williams +! license: BSD +! +! Other parameters used by JSON-Fortran. +! This is a low-level module not meant to be used by a JSON-Fortran user. +! +!### License +! * JSON-Fortran is released under a BSD-style license. +! See the [LICENSE](https://github.com/jacobwilliams/json-fortran/blob/master/LICENSE) +! file for details. + + module json_parameters + + use json_kinds + + implicit none + + public + + character(kind=CDK,len=*),parameter :: json_ext = '.json' !! JSON file extension + + ! The types of JSON data. + integer(IK),parameter :: json_unknown = 0 !! Unknown JSON data type + !! (see [[json_file_variable_info]] and [[json_info]]) + integer(IK),parameter :: json_null = 1 !! Null JSON data type + !! (see [[json_file_variable_info]] and [[json_info]]) + integer(IK),parameter :: json_object = 2 !! Object JSON data type + !! (see [[json_file_variable_info]] and [[json_info]]) + integer(IK),parameter :: json_array = 3 !! Array JSON data type + !! (see [[json_file_variable_info]] and [[json_info]]) + integer(IK),parameter :: json_logical = 4 !! Logical JSON data type (`logical(LK)`) + !! (see [[json_file_variable_info]] and [[json_info]]) + integer(IK),parameter :: json_integer = 5 !! 
Integer JSON data type (`integer(IK)`) + !! (see [[json_file_variable_info]] and [[json_info]]). + integer(IK),parameter :: json_real = 6 !! Real number JSON data type (`real(RK)`) + !! (see [[json_file_variable_info]] and [[json_info]]) + integer(IK),parameter :: json_string = 7 !! String JSON data type (`character(kind=CK)`) + !! (see [[json_file_variable_info]] and [[json_info]]) + integer(IK),parameter :: json_double = json_real !! Equivalent to `json_real` for + !! backward compatibility. + + !special JSON characters + character(kind=CK,len=*),parameter :: space = CK_' ' !! space character + character(kind=CK,len=*),parameter :: start_object = CK_'{' !! start of a JSON object + character(kind=CK,len=*),parameter :: end_object = CK_'}' !! end of a JSON object + character(kind=CK,len=*),parameter :: start_array = CK_'[' !! start of a JSON array + character(kind=CK,len=*),parameter :: end_array = CK_']' !! end of a JSON array + character(kind=CK,len=*),parameter :: delimiter = CK_',' !! delimiter for JSON + character(kind=CK,len=*),parameter :: colon_char = CK_':' !! colon character for JSON + character(kind=CK,len=*),parameter :: start_array_alt = CK_'(' !! alternate start of JSON array for + !! [[json_get_by_path_default]] + character(kind=CK,len=*),parameter :: end_array_alt = CK_')' !! alternate end of JSON array for + !! [[json_get_by_path_default]] + character(kind=CK,len=*),parameter :: root = achar(36, kind=CK) !! (`$`) root for [[json_get_by_path_default]] + character(kind=CK,len=*),parameter :: this = CK_'@' !! 'this' for [[json_get_by_path_default]] + character(kind=CK,len=*),parameter :: dot = CK_'.' !! path separator for [[json_get_by_path_default]] + character(kind=CK,len=*),parameter :: tilde = CK_'~' !! RFC 6901 escape character + character(kind=CK,len=*),parameter :: single_quote = CK_""'"" !! for JSONPath bracket-notation + character(kind=CK,len=*),parameter :: slash = CK_'/' !! 
JSON special character + character(kind=CK,len=*),parameter :: backslash = CK_'\' !! JSON special character + character(kind=CK,len=*),parameter :: quotation_mark = CK_'""' !! JSON special character + character(kind=CK,len=*),parameter :: bspace = achar(8, kind=CK) !! JSON special character + character(kind=CK,len=*),parameter :: horizontal_tab = achar(9, kind=CK) !! JSON special character + character(kind=CK,len=*),parameter :: newline = achar(10, kind=CK) !! JSON special character + character(kind=CK,len=*),parameter :: formfeed = achar(12, kind=CK) !! JSON special character + character(kind=CK,len=*),parameter :: carriage_return = achar(13, kind=CK) !! JSON special character + + !> default real number format statement (for writing real values to strings and files). + ! Note that this can be overridden by calling [[json_initialize]]. +#ifdef REAL32 + character(kind=CDK,len=*),parameter :: default_real_fmt = '(ss,E17.8E3)' +#elif REAL128 + character(kind=CDK,len=*),parameter :: default_real_fmt = '(ss,E46.35E5)' +#else + character(kind=CDK,len=*),parameter :: default_real_fmt = '(ss,E27.17E4)' +#endif + + character(kind=CK,len=*),parameter :: star = CK_'*' !! for invalid numbers and + !! list-directed real output + +#if defined __GFORTRAN__ + !not parameters due to gfortran bug (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=65141) + character(kind=CK,len=26),protected :: upper = CK_'ABCDEFGHIJKLMNOPQRSTUVWXYZ' !! uppercase characters + character(kind=CK,len=26),protected :: lower = CK_'abcdefghijklmnopqrstuvwxyz' !! lowercase characters +#else + character(kind=CK,len=*),parameter :: upper = CK_'ABCDEFGHIJKLMNOPQRSTUVWXYZ' !! uppercase characters + character(kind=CK,len=*),parameter :: lower = CK_'abcdefghijklmnopqrstuvwxyz' !! lowercase characters +#endif + +#if defined __GFORTRAN__ + !not parameters due to gfortran bug (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=65141) + character(kind=CK,len=4),protected :: null_str = CK_'null' !! 
JSON Null variable string + character(kind=CK,len=4),protected :: true_str = CK_'true' !! JSON logical True string + character(kind=CK,len=5),protected :: false_str = CK_'false' !! JSON logical False string +#else + character(kind=CK,len=*),parameter :: null_str = CK_'null' !! JSON Null variable string + character(kind=CK,len=*),parameter :: true_str = CK_'true' !! JSON logical True string + character(kind=CK,len=*),parameter :: false_str = CK_'false' !! JSON logical False string +#endif + + integer, private :: i_ !! just a counter for `control_chars` array + character(kind=CK,len=*),dimension(32),parameter :: control_chars = & + [(achar(i_,kind=CK),i_=1,31), achar(127,kind=CK)] !! Control characters, possibly in unicode + + !find out the precision of the floating point number system + !and set safety factors + integer(IK),parameter :: rp_safety_factor = 1_IK + integer(IK),parameter :: rp_addl_safety = 2_IK + integer(IK),parameter :: real_precision = rp_safety_factor*precision(1.0_RK) + & + rp_addl_safety + + !Get the number of possible digits in the exponent when using decimal number system + integer(IK),parameter :: maxexp = maxexponent(1.0_RK) + integer(IK),parameter :: minexp = minexponent(1.0_RK) + integer(IK),parameter :: real_exponent_digits = floor( 1_IK + log10( & + real(max(maxexp,abs(maxexp)),& + kind=RK) ) ) + + integer(IK),parameter :: max_numeric_str_len = real_precision + real_exponent_digits + 6_IK + !! 6 = sign + leading 0 + decimal + 'E' + exponent sign + 1 extra + character(kind=CDK,len=*),parameter :: int_fmt = '(ss,I0)' !! minimum width format for integers + + integer(IK),parameter :: max_integer_str_len = 256_IK !! maximum string length of an integer. + !! This is totally arbitrary (any way + !! to get the compiler to tell us this?) + + integer(IK),parameter :: chunk_size = 256_IK !! for allocatable strings: allocate chunks of this size + integer(IK),parameter :: unit2str = -1_IK !! unit number to cause stuff to be + !! 
output to strings rather than files. + !! See 9.5.6.12 in the F2003/08 standard + character(kind=CK,len=*),parameter :: blank_chunk = repeat(space, chunk_size) !! a blank string + + integer(IK),parameter :: seq_chunk_size = 256_IK !! chunk size for reading sequential files + + integer(IK),parameter :: stream_chunk_size = 256_IK !! chunk size for reading stream files + + integer(IK),parameter :: print_str_chunk_size = 1000_IK !! chunk size for writing JSON to a string + + integer(IK),parameter :: pushed_char_size = 10_IK !! size for `pushed_char` + !! array in [[json_core(type)]] + + end module json_parameters +!***************************************************************************************** +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","src/spdyn/pr_fileio.c",".c","2269","108","/******************************************************************/ +#define _FILE_OFFSET_BITS 64 + +#include +#include +#include + +#define MaxFile 128 + +static FILE* Files [MaxFile] = { 0 }; + +/******************************************************************/ + +void pr_fill(FILE* fp, long long *blen) +{ +#define Unit 1024LL + + long long i; + + const char Data[Unit] = { }; + + long long num = *blen / Unit; + long long tail = *blen % Unit; + + for(i = 0; i < num; ++i) + fwrite(Data, Unit, 1, fp); + for(i = 0; i < tail; ++i) + fwrite(&Data[0], 1, 1, fp); +} + +/******************************************************************/ + +void pr_open_(char* filename, long long *blen, int* unit_no, + int fnlen, int mlen) +{ + int i; + for(i = fnlen-1; i >= 0; --i) + if (filename[i] == ' ') + filename[i] = 0; + else + break; + + for(i = 0; i < MaxFile; ++i) + if (Files[i] == 0) + { + *unit_no = i; + + char str[256]; + sprintf(str, ""touch %s"", filename); + system(str); + + Files[i] = fopen(filename, ""rb+""); + if (Files[i] == 0) + { + printf(""Pr_Open_> File open error [%s]\n"", filename); + *unit_no = -1; + return ; + } + + pr_fill(Files[i], blen); + + //printf(""OPENED : 
unit_no:%d %s\n"", *unit_no, filename); + break; + } + + if (i == MaxFile) + *unit_no = -1; +} + +/******************************************************************/ + +void pr_close_(int* unit_no) +{ + if (*unit_no < 0 || *unit_no >= MaxFile) + return ; + fclose(Files[*unit_no]); + Files[*unit_no] = 0; +} + +/******************************************************************/ + +void pr_flen_(int* unit_no, long long* flen) +{ + FILE* fp = Files[*unit_no]; + fseeko(fp, (off_t)0, SEEK_END); + *flen = (long long)ftello(fp); +} + +/******************************************************************/ + +void pr_read_(int* unit_no, long long *pos, unsigned char* b, int *blen) +{ + FILE* fp = Files[*unit_no]; + fseeko(fp, (off_t)*pos, SEEK_SET); + fread(b, *blen, 1, fp); +} + +/******************************************************************/ + +void pr_write_(int* unit_no, long long *pos, unsigned char* b, int *blen) +{ + FILE* fp = Files[*unit_no]; + fseeko(fp, (off_t)*pos, SEEK_SET); + fwrite(b, *blen, 1, fp); +} + +/******************************************************************/ +","C" +"Genesis","genesis-release-r-ccs/genesis","src/spdyn/gpu_common.h",".h","1170","37","#ifndef _GPU_COMMON_H_ +#define _GPU_COMMON_H_ + +// #define DEBUG + +#define ERR_EQ(X,Y) do { if ((X) == (Y)) { \ + fprintf(stderr,""Error in %s at %s:%d\n"",__func__,__FILE__,__LINE__); \ + exit(-1);}} while(0) + +#define ERR_NE(X,Y) do { if ((X) != (Y)) { \ + fprintf(stderr,""Error in %s at %s:%d\n"",__func__,__FILE__,__LINE__); \ + exit(-1);}} while(0) + +#define CUDA_CALL(X) ERR_NE((X),cudaSuccess) +#define CUDA_ERROR() \ + do { \ + cudaError_t cerr = cudaGetLastError(); \ + if (cerr != cudaSuccess) { \ + const char *ptr = cudaGetErrorString( cerr ); \ + fprintf(stderr,""Error in %s at %s:%d, %s\n"",__func__,__FILE__,__LINE__,ptr); \ + exit(-1); \ + } \ + } while (0); + +#define DIVCEIL(X,Y) ((X+Y-1)/Y) + +#define CALIDX2(Y,NY,Z,NZ) ((Y)+((NY)*(Z))) +#define CALIDX3(X,NX,Y,NY,Z,NZ) 
CALIDX2(X,NX, CALIDX2(Y,NY,Z,NZ),(NY)*(NZ)) +#define CALIDX4(W,NW,X,NX,Y,NY,Z,NZ) CALIDX3(W,NW,X,NX, CALIDX2(Y,NY,Z,NZ),(NY)*(NZ)) + +#define MAX(X,Y) ((X)>(Y) ? (X) : (Y)) +#define MIN(X,Y) ((X)<(Y) ? (X) : (Y)) +#define ABS(X,Y) ((X)>(Y) ? (X)-(Y) : (Y)-(X)) +#define NINT(X) ((X)>=0 ? (int)((X)+0.5) : (int)((X)-0.5)) + +#endif // _GPU_COMMON_H_ +","Unknown" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_vib.py",".py","12251","411","#!/usr/bin/env python +# coding: utf-8 + +# +# A python script for GENESIS regression tests +# +# (c) Copyright 2022 RIKEN. All rights reserved. +# + +import subprocess +import os +import os.path +import sys +import copy +import random +import glob +import shutil +import re +import math +from genesis import * + +############### DEFINITION ################################## +class Minfo(object): + def __init__(self): # initialization + self.is_passed = False + self.freq_data = [] + self.vec_data = [] + + def read(self, filename): + fid = open(filename, 'r') + text_list = fid.readlines() + fid.close() + self.parse(text_list) + + def parse(self, text_list): + patternVIB = re.compile('Vibrational Data') + patternFREQ = re.compile('Vibrational Frequency') + + num_data_per_line = 5 + + x = 0 + for x in range(0, len(text_list)): + line = text_list[x] + result = patternVIB.search(line) + if result is not None: + break + + x+=1 + line = text_list[x] + #print str(x)+"": ""+line + num_domain = 0 + result = re.search(""Domain"", line) + if result: + x+=1 + line = text_list[x] + num_domain = int(text_list[x]) + + nd = 0 + while nd < num_domain: + xx = x + for xx in range(x, len(text_list)): + line = text_list[xx] + result = patternFREQ.search(line) + if result: + break + + x = xx + 1 + ndata = int(text_list[x]) + nline = ndata // num_data_per_line + + if ndata % num_data_per_line != 0: + nline += 1 + + #print ""ndata = %d"" % ndata + #print ""nline = %d"" % nline + + nl = 0 + while nl < nline: + x += 1 + data_line = 
text_list[x].split("","") + for i in data_line: + #print str(nl)+"" ""+i + self.freq_data.append((float(i))) + nl += 1 + + x += 1 + nm = 0 + while nm < ndata: + #print ""mode = %d"" % nm + x = x + 2 + nl = 0 + tmp = [] + while nl < nline: + x += 1 + data_line = text_list[x].split("","") + for i in data_line: + #print str(nl)+"" ""+i + tmp = tmp + [(float(i))] + nl += 1 + self.vec_data.append(tmp) + nm += 1 + + nd += 1 + + def test_diff(self, obj, tolerance): # compare energies + is_failure = False + mode_failure = [] + vec_failure = [] + + for imode in range(0, len(self.freq_data)): + d = abs(self.freq_data[imode] - obj.freq_data[imode]) + if abs(self.freq_data[imode]) < 1e4: #min log is 1e-4 + ratio=d + else: + ebase=max(abs(self.freq_data[imode]),1.0) + ratio = d/ebase + tolerance2 = tolerance + + if abs(self.freq_data[imode]) < 1e4: + tolerance2 = tolerance2*1e4 + if ratio > tolerance2: + is_failure = True + mode_failure.append(imode) + + for ivec in range(0, len(self.vec_data[imode])): + d = abs(self.vec_data[imode][ivec]) - abs(obj.vec_data[imode][ivec]) + + if abs(self.vec_data[imode][ivec]) < 1e4: #min log is 1e-4 + ratio=d + else: + ebase=max(abs(self.vec_data[imode][ivec]),1.0) + ratio = d/ebase + + if ratio > tolerance: + is_failure = True + vec_failure.append([imode, ivec]) + + if is_failure: + self.is_passed = False + + if len(mode_failure) > 0: + print(""Failure in frequency (tolerance = %4.2e)"" % tolerance) + for count in range(0, len(mode_failure)): + imode = mode_failure[count] + print(""mode %d"" % (imode+1)) + + sys.stdout.write(""< "") + sys.stdout.write(""%14s"" % str(self.freq_data[imode]).rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""> "") + sys.stdout.write(""%14s"" % str(obj.freq_data[imode]).rjust(14)) + sys.stdout.write(""\n\n"") + + if len(vec_failure) > 0: + print(""Failure in vector (tolerance = %4.2e)"" % tolerance) + for count in range(0, len(vec_failure)): + imode = vec_failure[count][0] + ivec = vec_failure[count][1] 
+ print(""mode %d"" % (imode+1)) + print(""element %d"" % (ivec+1)) + + sys.stdout.write(""< "") + sys.stdout.write(""%14s"" % str(self.vec_data[imode][ivec]).rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""> "") + sys.stdout.write(""%14s"" % str(obj.vec_data[imode][ivec]).rjust(14)) + sys.stdout.write(""\n\n"") + + else: + self.is_passed = True + print(""Passed (tolerance = %4.2e(freq, vec))"" % (tolerance)) + +############### DEFINITION ################################## +def getdirs(path): + test_dirs = [] + for test_dir in os.listdir(path): + test_dir_path = os.path.join(path,test_dir) + #print ""%s"" % test_dir_path + if os.path.isdir(test_dir_path): + if os.path.exists(test_dir_path + ""/inp"") and os.path.exists(test_dir_path + ""/ref1""): + test_dirs.append(test_dir_path) + test_dirs.sort() + return test_dirs + +############### MAIN ######################################## + +###### initialization + +#os.environ[""OMP_NUM_THREADS""] = ""1"" +tolerance = 1.0e-8 # relative energy difference (diff/abs(e)) +tolerance_single = 3.0e-5 # relative energy difference (diff/abs(e)) +virial_ratio = 1.0e2 + +tolerance_virial = tolerance*virial_ratio + +ipassed = 0 +ifailed = 0 +iaborted = 0 +itried = 0 +num = 0 + +is_atdyn = False +is_spdyn = False +is_parallelio = False +is_fugaku = False + +test_dirs = [] + +###### parse command line + +if len(sys.argv) == 1: + genesis_command = 'mpirun -np 8 atdyn' +elif len(sys.argv) == 2: + genesis_command = sys.argv[1] +else: + genesis_command = sys.argv[1] + is_number = re.compile(r'^[+-]?(\d*\.\d+|\d+\.?\d*)([eE][+-]?\d+|)\Z') + if is_number.match(sys.argv[2]): + tolerance = float(sys.argv[2]) + elif sys.argv[2] == ""parallel_io"": + is_parallelio = True + elif sys.argv[2] == ""fugaku"": + mpiexec_command = genesis_command.split(' ',2)[0] + genesis_path = genesis_command.split(' ',2)[1] + is_fugaku = True + if len(sys.argv) == 4: + if is_number.match(sys.argv[3]): + tolerance = float(sys.argv[3]) + else: + test_dirs 
= sys.argv[2:] + +if (genesis_command == ""-h"") or (genesis_command == ""--help""): + print("""""" usage: + $ ./test_vib.py [""genesis command""] [parallel_io or tolerance_value or directories] + + examples: + # run tests using the default command (""mpirun -np 8 atdyn"") + $ ./test_vib.py + + # run atdyn tests + $ ./test_vib.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" + + # run with specfic tolerance value + $ ./test_vib.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" 0.1 + + """""") + sys.exit(3) + +genesis_command_split = genesis_command.split() +genesis_command_last = genesis_command_split[-1] +if not os.path.exists(os.path.expanduser(genesis_command_last)): + print(""Error: %s does not exist"" % genesis_command_last) + sys.exit(3) + +if (is_fugaku): + genesis_mpi_number = 8 +else: + genesis_mpi_number = genesis_command.split()[-2] + if not int(genesis_mpi_number)%4 == 0: + print(""Error: %d should be multiplier of 4"" % int(genesis_mpi_number)) + sys.exit(3) + +# if given path is relpath, change it to abspath +genesis_command_split[-1] = os.path.abspath(os.path.expanduser(genesis_command_last)) +genesis_command = "" "".join(genesis_command_split) + +if genesis_command[-5:] == ""atdyn"": + is_atdyn = True +elif genesis_command[-5:] == ""spdyn"": + is_spdyn = True + +if is_spdyn: + print(""Error: test_vib for spdyn is not available"") + sys.exit(3) + +###### setup test directories + +if is_atdyn: + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_vib"") + +for dir in test_dirs: + if not os.path.exists(dir): + print(""Error: %s, this test directory does not exist"" % dir) + sys.exit(3) + +###### run tests +if (is_atdyn or is_spdyn): + print(""======================================================================="") + print("" Regression tests for VIBRATION"") + print(""======================================================================="") + + cwdname = os.getcwd() + for test_each in test_dirs: + os.chdir(cwdname) + dirname = 
test_each + if not os.path.isdir(dirname) : + continue + os.chdir(dirname) + + if (""VIB_ADDATOM2_g09"" in dirname) : + continue + + if (""VIB_ADDATOM1_g09"" in dirname) : + continue + + itried = itried + 1 + + # run MD + print(""-----------------------------------------------------------------------"") + print(""Running %s..."" % (dirname + ""/"")) + + for fl in glob.glob(""test*""): + os.remove(fl) + + minfo=""vib.minfo"" + if os.path.exists(minfo): + os.remove(minfo) + + minfodir=""minfo.files"" + if os.path.exists(minfodir): + for fl in glob.glob(minfodir + ""/*.minfo""): + os.remove(fl) + + inputname = ""inp"" + outname = ""log"" + if (is_fugaku): + commandline = '%s sh -c \""%s %s 1> %s 2> error\""' % (mpiexec_command, genesis_path, inputname, outname) + else: + commandline = '%s %s 1> %s 2> error' % (genesis_command, inputname, outname) + print(""$ %s"" % commandline) + status = subprocess.getstatusoutput(commandline) + + if (status[0] > 0) and (status[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + + # parse the result + #if status[0] == 0: + print() + print(""Checking %s"" % test_each) + print() + + num = 1 + ipassed_replica = 0 + ifailed_replica = 0 + while num < 9: + ref = Genesis() + refname = ""ref%d"" % num + ref.read(refname) + testname = ""test%d"" % num + test = Genesis() + test.read(testname) + + # check the result + print() + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + #ref.test_diff_energies(test, tolerance) + ref.test_diff(test, tolerance, tolerance_virial) + print() + + if ref.is_passed: + ipassed_replica = ipassed_replica + 1 + else: + ifailed_replica = ifailed_replica + 1 + + num += 1 + # post-cleaning + #os.remove(testname) + + ref = Minfo() + refname = ""ref.minfo"" + ref.read(refname) + + test = Minfo() + testname = ""vib.minfo"" + test.read(testname) + + # check the result + print() + print(""Checking diff between %s and %s..."" % (refname, testname)) + 
print() + + ref.test_diff(test, tolerance) + + if (ifailed_replica == 0 and ref.is_passed): + ipassed = ipassed + 1 + else: + ifailed = ifailed + 1 + +###### finalization +if (itried > 0): + print(""-----------------------------------------------------------------------"") + print(""Passed %d / %d"" % (ipassed, itried)) + print(""Failed %d / %d"" % (ifailed, itried)) + print(""Aborted %d / %d"" % (iaborted, itried)) + print(""-----------------------------------------------------------------------"") + +if iaborted > 0: + sys.exit(2) +elif ifailed > 0: + sys.exit(1) +else: + sys.exit(0) + +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/charmm.py",".py","29558","599","#!/usr/bin/python +# coding: utf-8 + +# +# A parser script for CHARMM output style +# +# (c) Copyright 2022 RIKEN. All rights reserved. +# + +import re +import sys +import copy + +############### DEFINITION #################################################### +class Charmm(object): + def __init__(self): # initialization + # public attributes + self.dict_text = {} + self.dict_data = {} + self.is_passed = False + + def delete_last(self): + for key in list(self.dict_data.keys()): + self.dict_text[key].pop() + self.dict_data[key].pop() + + def delete_first(self): + for key in list(self.dict_data.keys()): + self.dict_text[key].pop(0) + self.dict_data[key].pop(0) + + def extract_first(self): + for key in list(self.dict_data.keys()): + self.dict_text[key] = [self.dict_text[key][0]] + self.dict_data[key] = [self.dict_data[key][0]] + + def read(self, filename): + fid = open(filename, 'r') + text_list = fid.readlines() + fid.close() + self.parse(text_list) + + def parse(self, text_list): + # parse titles + title = [] + patternMD = re.compile(' CHARMM>') #for CHARMM + patternMD2 = re.compile('Perform Molecular Dynamics Simulation|Perform Energy Minimization') #for GENESIS + patternLABEL = re.compile('DYNA.*:') + patternDYNA = re.compile('DYNA.*>') + patternCONSTR = re.compile('DYNA 
CONSTR:') + is_md = False + for line in text_list: + if is_md: + result = patternDYNA.search(line) + if result is not None: + break + result = patternCONSTR.search(line) + if result is not None: + continue + result = patternLABEL.search(line) + if result is None: + continue + else: # patternLABEL found + line_sub = patternLABEL.sub('', line.rstrip('\n')) + line_split = line_sub.split() + title = title + line_split + else: + result = patternMD.search(line) + if result is not None: + is_md = True + result = patternMD2.search(line) + if result is not None: + is_md = True + + # parse data + text = [] + data = [] + data_each = [] + is_md = False + patternMD = re.compile(' CHARMM>') #for CHARMM + patternMD2 = re.compile('Perform Molecular Dynamics Simulation|Perform Energy Minimization') #for GENESIS + patternDYNA = re.compile('DYNA.*>') + patternCONSTR = re.compile('DYNA CONSTR>') + #patternDATA = re.compile('(\S+?\.\S{5}|\d+)') + #patternDATA = re.compile('([\s-]+[\d\.]+)') + patternDATA = re.compile('(-?\d+?\.\d{4,5}|\d+|NaN)') + for line in text_list: + if is_md: + result = patternDYNA.search(line) + if result is None: + continue + else: # patternDYNA found + result = patternCONSTR.search(line) + if result is not None: + continue + line_sub = patternDYNA.sub('', line.rstrip('\n')) + line_split = patternDATA.findall(line_sub) + data_each = data_each + line_split + # print line + # print data_each + # print + if len(data_each) == len(title): + text.append(data_each) + # data.append([float(item) for item in data_each]) + tmp = [] + for i in range(len(data_each)): + if data_each[i][-1].isdigit(): # number ? + tmp = tmp + [(float(data_each[i]))] + else: # Not a Number ! 
+ tmp = tmp + [(float(999999))] + data.append(tmp) + data_each = [] + else: + result = patternMD.search(line) + if result is not None: + is_md = True + result = patternMD2.search(line) + if result is not None: + is_md = True + + # remove duplicate steps + text2 = []; + data2 = []; + steps = []; + for i in range(len(text)): + if not data[i][0] in steps: + steps.append(data[i][0]) + text2.append(text[i]) + data2.append(data[i]) + + # append to the dictionary + self.dict_append(title, text2, data2) + + def dict_append(self, title, text, data): + if len(self.dict_text) == 0: + self.dict_text = dict.fromkeys(title, []) + if len(self.dict_data) == 0: + self.dict_data = dict.fromkeys(title, []) + text_transpose = list(map(list, list(zip(*text)))) + data_transpose = list(map(list, list(zip(*data)))) + for i in range(len(title)): + self.dict_text[title[i]] = self.dict_text[title[i]] + text_transpose[i] + self.dict_data[title[i]] = self.dict_data[title[i]] + data_transpose[i] + + # count IMAGE terms because these are not written in GENESIS output + if 'IMNBvdw' in self.dict_data and 'VDWaals' in self.dict_data: + nstep = len(self.dict_data['VDWaals']) + for istep in range(nstep): + self.dict_data['VDWaals'][istep] = self.dict_data['VDWaals'][istep] + self.dict_data['IMNBvdw'][istep] + self.dict_text['VDWaals'][istep] = ""%f"" % self.dict_data['VDWaals'][istep] + # del self.dict_data['IMNBvdw'] + # del self.dict_text['IMNBvdw'] + if 'IMELec' in self.dict_data and 'ELEC' in self.dict_data: + nstep = len(self.dict_data['ELEC']) + for istep in range(nstep): + self.dict_data['ELEC'][istep] = self.dict_data['ELEC'][istep] + self.dict_data['IMELec'][istep] + self.dict_text['ELEC'][istep] = ""%f"" % self.dict_data['ELEC'][istep] + # del self.dict_data['IMELec'] + # del self.dict_text['IMELec'] + + # if self.dict_data.has_key('IMHBnd'): + # del self.dict_data['IMHBnd'] + # del self.dict_text['IMHBnd'] + # if self.dict_data.has_key('RXNField'): + # del self.dict_data['RXNField'] + # 
del self.dict_text['RXNField'] + # if self.dict_data.has_key('EXTElec'): + # del self.dict_data['EXTElec'] + # del self.dict_text['EXTElec'] + + # count EWALD terms + if 'EWKSum' in self.dict_data and 'ELEC' in self.dict_data: + nstep = len(self.dict_data['ELEC']) + for istep in range(nstep): + self.dict_data['ELEC'][istep] = self.dict_data['ELEC'][istep] + self.dict_data['EWKSum'][istep] + self.dict_text['ELEC'][istep] = ""%f"" % self.dict_data['ELEC'][istep] + if 'EWSElf' in self.dict_data and 'ELEC' in self.dict_data: + nstep = len(self.dict_data['ELEC']) + for istep in range(nstep): + self.dict_data['ELEC'][istep] = self.dict_data['ELEC'][istep] + self.dict_data['EWSElf'][istep] + self.dict_text['ELEC'][istep] = ""%f"" % self.dict_data['ELEC'][istep] + if 'EWEXcl' in self.dict_data and 'ELEC' in self.dict_data: + nstep = len(self.dict_data['ELEC']) + for istep in range(nstep): + self.dict_data['ELEC'][istep] = self.dict_data['ELEC'][istep] + self.dict_data['EWEXcl'][istep] + self.dict_text['ELEC'][istep] = ""%f"" % self.dict_data['ELEC'][istep] + if 'EWQCor' in self.dict_data and 'ELEC' in self.dict_data: + nstep = len(self.dict_data['ELEC']) + for istep in range(nstep): + self.dict_data['ELEC'][istep] = self.dict_data['ELEC'][istep] + self.dict_data['EWQCor'][istep] + self.dict_text['ELEC'][istep] = ""%f"" % self.dict_data['ELEC'][istep] + if 'EWUTil' in self.dict_data and 'ELEC' in self.dict_data: + nstep = len(self.dict_data['ELEC']) + for istep in range(nstep): + self.dict_data['ELEC'][istep] = self.dict_data['ELEC'][istep] + self.dict_data['EWUTil'][istep] + self.dict_text['ELEC'][istep] = ""%f"" % self.dict_data['ELEC'][istep] + + def test_diff(self, obj, tolerance): # compare energies + # test MD steps + is_failure = False + dict_failure = dict.fromkeys(list(self.dict_data.keys()), False) + nstep_failure = 0 + + nstep = len(self.dict_data['Step']) + for istep in range(nstep): + for key in list(self.dict_data.keys()): + d = abs(self.dict_data[key][istep] 
- obj.dict_data[key][istep]) + if d > tolerance: + is_failure = True + dict_failure[key] = True + nstep_failure = istep + if is_failure: + break + + if is_failure: + self.is_passed = False + print(""Failure at step %d (tolerance = %4.2e)"" % (self.dict_data['Step'][nstep_failure], tolerance)) + nstep_max = min([nstep_failure + 3, nstep]) + for istep in range(nstep_failure, nstep_max): + print(""Step %d"" % (self.dict_data['Step'][istep])) + + sys.stdout.write("" "") + for key in list(self.dict_data.keys()): + if dict_failure[key]: + sys.stdout.write(""%14s"" % key.rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""< "") + for key in list(self.dict_data.keys()): + if dict_failure[key]: + sys.stdout.write(""%14s"" % self.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""> "") + for key in list(self.dict_data.keys()): + if dict_failure[key]: + sys.stdout.write(""%14s"" % obj.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n\n"") + else: + self.is_passed = True + print(""Passed (tolerance = %4.2e)"" % (tolerance)) + + def test_diff_energies(self, obj, tolerance): # compare energies ignoring HFCKe and PRESSE terms + # test MD steps + is_failure = False + dict_failure = dict.fromkeys(list(self.dict_data.keys()), False) + nstep_failure = 0 + + nstep = len(self.dict_data['Step']) + for istep in range(nstep): + for key in list(self.dict_data.keys()): + if ((key == ""Time"") or (key == ""TOTEner"") or (key == ""TOTKe"") or (key == ""ENERgy"") or (key == ""TEMPerature"") or (key == ""GRMS"") or (key == ""BONDs"") or (key == ""ANGLes"") or (key == ""UREY-b"") or (key == ""DIHEdrals"") or (key == ""IMPRopers"") or (key == ""CMAPs"") or (key == ""VDWaals"") or (key == ""ELEC"")): + d = abs(self.dict_data[key][istep] - obj.dict_data[key][istep]) + if d > tolerance: + is_failure = True + dict_failure[key] = True + nstep_failure = istep + if is_failure: + break + + if is_failure: + self.is_passed = False + print(""Failure at step %d 
(tolerance = %4.2e)"" % (self.dict_data['Step'][nstep_failure], tolerance)) + nstep_max = min([nstep_failure + 3, nstep]) + for istep in range(nstep_failure, nstep_max): + print(""Step %d"" % (self.dict_data['Step'][istep])) + + sys.stdout.write("" "") + for key in list(self.dict_data.keys()): + if dict_failure[key]: + sys.stdout.write(""%14s"" % key.rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""< "") + for key in list(self.dict_data.keys()): + if dict_failure[key]: + sys.stdout.write(""%14s"" % self.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""> "") + for key in list(self.dict_data.keys()): + if dict_failure[key]: + sys.stdout.write(""%14s"" % obj.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n\n"") + else: + self.is_passed = True + print(""Passed (tolerance = %4.2e)"" % (tolerance)) + + def calc_relative_error(self): + print(""not available."") + +############### TEST ########################################################## +if __name__ == '__main__': + + # test of 13 digits charmm format generated by GENESIS + text = '''[STEP4] Compute Single Point Energy for Molecules + +Output_Energy> CHARMM_Style is used + +DYNA DYN: Step Time TOTEner TOTKe ENERgy TEMPerature +DYNA PROP: GRMS HFCTote HFCKe EHFCor VIRKe +DYNA INTERN: BONDs ANGLes UREY-b DIHEdrals IMPRopers +DYNA EXTERN: VDWaals ELEC HBONds ASP USER +DYNA IMAGES: IMNBvdw IMELec IMHBnd RXNField EXTElec +DYNA EWALD: EWKSum EWSElf EWEXcl EWQCor EWUTil +DYNA PRESS: VIRE VIRI PRESSE PRESSI VOLUme + ---------- --------- --------- --------- --------- --------- +DYNA> 0 0.00000 0.00000 0.00000 0.00000 0.00000 +DYNA PROP> 0.00000 0.00000 0.00000 0.00000 0.00000 +DYNA INTERN> 585.70255 1228.72913 167.39536 921.88694 102.07776 +DYNA EXTERN> 7826.04840 -82787.99150 0.00000 0.00000 0.00000 +DYNA IMAGES> 0.00000 0.00000 0.00000 0.00000 0.00000 +DYNA EWALD> 0.00000 0.00000 0.00000 0.00000 0.00000 +DYNA PRESS> 0.00000 0.00000 0.00000 0.00000 0.00000 + ---------- 
--------- --------- --------- --------- --------- + +[STEP5] Perform Molecular Dynamics Simulation + +Initial_Velocity> Generate initial velocities + iseed = 113420321 + temperature = 298.150 + +Stop_Trans_Rotation> Information about center of mass + position = 32.818221 32.586474 33.117938 + velocity = -0.10577230E-02 -0.21867632E-02 0.12647128E-02 + angul_momentum = -2103.9577 -8227.0787 -709.00038 + kinetic_ene = 0.54193315 + +Stop_Trans_Rotation> Translational motion was removed + +Output_Energy> CHARMM_Style is used + +DYNA DYN: Step Time TOTEner TOTKe ENERgy TEMPerature +DYNA PROP: GRMS HFCTote HFCKe EHFCor VIRKe +DYNA INTERN: BONDs ANGLes UREY-b DIHEdrals IMPRopers +DYNA EXTERN: VDWaals ELEC HBONds ASP USER +DYNA PRESS: VIRE VIRI PRESSE PRESSI VOLUme + ---------- --------- --------- --------- --------- --------- +DYNA> 0 0.00000 -57645.31039 14291.91692 -71937.22731 297.30761 +DYNA PROP> 13.96023 0.00000 14325.47320 0.00000 12961.05092 +DYNA INTERN> 587.17584 1227.87650 156.01780 921.78016 101.45686 +DYNA EXTERN> 7830.32767 -82761.86215 0.00000 0.00000 0.00000 +DYNA PRESS> 0.00000 -8640.70061 2711.92949 252.53539 240990.21157 + ---------- --------- --------- --------- --------- --------- +DYNA> 1 0.00100 -57642.77801 14292.39734 -71935.17535 297.31760 +DYNA PROP> 13.95558 0.00000 14325.86224 0.00000 12692.20822 +DYNA INTERN> 586.23026 1230.73940 156.30088 920.53865 103.33305 +DYNA EXTERN> 7822.17684 -82754.49444 0.00000 0.00000 0.00000 +DYNA PRESS> 0.00000 -8461.47215 2712.02065 303.64017 240990.21157 + ---------- --------- --------- --------- --------- --------- +''' + + ene = Charmm() + + # test parse + ene.parse(text.split(""\n"")) + assert len(ene.dict_data) == 26 + assert ene.dict_text['ELEC'][0].lstrip() == ""-82761.86215"" + assert ((ene.dict_data['ELEC'][0] - 82761.86215) < 10**(-3)) + + # test diff + ene2 = copy.deepcopy(ene) + ene2.dict_text['ELEC'][0] = ""-82762.92832"" + ene2.dict_data['ELEC'][0] = -82762.92832 + ene2.dict_text['DIHEdrals'][0] = 
""921.79238"" + ene2.dict_data['DIHEdrals'][0] = 921.79238 + ene.test_diff(ene2, 0.0001) + + # test of 14 digits charmm format generated by GENESIS + text = '''[STEP4] Compute Single Point Energy for Molecules + +Output_Energy> CHARMM_Style is used + +DYNA DYN: Step Time TOTEner TOTKe ENERgy TEMPerature +DYNA PROP: GRMS HFCTote HFCKe EHFCor VIRKe +DYNA INTERN: BONDs ANGLes UREY-b DIHEdrals IMPRopers +DYNA CROSS: CMAPs +DYNA EXTERN: VDWaals ELEC HBONds ASP USER +DYNA IMAGES: IMNBvdw IMELec IMHBnd RXNField EXTElec +DYNA EWALD: EWKSum EWSElf EWEXcl EWQCor EWUTil +DYNA PRESS: VIRE VIRI PRESSE PRESSI VOLUme + ---------- --------- --------- --------- --------- --------- +DYNA> 0 0.00000 0.00000 0.00000 0.00000 0.00000 +DYNA PROP> 0.00000 0.00000 0.00000 0.00000 0.00000 +DYNA INTERN> 50626.48594 41901.36860 718.14958 13934.58410 369.13107 +DYNA CROSS> -4835.31122 +DYNA EXTERN> 180164.31137-1681535.91075 0.00000 0.00000 0.00000 +DYNA IMAGES> 0.00000 0.00000 0.00000 0.00000 0.00000 +DYNA EWALD> 0.00000 0.00000 0.00000 0.00000 0.00000 +DYNA PRESS> 0.00000 0.00000 0.00000 0.00000 0.00000 + ---------- --------- --------- --------- --------- --------- + +[STEP5] Perform Molecular Dynamics Simulation + +Initial_Velocity> Generate initial velocities + iseed = 314159 + temperature = 303.000 + +Stop_Trans_Rotation> Information about center of mass + position = 90.975571 73.945738 69.101611 + velocity = -0.10096501E-02 -0.68857264E-03 -0.41584577E-03 + angul_momentum = 33445.157 -74978.687 -51807.025 + kinetic_ene = 1.6889621 + +Stop_Trans_Rotation> Translational motion was removed +Stop_Trans_Rotation> Rotational motion was removed + +Output_Energy> CHARMM_Style is used + +DYNA DYN: Step Time TOTEner TOTKe ENERgy TEMPerature +DYNA PROP: GRMS HFCTote HFCKe EHFCor VIRKe +DYNA INTERN: BONDs ANGLes UREY-b DIHEdrals IMPRopers +DYNA CROSS: CMAPs +DYNA EXTERN: VDWaals ELEC HBONds ASP USER +DYNA PRESS: VIRE VIRI PRESSE PRESSI VOLUme + ---------- --------- --------- --------- --------- 
--------- +DYNA> 0 0.00000-1102973.88152 295683.30978-1398657.19130 303.08506 +DYNA PROP> 0.68936 0.00000 295692.24304 0.00000 58224.74527 +DYNA INTERN> 50626.48594 41901.36860 718.14958 13934.58410 369.13107 +DYNA CROSS> -4835.31122 +DYNA EXTERN> 180164.31137-1681535.91075 0.00000 0.00000 0.00000 +DYNA PRESS> 0.00000 -38816.49684 0.00000 3497.82827 3104392.80981 + ---------- --------- --------- --------- --------- --------- +DYNA> 10 0.01000-1102171.72790 199241.40716-1301413.13506 204.22896 +DYNA PROP> 6.73354 0.00000 200056.67939 0.00000 -231313.62819 +DYNA INTERN> 50275.97200 52933.75308 2489.77467 15630.61920 1476.24534 +DYNA CROSS> -4679.64595 +DYNA EXTERN> 191984.83464-1611524.68804 0.00000 0.00000 0.00000 +DYNA PRESS> 0.00000 154209.08546 0.00000 6342.19102 3104392.80981 + ---------- --------- --------- --------- --------- --------- +Update_Pairlist_Pbc> Search Nonbonded Interactions +within PairListDist + num_nb15 = 124855668 + +DYNA> 20 0.02000-1101503.89915 177696.55379-1279200.45294 182.14478 +DYNA PROP> 8.59759 0.00000 179193.42738 0.00000 -320688.59931 +DYNA INTERN> 59219.07638 59489.86932 2503.37341 15329.32038 802.20635 +DYNA CROSS> -4618.48371 +DYNA EXTERN> 215022.87035-1626948.68543 0.00000 0.00000 0.00000 +DYNA PRESS> 0.00000 213792.39954 0.00000 7341.34682 3104392.80981 + ---------- --------- --------- --------- --------- --------- +''' + + ene = Charmm() + + # test parse + ene.parse(text.split(""\n"")) + assert len(ene.dict_data) == 27 + assert ene.dict_text['ELEC'][0].lstrip() == ""-1681535.91075"" + + # test diff + ene2 = copy.deepcopy(ene) + ene2.dict_text['ELEC'][0] = ""-1681535.93333"" + ene2.dict_data['ELEC'][0] = -1681535.93333 + ene.test_diff(ene2, 0.0001) + + # test of original charmm format generated by CHARMM + text = ''' CHARMM> !dynamics LEAP VERLET nstep 1 timestp 0.001 - + CHARMM> dynamics VV2 VERLET nstep 1 timestp 0.001 - + CHARMM> firstt 0.0 tstruc 0.0 iseed 314159 - + CHARMM> inbfrq 1 nprint 1 + IUNREA = -1 IUNWRI = -1 IUNOS 
= -1 + IUNCRD = -1 IUNVEL = -1 KUNIT = -1 + + NONBOND OPTION FLAGS: + ELEC VDW ATOMs CDIElec SHIFt VATOm VSWItch + BYGRoup NOEXtnd NOEWald + CUTNB = 12.000 CTEXNB =999.000 CTONNB = 8.000 CTOFNB = 10.000 + WMIN = 1.500 WRNMXD = 0.500 E14FAC = 1.000 EPS = 1.000 + NBXMOD = 5 + There are 6401815 atom pairs and 34709 atom exclusions. + There are 0 group pairs and 2149 group exclusions. + NSTEP = 1 NSAVC = 10 NSAVV = 10 + ISCALE = 0 ISCVEL = 0 IASORS = 0 + IASVEL = 1 ICHECW = 1 NTRFRQ = 0 + IHTFRQ = 0 IEQFRQ = 0 NPRINT = 1 + INBFRQ = 1 IHBFRQ = 0 IPRFRQ = 1 + ILBFRQ = 50 IMGFRQ = 50 ISEED = 314159 + ISVFRQ = 0 NCYCLE = 1 NSNOS = 10 + FIRSTT = 0.000 TEMINC = 5.000 TSTRUC = 0.000 + FINALT = 298.150 TWINDH = 10.000 TWINDL = -10.000 + + TIME STEP = 2.04548E-02 AKMA 1.00000E-03 PS + + + SHAKE TOLERANCE = 0.10000E-07 + NUMBER OF DEGREES OF FREEDOM = 48387 + + SEED FOR RANDOM NUMBER GENERATOR IS 314159 + GAUSSIAN OPTION IS 1 + VELOCITIES ASSIGNED AT TEMPERATURE = 0.0000 + + DETAILS ABOUT CENTRE OF MASS + POSITION : -3.67788895E-02 -7.52627313E-03 -9.06203823E-03 + VELOCITY : 0.0000000 0.0000000 0.0000000 + ANGULAR MOMENTUM : 0.0000000 0.0000000 0.0000000 + KINETIC ENERGY : 0.0000000 +DYNA DYN: Step Time TOTEner TOTKe ENERgy TEMPerature +DYNA PROP: GRMS HFCTote HFCKe EHFCor VIRKe +DYNA INTERN: BONDs ANGLes UREY-b DIHEdrals IMPRopers +DYNA EXTERN: VDWaals ELEC HBONds ASP USER +DYNA IMAGES: IMNBvdw IMELec IMHBnd RXNField EXTElec +DYNA PRESS: VIRE VIRI PRESSE PRESSI VOLUme + ---------- --------- --------- --------- --------- --------- +DYNA> 0 0.00000 -75725.03876 0.00000 -75725.03876 0.00000 +DYNA PROP> 14.57730 0.00000 0.00000 0.00000 9793.81757 +DYNA INTERN> 587.17584 1227.87650 156.01780 921.78016 101.45686 +DYNA EXTERN> 6665.06115 -74035.30372 0.00000 0.00000 0.00000 +DYNA IMAGES> 998.43105 -12347.53442 0.00000 0.00000 0.00000 +DYNA PRESS> -766.16674 -5763.04497 217.99525 -1639.74283 240990.21157 + ---------- --------- --------- --------- --------- --------- +DYNA> 1 0.00100 
-75726.15470 130.81846 -75856.97316 2.72101 +DYNA PROP> 14.52245 0.00000 0.00000 0.00000 9790.85358 +DYNA INTERN> 570.25760 1198.54478 151.55814 919.37130 99.50939 +DYNA EXTERN> 6655.52114 -74091.33853 0.00000 0.00000 0.00000 +DYNA IMAGES> 997.13467 -12357.53166 0.00000 0.00000 0.00000 +DYNA PRESS> -630.42946 -5896.80626 179.37431 -1652.98728 240990.21157 + ---------- --------- --------- --------- --------- --------- + + * * * AVERAGES FOR THE LAST 1 STEPS +AVER DYN: Step Time TOTEner TOTKe ENERgy TEMPerature +AVER PROP: GRMS HFCTote HFCKe EHFCor VIRKe +AVER INTERN: BONDs ANGLes UREY-b DIHEdrals IMPRopers +AVER EXTERN: VDWaals ELEC HBONds ASP USER +AVER IMAGES: IMNBvdw IMELec IMHBnd RXNField EXTElec +AVER PRESS: VIRE VIRI PRESSE PRESSI VOLUme + ---------- --------- --------- --------- --------- --------- +AVER> 1 0.00100 -75726.15470 130.81846 -75856.97316 2.72101 +AVER PROP> 14.52245 0.00000 0.00000 0.00000 9790.85358 +AVER INTERN> 570.25760 1198.54478 151.55814 919.37130 99.50939 +AVER EXTERN> 6655.52114 -74091.33853 0.00000 0.00000 0.00000 +AVER IMAGES> 997.13467 -12357.53166 0.00000 0.00000 0.00000 +AVER PRESS> -630.42946 -5896.80626 179.37431 -1652.98728 240990.21157 + ---------- --------- --------- --------- --------- --------- + + * * * RMS FLUCTUATIONS FOR 1 STEPS +FLUC> 1 0.0 5734450506.1 17113.5 5754280377.7 7.4 +FLUC PROP> 210.901 0.000 0.000 0.000 95860813.856 +FLUC INTERN> 325193.7255 1436509.5936 22969.8713 845243.5857 9902.1182 +FLUC EXTERN> 44295961.7 5489526445.2 0.0 0.0 0.0 +FLUC IMAGES> 994277.55 152708588.73 0.00 0.00 0.00 +FLUC PRESS> 397441. 34772324. 32175. 2732367. 58076282071. 
+ ---------- --------- --------- --------- --------- --------- + + DETAILS ABOUT CENTRE OF MASS + POSITION : -3.67788895E-02 -7.52627313E-03 -9.06203823E-03 + VELOCITY : -1.36816176E-15 -3.16622913E-16 -2.71919912E-16 + ANGULAR MOMENTUM : 4.6459702 138.80512 -45.385133 + KINETIC ENERGY : 1.47839360E-25 + + CHARMM> stop +$$$$$$ New timer profile $$$$$ + Shake Setup 0.01 Other: 0.00 + First List 2.94 Other: 0.00 + Shake time 0.02 Other: 0.00 + Dynamics total 0.01 Other: 0.00 + Electrostatic & VDW 0.63 Other: 0.00 + Nonbond force 0.63 Other: 0.00 + Bond energy 0.00 Other: 0.00 + Angle energy 0.00 Other: 0.00 + Dihedral energy 0.00 Other: 0.00 + Restraints energy 0.00 Other: 0.00 + INTRNL energy 0.01 Other: 0.00 + Energy time 0.64 Other: 0.00 + Total time 8.61 Other: 4.99 + + NORMAL TERMINATION BY NORMAL STOP + MAXIMUM STACK SPACE USED IS 9478830 + STACK CURRENTLY IN USE IS 0 + MOST SEVERE WARNING WAS AT LEVEL 1 + HEAP PRINTOUT- HEAP SIZE 10240000 + SPACE CURRENTLY IN USE IS 13824 + MAXIMUM SPACE USED IS 29464250 + FREE LIST + PRINHP> ADDRESS: 1 LENGTH: 10225676 NEXT: 10239501 + PRINHP> ADDRESS: 10239501 LENGTH: 500 NEXT: 199443967 + PRINHP> ADDRESS: 199443967 LENGTH: 671744 NEXT: 201278975 + PRINHP> ADDRESS: 201278975 LENGTH: 32309248 NEXT: 0 + + $$$$$ JOB ACCOUNTING INFORMATION $$$$$ + ELAPSED TIME: 8.62 SECONDS + CPU TIME: 8.58 SECONDS +''' + + ene = Charmm() + + # test parse + ene.parse(text.split(""\n"")) + assert len(ene.dict_data) == 26 + print(ene.dict_text['ELEC'][0]) + assert ene.dict_text['ELEC'][0] == ""-86382.838140"" + + # test diff + ene2 = copy.deepcopy(ene) + ene2.dict_text['ELEC'][0] = ""-86382.923020"" + ene2.dict_data['ELEC'][0] = -86382.923020 + ene.test_diff(ene2, 0.0001) + +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/fep.py",".py","6539","173","#!/usr/bin/python +# coding: utf-8 + +# +# A parser script for GENESIS output style +# +# (c) Copyright 2022 RIKEN. All rights reserved. 
+# + +import re +import sys +import copy +import math + +############### DEFINITION #################################################### +class Fep(object): + def __init__(self): # initialization + # public attributes + self.dict_text = {} + self.dict_data = {} + self.dict_error = {} + self.is_passed = False + + def delete_last(self): + for key in list(self.dict_data.keys()): + self.dict_text[key].pop() + self.dict_data[key].pop() + + def delete_first(self): + for key in list(self.dict_data.keys()): + self.dict_text[key].pop(0) + self.dict_data[key].pop(0) + + def read(self, filename): + fid = open(filename, 'r') + text_list = fid.readlines() + fid.close() + self.parse(text_list) + + def parse(self, text_list): + title = [] + text = [] + data = [] + data_each = [] + is_md = False + for line in text_list: + if not line.startswith(""#""): + data_each = line.rstrip('\n').split() + if len(data_each) > 0: + text.append(data_each) + tmp = [] + for i in range(len(data_each)): + if data_each[i][-1].isdigit(): # number ? + tmp = tmp + [(float(data_each[i]))] + else: # Not a Number ! 
+ tmp = tmp + [(float(999999))] + data.append(tmp) + else: + result = re.search(""STEP"", line) + if result is not None: + line_split = line.rstrip('\n').strip('#').split() + title = title + line_split + + # append to the dictionary + self.dict_append(title, text, data) + + def dict_append(self, title, text, data): + if len(self.dict_text) == 0: + self.dict_text = dict.fromkeys(title, []) + if len(self.dict_data) == 0: + self.dict_data = dict.fromkeys(title, []) + text_transpose = list(map(list, list(zip(*text)))) + data_transpose = list(map(list, list(zip(*data)))) + for i in range(len(title)): + self.dict_text[title[i]] = self.dict_text[title[i]] + text_transpose[i] + self.dict_data[title[i]] = self.dict_data[title[i]] + data_transpose[i] + + def test_diff(self, obj, tolerance): # compare energies + # test MD steps + keys = set(self.dict_data.keys()) & set(obj.dict_data.keys()) + is_failure = False + dict_failure = dict.fromkeys(keys, False) + nstep_failure = 0 + + nstep = len(self.dict_data['STEP']) + for istep in range(nstep): + for key in keys: + d = abs(self.dict_data[key][istep] - obj.dict_data[key][istep]) + if abs(self.dict_data[key][istep]) < 1e4: #min log is 1e-4 + ratio=d + else: + ebase=max(abs(self.dict_data[key][istep]),1.0) + ratio = d/ebase + tolerance2 = tolerance + if abs(self.dict_data[key][istep]) < 1e4: + tolerance2 = tolerance2*1e4 + if ratio > tolerance2: + is_failure = True + dict_failure[key] = True + nstep_failure = istep + if is_failure: + break + + if is_failure: + self.is_passed = False + print(""Failure at step %d (tolerance = %4.2e(ene))"" % (self.dict_data['STEP'][nstep_failure], tolerance)) + nstep_max = min([nstep_failure + 3, nstep]) + for istep in range(nstep_failure, nstep_max): + print(""Step %d"" % (self.dict_data['STEP'][istep])) + + sys.stdout.write("" "") + for key in keys: + if dict_failure[key]: + sys.stdout.write(""%14s"" % key.rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""< "") + for key in keys: + if 
dict_failure[key]: + sys.stdout.write(""%14s"" % self.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""> "") + for key in keys: + if dict_failure[key]: + sys.stdout.write(""%14s"" % obj.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n\n"") + else: + self.is_passed = True + print(""Passed (tolerance = %4.2e(ene))"" % (tolerance)) + +############### TEST ########################################################## +if __name__ == '__main__': + text = '''# FEP window index 1 +# STEP Total_E_ref Delta_E_rev Delta_E_fwd + 55 -30601.3858 -0.0000 -11.9887 + 60 -30601.5122 0.0000 -12.4823 + 65 -30593.9519 0.0000 -12.5749 + 70 -30602.1777 0.0000 -11.6161 + 75 -30600.3690 -0.0000 -11.8543 + 80 -30593.0163 0.0000 -12.7094 + 85 -30594.2356 0.0000 -12.3298 + 90 -30597.9725 -0.0000 -11.4078 + 95 -30598.5747 0.0000 -12.0962 + 100 -30591.5799 0.0000 -13.1131 +# FEP window index 2 + 155 -30593.4023 14.4644 -0.0000 + 160 -30592.9836 13.8435 -0.0000 + 165 -30593.3225 13.7767 -0.0000 + 170 -30592.0947 15.1606 -0.0000 + 175 -30586.4292 15.1691 0.0000 + 180 -30577.7778 14.0791 0.0000 + 185 -30580.2632 13.5071 -0.0000 + 190 -30574.9835 13.9099 -0.0000 + 195 -30580.5005 13.1632 0.0000 + 200 -30582.4343 12.9324 -0.0000 +''' + ene = Fep() + + # test parse + ene.parse(text.split(""\n"")) +# assert len(ene.dict_data) == 25 +# assert ene.dict_text['ELECT'][0] == ""-39052.9065"" +# assert ((ene.dict_data['ELECT'][0] + 39052.9065) < 10**(-3)) + + # test diff + ene2 = copy.deepcopy(ene) + ene2.dict_text['Delta_E_rev'][1] = ""-0.3"" + ene2.dict_data['Delta_E_rev'][1] = -0.3 + ene.test_diff(ene2, 1.0e-8) + + ene.delete_last() +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test.py",".py","13504","407","#!/usr/bin/env python +# coding: utf-8 + +# +# A python script for GENESIS regression tests +# +# (c) Copyright 2022 RIKEN. All rights reserved. 
+# + +import subprocess +import os +import os.path +import sys +import copy +import random +import glob +import shutil +import re +from genesis import * + + +############### DEFINITION ################################## +def getdirs(path): + test_dirs = [] + for system_dir in os.listdir(path): + system_dir_path = os.path.join(path,system_dir) + if os.path.isdir(system_dir_path): + for test_dir in os.listdir(system_dir_path): + test_dir_path = os.path.join(system_dir_path,test_dir) + if os.path.isdir(test_dir_path): + if os.path.exists(test_dir_path + ""/inp"") and os.path.exists(test_dir_path + ""/ref""): + test_dirs.append(test_dir_path) + if os.path.exists(test_dir_path + ""/pio.inp""): + test_dirs.append(test_dir_path) + test_dirs.sort() + return test_dirs + +############### MAIN ######################################## + +###### initialization + +tolerance = 1.0e-6 # relative energy difference (diff/abs(e)) +tolerance_atdyn = 1.0e-8 # relative energy difference (diff/abs(e)) +tolerance_tmd = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_fujitsu = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_gpu_respa = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_fujitsu_weak = 1.0e-5 # relative energy difference (diff/abs(e)) +tolerance_press_rmsd = 3.0e-5 # relative energy difference (diff/abs(e)) +tolerance_single = 3.0e-5 # relative energy difference (diff/abs(e)) +virial_ratio = 2.0e2 +#os.environ[""OMP_NUM_THREADS""] = ""1"" + +ipassed = 0 +ifailed = 0 +iaborted = 0 +itried = 0 + +is_atdyn = False +is_spdyn = False +is_parallelio = False +is_gpu = False +is_fugaku = False + +test_dirs = [] + +###### parse command line + +if len(sys.argv) == 1: + genesis_command = 'mpirun -np 8 atdyn' +elif len(sys.argv) == 2: + genesis_command = sys.argv[1] +else: + genesis_command = sys.argv[1] + is_number = re.compile(r'^[+-]?(\d*\.\d+|\d+\.?\d*)([eE][+-]?\d+|)\Z') + if is_number.match(sys.argv[2]): + tolerance = float(sys.argv[2]) + elif 
sys.argv[2] == ""parallel_io"": + is_parallelio = True + elif sys.argv[2] == ""gpu"": + is_gpu = True + if len(sys.argv) == 4: + if is_number.match(sys.argv[3]): + tolerance = float(sys.argv[3]) + elif sys.argv[2] == ""fugaku"": + mpiexec_command = genesis_command.split(' ',2)[0] + genesis_path = genesis_command.split(' ',2)[1] + is_fugaku = True + if len(sys.argv) == 4: + if is_number.match(sys.argv[3]): + tolerance = float(sys.argv[3]) + else: + test_dirs = sys.argv[2:] + +tolerance_virial = tolerance*virial_ratio + +if (genesis_command == ""-h"") or (genesis_command == ""--help""): + print("""""" usage: + $ ./test.py [""genesis command""] [parallel_io or tolerance_value or directories] + + examples: + # run tests using the default command (""mpirun -np 8 atdyn"") + $ ./test.py + + # run atdyn tests + $ ./test.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" + + # run spdyn tests + $ ./test.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" + + # run parallel IO tests + $ ./test.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" parallel_io + + # run gpu tests + $ ./test.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" gpu + + # run with specfic tolerance value + $ ./test.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" 0.1 + + # run specfic directories + $ ./test.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" test_common/jac_param27/LEAP_CUTOFF + """""") + sys.exit(3) + +genesis_command_split = genesis_command.split() +genesis_command_last = genesis_command_split[-1] +if not os.path.exists(os.path.expanduser(genesis_command_last)): + print(""Error: %s does not exist"" % genesis_command_last) + sys.exit(3) + +# if given path is relpath, change it to abspath +genesis_command_split[-1] = os.path.abspath(os.path.expanduser(genesis_command_last)) +genesis_command = "" "".join(genesis_command_split) + +if genesis_command[-5:] == ""atdyn"": + is_atdyn = True +elif genesis_command[-5:] == ""spdyn"": + is_spdyn = True + +###### setup test directories + +if len(test_dirs) == 0: + if 
is_parallelio and is_spdyn: + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_parallel_IO"") + elif is_parallelio and (not is_spdyn): + print(""Error: parallel_io is supported by only spdyn, please specify spdyn in command line"") + sys.exit(3) + elif is_spdyn: + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_spdyn"") + elif is_atdyn: + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_atdyn"") + +for dir in test_dirs: + if not os.path.exists(dir): + print(""Error: %s, this test directory does not exist"" % dir) + sys.exit(3) + +###### check the number of MPI processors +if (len(sys.argv) == 2 and is_atdyn): + genesis_mpi_number = 1 + +elif (is_fugaku): + genesis_mpi_number = 8 + +else: + is_mpi=False + genesis_mpi_number = -1 + for options in genesis_command_split: + if is_mpi: + genesis_mpi_number = int(options) + is_mpi=False + break + if options == ""-n"": + is_mpi = True + if options == ""-np"": + is_mpi = True + + if int(genesis_mpi_number) < 0: + genesis_mpi_number = genesis_command_split[-2] + + if (is_parallelio and is_spdyn): + if int(genesis_mpi_number) != 8: + print(""Error: Number of MPI processes should be 8 for parallel_io tests"") + sys.exit(3) + else: + if not int(genesis_mpi_number) in [1, 2, 4, 8]: + print(""Error: Number of MPI processes should be 1, 2, 4, or 8"") + sys.exit(3) + +###### run tests +if (is_atdyn or is_spdyn) and (not is_parallelio): + print(""======================================================================="") + print("" Regression tests for MD"") + print(""======================================================================="") + + cwdname = os.getcwd() + for test_each in test_dirs: + os.chdir(cwdname) + dirname = test_each + resultvver = re.compile(r'VVER_[A-Za-z_]+([1248])').search(dirname); + resultvres = re.compile(r'VRES_[A-Za-z_]+([1248])').search(dirname); + if (""WATER"" in dirname) and (is_gpu) : + continue + if (""CUTOFF"" in dirname) and 
(is_gpu) : + continue + if (""Go"" in dirname) and (is_gpu) : + continue + if (""All_atom"" in dirname) and (is_gpu) : + continue + if (""cago"" in dirname) and (is_gpu) : + continue + if (resultvver and (is_spdyn)): + if (int(genesis_mpi_number) != int(resultvver.group(1))) : + continue + if (resultvres and (is_spdyn)): + if (int(genesis_mpi_number) != int(resultvres.group(1))) : + continue + + itried = itried + 1 + if not os.path.isdir(dirname) : + continue + os.chdir(dirname) + + # run MD + print(""-----------------------------------------------------------------------"") + print(""Running %s..."" % (dirname + ""/"")) + + inputname = ""inp"" + testname = ""test"" + if (is_fugaku): + commandline = '%s sh -c \""%s %s 1> %s 2> error\""' % (mpiexec_command, genesis_path, inputname, testname) + else: + commandline = '%s %s 1> %s 2> error' % (genesis_command, inputname, testname) + + print(""$ %s"" % commandline) + status = subprocess.getstatusoutput(commandline) + + if (status[0] > 0) and (status[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + + # parse the result + #if status[0] == 0: + test = Genesis() + is_empty = os.stat(testname).st_size == 0 + if (is_empty): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + test.read(testname) + + refname = ""ref"" + tolerance_cur = tolerance + if test.is_single and is_spdyn: + tolerance_cur = tolerance_single + if is_atdyn: + tolerance_cur = tolerance_atdyn + if is_atdyn and (""TMD"" in dirname): + tolerance_cur = tolerance_tmd + if is_spdyn and (""TMD"" in dirname) and not test.is_single: + tolerance_cur = tolerance_tmd + if not test.is_single and is_spdyn and is_gpu and (""VRES"" in dirname): + tolerance_cur = tolerance_gpu_respa + if test.is_fujitsu: + tolerance_cur = tolerance_fujitsu + if is_fugaku and not test.is_single: + tolerance_cur = tolerance_fujitsu + if test.is_fujitsu and is_spdyn and (""tip4"" in dirname): + tolerance_cur = 
tolerance_fujitsu_weak + if is_fugaku and is_spdyn and (""tip4"" in dirname): + tolerance_cur = tolerance_fujitsu_weak + if is_atdyn and (""REST-RMSD_EXT"" in dirname): + tolerance_cur = tolerance_press_rmsd + + tolerance_cur_virial = tolerance_cur*virial_ratio + + ref = Genesis() + ref.read(refname) + + # check the result + print() + print(""Checking %s"" % test_each) + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + if (""TMD"" in dirname and test.is_single): + ref.test_diff_TMD(test, tolerance_cur, tolerance_cur_virial) + else: + ref.test_diff(test, tolerance_cur, tolerance_cur_virial) + + print() + if ref.is_passed: + ipassed = ipassed + 1 + else: + ifailed = ifailed + 1 + # post-cleaning + #os.remove(testname) + +###### run parallel i/o tests +if is_parallelio and is_spdyn: + print(""======================================================================="") + print(""Parallel I/O Tests"") + print(""======================================================================="") + + + genesis_command_prst_setup_split = genesis_command_split + genesis_prst_setup=genesis_command_prst_setup_split[-1].replace('spdyn','prst_setup'); + genesis_prst_setup = os.path.abspath(os.path.expanduser(genesis_prst_setup)); + if not os.path.exists(os.path.expanduser(genesis_prst_setup)): + print(""Error: %s does not exist"" % genesis_prst_setup) + sys.exit(3) + + genesis_command_prst_setup_split[-1] = genesis_prst_setup + genesis_prst_setup = "" "".join(genesis_command_prst_setup_split) + + cwdname = os.getcwd() + + for test_each in test_dirs: + dirname = test_each + os.chdir(dirname) + itried = itried + 1 + for fl in glob.glob(""dd*.rst""): + os.remove(fl) + if os.path.isdir(""./cache""): + shutil.rmtree(""cache"") + os.mkdir(""./cache"") + + # run MD + print(""-----------------------------------------------------------------------"") + print(""Building Parallel I/O rst %s..."" % (dirname + ""/"")) + commandline = '%s prst.inp 1> log 2> error_prst' % 
genesis_prst_setup + status1 = subprocess.getstatusoutput(commandline) + if (status1[0] > 0) and (status1[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + os.chdir(cwdname) + continue + + print(""Done ..."") + print(""Running single version %s..."" % (dirname + ""/"")) + commandline = '%s single.inp 1> ref 2> error_single' % genesis_command + status2 = subprocess.getstatusoutput(commandline) + if (status2[0] > 0) and (status2[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + os.chdir(cwdname) + continue + + print(""Done ..."") + testname = ""test"" + commandline = '%s pio.inp 1> %s 2> error' % (genesis_command, testname) + print(""$ %s"" % commandline) + status4 = subprocess.getstatusoutput(commandline) + + if (status4[0] > 0) and (status4[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + os.chdir(cwdname) + continue + + # parse the result + #if status[0] == 0: + refname = ""ref"" + ref = Genesis() + ref.read(refname) + test = Genesis() + test.read(testname) + + # check the result + print() + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + ref.test_diff(test, tolerance, tolerance_virial) + print() + if ref.is_passed: + ipassed = ipassed + 1 + else: + ifailed = ifailed + 1 + # post-cleaning + #os.remove(testname) + os.chdir(cwdname) + +###### finalization +if (itried > 0): + print(""-----------------------------------------------------------------------"") + print(""Passed %d / %d"" % (ipassed, itried)) + print(""Failed %d / %d"" % (ifailed, itried)) + print(""Aborted %d / %d"" % (iaborted, itried)) + print(""-----------------------------------------------------------------------"") + +if iaborted > 0: + sys.exit(2) +elif ifailed > 0: + sys.exit(1) +else: + sys.exit(0) + +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_remd.py",".py","8440","283","#!/usr/bin/env python +# coding: utf-8 + +# +# A 
python script for GENESIS regression tests +# +# (c) Copyright 2022 RIKEN. All rights reserved. +# + +import subprocess +import os +import os.path +import sys +import copy +import random +import glob +import shutil +import re +from genesis import * + + +############### DEFINITION ################################## +def getdirs(path): + test_dirs = [] + for test_dir in os.listdir(path): + test_dir_path = os.path.join(path,test_dir) +# print ""%s"" % test_dir_path + if os.path.isdir(test_dir_path): + if os.path.exists(test_dir_path + ""/inp"") and os.path.exists(test_dir_path + ""/ref1""): + test_dirs.append(test_dir_path) + test_dirs.sort() + return test_dirs + +############### MAIN ######################################## + +###### initialization + +#os.environ[""OMP_NUM_THREADS""] = ""1"" +tolerance = 1.0e-8 # relative energy difference (diff/abs(e)) +tolerance_fujitsu = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_gpu_respa = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_fujitsu_weak = 1.0e-5 # relative energy difference (diff/abs(e)) +tolerance_single = 3.0e-5 # relative energy difference (diff/abs(e)) +virial_ratio = 1.0e2 + +tolerance_virial = tolerance*virial_ratio + +ipassed = 0 +ifailed = 0 +iaborted = 0 +itried = 0 +num = 0 + +is_atdyn = False +is_spdyn = False +is_parallelio = False +is_gpu = False +is_fugaku = False + +test_dirs = [] + +###### parse command line + +if len(sys.argv) == 1: + genesis_command = 'mpirun -np 32 atdyn' +elif len(sys.argv) == 2: + genesis_command = sys.argv[1] +else: + genesis_command = sys.argv[1] + is_number = re.compile(r'^[+-]?(\d*\.\d+|\d+\.?\d*)([eE][+-]?\d+|)\Z') + if is_number.match(sys.argv[2]): + tolerance = float(sys.argv[2]) + elif sys.argv[2] == ""parallel_io"": + is_parallelio = True + elif sys.argv[2] == ""gpu"": + is_gpu = True + elif sys.argv[2] == ""fugaku"": + mpiexec_command = genesis_command.split(' ',2)[0] + genesis_path = genesis_command.split(' ',2)[1] + is_fugaku = True + 
if len(sys.argv) == 4: + if is_number.match(sys.argv[3]): + tolerance = float(sys.argv[3]) + else: + test_dirs = sys.argv[2:] + +if (genesis_command == ""-h"") or (genesis_command == ""--help""): + print("""""" usage: + $ ./test.py [""genesis command""] [parallel_io or tolerance_value or directories] + + examples: + # run tests using the default command (""mpirun -np 8 atdyn"") + $ ./test_remd.py + + # run atdyn tests + $ ./test_remd.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" + + # run spdyn tests + $ ./test_remd.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" + + # run with specfic tolerance value + $ ./test_remd.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" 0.1 + + """""") + sys.exit(3) + +genesis_command_split = genesis_command.split() +genesis_command_last = genesis_command_split[-1] +if not os.path.exists(os.path.expanduser(genesis_command_last)): + print(""Error: %s does not exist"" % genesis_command_last) + sys.exit(3) + +# if given path is relpath, change it to abspath +genesis_command_split[-1] = os.path.abspath(os.path.expanduser(genesis_command_last)) +genesis_command = "" "".join(genesis_command_split) + +if genesis_command[-5:] == ""atdyn"": + is_atdyn = True +elif genesis_command[-5:] == ""spdyn"": + is_spdyn = True + +if (is_fugaku): + genesis_mpi_number = 32 +else: + is_mpi=False + genesis_mpi_number = -1 + for options in genesis_command_split: + if is_mpi: + genesis_mpi_number = int(options) + is_mpi=False + break + if options == ""-n"": + is_mpi = True + if options == ""-np"": + is_mpi = True + + if int(genesis_mpi_number) < 0: + genesis_mpi_number = genesis_command_split[-2] + + if not int(genesis_mpi_number)%4 == 0: + print(""Error: Number of MPI processes %d should be multiplier of 4"" % int(genesis_mpi_number)) + sys.exit(3) + +###### setup test directories + +if len(test_dirs) == 0: + if is_atdyn: + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_remd_atdyn"") + elif is_spdyn: + test_dirs = 
getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_remd_spdyn"") + +for dir in test_dirs: + if not os.path.exists(dir): + print(""Error: %s, this test directory does not exist"" % dir) + sys.exit(3) + +###### run tests +if (is_atdyn or is_spdyn): + print(""======================================================================="") + print("" Regression tests for REMD"") + print(""======================================================================="") + + cwdname = os.getcwd() + for test_each in test_dirs: + os.chdir(cwdname) + dirname = test_each + if not os.path.isdir(dirname) : + continue + if (""H-REMD"" in dirname) and (is_gpu) : + continue + if (""REST"" in dirname) and (is_gpu) : + continue + if (""T-REMD"" in dirname) and (is_gpu) : + continue + + itried = itried + 1 + os.chdir(dirname) + + # run MD + print(""-----------------------------------------------------------------------"") + print(""Running %s..."" % (dirname + ""/"")) + + for fl in glob.glob(""test*""): + os.remove(fl) + + inputname = ""inp"" + outname = ""log"" + if (is_fugaku): + commandline = '%s sh -c \""%s %s 1> %s 2> error\""' % (mpiexec_command, genesis_path, inputname, outname) + else: + commandline = '%s %s 1> %s 2> error' % (genesis_command, inputname, outname) + print(""$ %s"" % commandline) + status = subprocess.getstatusoutput(commandline) + + if (status[0] > 0) and (status[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + + # parse the result + #if status[0] == 0: + print() + print(""Checking %s"" % test_each) + print() + + num = 1 + ipassed_replica = 0 + ifailed_replica = 0 + + out = Genesis() + out.read(outname) + tolerance_cur = tolerance + if out.is_single and is_spdyn: + tolerance_cur = tolerance_single + if not out.is_single and is_spdyn and is_gpu and (""VRES"" in dirname): + tolerance_cur = tolerance_gpu_respa + if out.is_fujitsu: + tolerance_cur = tolerance_fujitsu + if out.is_fugaku and not out.is_single: + 
tolerance_cur = tolerance_fujitsu + if out.is_fujitsu and is_spdyn and (""tip4"" in dirname): + tolerance_cur = tolerance_fujitsu_weak + if out.is_fugaku and is_spdyn and (""tip4"" in dirname): + tolerance_cur = tolerance_fujitsu_weak + + tolerance_cur_virial = tolerance_cur*virial_ratio + + while num < 5: + ref = Genesis() + refname = ""ref%d"" % num + ref.read(refname) + testname = ""test%d"" % num + is_empty = os.stat(testname).st_size == 0 + if (is_empty): + print() + print(""Aborted..."") + print() + ifailed_replica = ifailed_replica + 1 + continue + test = Genesis() + test.read(testname) + + # check the result + print() + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + #ref.test_diff_energies(test, tolerance) + ref.test_diff(test, tolerance_cur, tolerance_cur_virial) + print() + + if ref.is_passed: + ipassed_replica = ipassed_replica + 1 + else: + ifailed_replica = ifailed_replica + 1 + + num += 1 + # post-cleaning + #os.remove(testname) + + if (ifailed_replica == 0): + ipassed = ipassed + 1 + else: + ifailed = ifailed + 1 + + +###### finalization +if (itried > 0): + print(""-----------------------------------------------------------------------"") + print(""Passed %d / %d"" % (ipassed, itried)) + print(""Failed %d / %d"" % (ifailed, itried)) + print(""Aborted %d / %d"" % (iaborted, itried)) + print(""-----------------------------------------------------------------------"") + +if iaborted > 0: + sys.exit(2) +elif ifailed > 0: + sys.exit(1) +else: + sys.exit(0) + +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/genesis.py",".py","14694","340","#!/usr/bin/python +# coding: utf-8 + +# +# A parser script for GENESIS output style +# +# (c) Copyright 2022 RIKEN. All rights reserved. 
+# + +import re +import sys +import copy +import math + +############### DEFINITION #################################################### +class Genesis(object): + def __init__(self): # initialization + # public attributes + self.dict_text = {} + self.dict_data = {} + self.dict_error = {} + self.is_passed = False + self.is_fujitsu = False + self.is_fugaku = False + self.is_single = False + self.is_gpu = False + + def delete_last(self): + for key in list(self.dict_data.keys()): + self.dict_text[key].pop() + self.dict_data[key].pop() + + def delete_first(self): + for key in list(self.dict_data.keys()): + self.dict_text[key].pop(0) + self.dict_data[key].pop(0) + + def read(self, filename): + fid = open(filename, 'r') + text_list = fid.readlines() + fid.close() + self.parse(text_list) + + def parse(self, text_list): + # parse titles + title = [] + text = [] + data = [] + data_each = [] + #patternMD = re.compile('^INFO:') + patternMD = re.compile('^INFO.') + patternMINUS = re.compile('-') + patternNONBOND = re.compile('^ nonbonding') + patternCPU = re.compile('^ cpu model') + patternFUJITSU = re.compile('SPARC') + patternFUGAKU = re.compile('Fugaku') + patternPRECISION = re.compile('^ precision') + patternSINGLE = re.compile('single') + patternMIXED = re.compile('mixed') + patternGPU = re.compile('GPU') + patternSTEP0 = re.compile(r'^\[STEP0') + patternSTEP1 = re.compile(r'^\[STEP1') + is_header = False + is_md = False + for line in text_list: + if is_md: + result = patternMD.search(line) + if result is None: + continue + else: # patternMD found + line_sub = patternMD.sub('', line.rstrip('\n')) + line_sub = patternMINUS.sub(' -', line_sub) + data_each = line_sub.split() + text.append(data_each) + tmp = [] + for i in range(len(data_each)): + result = False + if len(data_each[i]) >= 2 : + if data_each[i][-1].isdigit() & data_each[i][0].isalpha() : + result = True + + if result is True : # number ? 
+ tmp = tmp + [(float(999999))] + elif data_each[i].replace('.','',1).isdigit(): # positive number +# print ""test-posi %s"" % data_each[i] + tmp = tmp + [(float(data_each[i]))] + elif data_each[i][0]==""-"" and data_each[i][1:].replace('.','',1).isdigit(): # negative number +# print ""test-nega %s"" % data_each[i] + tmp = tmp + [(float(data_each[i]))] + elif data_each[i][-1].isdigit() and data_each[i][0].isdigit(): # number ? +# print ""test-nani %s"" % data_each[i] + tmp = tmp + [(float(data_each[i]))] + else: # Not a Number ! + tmp = tmp + [(float(999999))] + data.append(tmp) + else: + result = patternMD.search(line) + if result is not None: + line_sub = patternMD.sub('', line.rstrip('\n')) + line_split = line_sub.split() + title = title + line_split + is_md = True + + if is_header: + result = patternSTEP1.search(line) + if result is not None: + is_header = False + continue + result = patternCPU.search(line) + if result is not None: + self.is_fujitsu = patternFUJITSU.search(line) + if result is not None: + self.is_fugaku = patternFUGAKU.search(line) + result = patternPRECISION.search(line) + if result is not None: + self.is_single = patternSINGLE.search(line) + if not self.is_single: + self.is_single = patternMIXED.search(line) + result = patternNONBOND.search(line) + if result is not None: + self.is_gpu = patternGPU.search(line) + else: + result = patternSTEP0.search(line) + if result is not None: + is_header = True + + # append to the dictionary + self.dict_append(title, text, data) + + def dict_append(self, title, text, data): + if len(self.dict_text) == 0: + self.dict_text = dict.fromkeys(title, []) + if len(self.dict_data) == 0: + self.dict_data = dict.fromkeys(title, []) + text_transpose = list(map(list, list(zip(*text)))) + data_transpose = list(map(list, list(zip(*data)))) + for i in range(len(title)): + self.dict_text[title[i]] = self.dict_text[title[i]] + text_transpose[i] + self.dict_data[title[i]] = self.dict_data[title[i]] + data_transpose[i] + + 
def test_diff(self, obj, tolerance, tolerance_virial): # compare energies + # test MD steps + keys = set(self.dict_data.keys()) & set(obj.dict_data.keys()) + is_failure = False + dict_failure = dict.fromkeys(keys, False) + nstep_failure = 0 + patternPRESS = re.compile('PRESS') + + nstep = len(self.dict_data['STEP']) + for istep in range(nstep): + for key in keys: + d = abs(self.dict_data[key][istep] - obj.dict_data[key][istep]) + if abs(self.dict_data[key][istep]) < 1e4: #min log is 1e-4 + ratio=d + else: + ebase=max(abs(self.dict_data[key][istep]),1.0) + ratio = d/ebase + if (key == ""VIRIAL""): + tolerance2 = tolerance_virial + elif (patternPRESS.search(key)): + tolerance2 = tolerance_virial + else: + tolerance2 = tolerance + if abs(self.dict_data[key][istep]) < 1e4: + tolerance2 = tolerance2*1e4 + if ratio > tolerance2: + is_failure = True + dict_failure[key] = True + nstep_failure = istep + if is_failure: + break + + if is_failure: + self.is_passed = False + print(""Failure at step %d (tolerance = %4.2e(ene), %4.2e(virial))"" % (self.dict_data['STEP'][nstep_failure], tolerance,tolerance_virial)) + nstep_max = min([nstep_failure + 3, nstep]) + for istep in range(nstep_failure, nstep_max): + print(""Step %d"" % (self.dict_data['STEP'][istep])) + + sys.stdout.write("" "") + for key in keys: + if dict_failure[key]: + sys.stdout.write(""%14s"" % key.rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""< "") + for key in keys: + if dict_failure[key]: + sys.stdout.write(""%14s"" % self.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""> "") + for key in keys: + if dict_failure[key]: + sys.stdout.write(""%14s"" % obj.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n\n"") + else: + self.is_passed = True + print(""Passed (tolerance = %4.2e(ene), %4.2e(virial))"" % (tolerance, tolerance_virial)) + + def test_diff_TMD(self, obj, tolerance, tolerance_virial): # compare energies for TMD + # test MD steps + keys = 
set(self.dict_data.keys()) & set(obj.dict_data.keys()) + is_failure = False + dict_failure = dict.fromkeys(keys, False) + nstep_failure = 0 + patternPRESS = re.compile('PRESS') + + nstep = len(self.dict_data['STEP']) + for istep in range(nstep): + for key in keys: + d = abs(self.dict_data[key][istep] - obj.dict_data[key][istep]) + if abs(self.dict_data[key][istep]) < 1e4: #min log is 1e-4 + ratio=d + else: + ebase=max(abs(self.dict_data[key][istep]),1.0) + ratio = d/ebase + if (key == ""RESTRAINT_TOTAL""): + continue + if (key == ""TOTAL_ENE""): + continue + if (key == ""POTENTIAL_ENE""): + continue + elif (key == ""VIRIAL""): + tolerance2 = tolerance_virial + elif (patternPRESS.search(key)): + tolerance2 = tolerance_virial + else: + tolerance2 = tolerance + if abs(self.dict_data[key][istep]) < 1e4: + tolerance2 = tolerance2*1e4 + if ratio > tolerance2: + is_failure = True + dict_failure[key] = True + nstep_failure = istep + if is_failure: + break + + if is_failure: + self.is_passed = False + print(""Failure at step %d (tolerance = %4.2e(ene), %4.2e(virial))"" % (self.dict_data['STEP'][nstep_failure], tolerance,tolerance_virial)) + nstep_max = min([nstep_failure + 3, nstep]) + for istep in range(nstep_failure, nstep_max): + print(""Step %d"" % (self.dict_data['STEP'][istep])) + + sys.stdout.write("" "") + for key in keys: + if dict_failure[key]: + sys.stdout.write(""%14s"" % key.rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""< "") + for key in keys: + if dict_failure[key]: + sys.stdout.write(""%14s"" % self.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n"") + + sys.stdout.write(""> "") + for key in keys: + if dict_failure[key]: + sys.stdout.write(""%14s"" % obj.dict_text[key][istep].rjust(14)) + sys.stdout.write(""\n\n"") + else: + self.is_passed = True + print(""Passed (tolerance = %4.2e(ene), %4.2e(virial))"" % (tolerance, tolerance_virial)) + +############### TEST ########################################################## +if __name__ == 
'__main__': + text = '''Update_Pairlist_Pbc> Memory for Pairlist was allocated +Update_Pairlist_Pbc> Pairlist was Updated +[STEP4] Compute Single Point Energy for Molecules + + STEP BOND ANGLE DIHEDRAL IMPROPER + VDWAALS ELECT UREY-BRADLEY CMAP RESTRAINT + --------------- --------------- --------------- --------------- --------------- + 0 3590.8442 1851.3287 0.0000 0.0000 + 4565.8064 -39052.9065 0.0000 0.0000 0.0000 + +[STEP5] Perform Molecular Dynamics Simulation + +Update_Pairlist_Pbc> Pairlist was Updated +INFO: STEP TIME TOTAL_ENE POTENTIAL_ENE KINETIC_ENE RMSG BOND ANGLE DIHEDRAL IMPROPER VDWAALS ELECT UREY-BRADLEY CMAP RESTRAINT BOXX BOXY BOXZ VOLUME TEMPERATURE PRESSXX PRESSYY PRESSZZ VIRIAL PRESSURE + --------------- --------------- --------------- --------------- --------------- +INFO: 1 0.0010 -20366.0251 -29044.9272 8678.9021 19.3525 3590.8442 1851.3287 0.0000 0.0000 4565.8064 -39052.9065 0.0000 0.0000 0.0000 45.8363 45.8363 45.8363 96300.5261 315.9647 -58.0979 -470.2127 939.1533 -5593.6679 136.9476 + +Update_Pairlist_Pbc> Memory for Pairlist was allocated +Update_Pairlist_Pbc> Pairlist was Updated +INFO: 2 0.0020 -20357.3017 -28943.4688 8586.1671 19.7050 3666.5101 1848.7082 0.0000 0.0000 4570.7423 -39029.4294 0.0000 0.0000 0.0000 45.8363 45.8363 45.8363 96300.5261 312.5886 187.0513 -715.0704 842.0369 -5577.1568 104.6726 + +[STEP6] Deallocate Arrays + +Output_Time> Timer profile of each rank + Rank Ebond Enbond Integ List Total + 0 0.001 0.172 0.001 1.021 1.706 + 1 0.000 0.176 0.001 1.017 1.708 + 2 0.000 0.178 0.001 1.015 1.709 + 3 0.000 0.173 0.001 1.020 1.709 + 4 0.000 0.178 0.001 1.015 1.708 + 5 0.002 0.175 0.001 1.018 1.710 + 6 0.001 0.180 0.001 1.012 1.708 + 7 0.000 0.174 0.001 1.019 1.708 + +Output_Time> Averaged timer profile + total time = 1.708 + setup = 0.976 + dynamics = 0.732 + energy = 0.179 + integrator = 0.001 + pairlist = 1.017 + energy + bond = 0.000 + angle = 0.000 + dihedral = 0.000 + nonbond = 0.176 + pme real = 0.146 + pme recip = 
0.029 + restraint = 0.000 + integrator + constraint = 0.000 + update = 0.000 + comm1 = 0.000 + comm2 = 0.000 +''' + ene = Genesis() + + # test parse + ene.parse(text.split(""\n"")) + assert len(ene.dict_data) == 25 + assert ene.dict_text['ELECT'][0] == ""-39052.9065"" + assert ((ene.dict_data['ELECT'][0] + 39052.9065) < 10**(-3)) + + # test diff + ene2 = copy.deepcopy(ene) + ene2.dict_text['ELECT'][0] = ""-39052.0293"" + ene2.dict_data['ELECT'][0] = -39052.0293 + ene.test_diff(ene2, 0.0001, 0.001) + ene.delete_last() + + +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/cleanup.sh",".sh","2837","81","#!/bin/bash + +# This script cleans up output and log files created +# by regression tests, and restores the status before +# the tests. +# +# Usage: +# $ ./cleanup.sh + +rm test_atdyn/*/*/error* >& /dev/null +rm test_atdyn/*/*/test* >& /dev/null +rm -r test_atdyn/*/*/output.?* >& /dev/null +rm test_atdyn/br/*/qmmm.0/*.inp >& /dev/null +rm test_atdyn/br/*/qmmm.0/*qm.xyz >& /dev/null +rm test_atdyn/br/*/qmmm.0/*pc.xyz >& /dev/null +rm test_atdyn/br/*/qmmm.0/mm_charges.dat >& /dev/null + +rm test_spdyn/*/*/error* >& /dev/null +rm test_spdyn/*/*/test* >& /dev/null +rm -r test_spdyn/*/*/output.?* >& /dev/null + +rm test_remd_atdyn/*/error* >& /dev/null +rm test_remd_atdyn/*/log >& /dev/null +rm test_remd_atdyn/*/test? >& /dev/null +rm -r test_remd_atdyn/*/output.?* >& /dev/null +rm test_remd_spdyn/*/error* >& /dev/null +rm test_remd_spdyn/*/log >& /dev/null +rm test_remd_spdyn/*/test? 
>& /dev/null +rm -r test_remd_spdyn/*/output.?* >& /dev/null + +rm test_rpath_atdyn/*/error* >& /dev/null +rm test_rpath_atdyn/*/log >& /dev/null +rm test_rpath_atdyn/*/test* >& /dev/null +rm test_rpath_atdyn/tim*/dcd* >& /dev/null +rm test_rpath_atdyn/tim*/rst* >& /dev/null +rm test_rpath_atdyn/tim*/qmmm.*/*.inp >& /dev/null +rm test_rpath_atdyn/tim*/qmmm.*/mm_charges.dat >& /dev/null +rm -r test_rpath_atdyn/*/output.?* >& /dev/null + +rm test_rpath_spdyn/*/error* >& /dev/null +rm test_rpath_spdyn/*/log >& /dev/null +rm test_rpath_spdyn/*/test? >& /dev/null +rm -r test_rpath_spdyn/*/output.?* >& /dev/null + +rm test_vib/*/error* >& /dev/null +rm test_vib/*/log >& /dev/null +rm test_vib/*/vib.minfo >& /dev/null +rm test_vib/*/test? >& /dev/null +rm test_vib/*/qmmm.?/*.inp >& /dev/null +rm -r test_vib/*/minfo.files >& /dev/null +rm -r test_vib/*/output.?* >& /dev/null + +rm test_gamd_atdyn/*/*/test* >& /dev/null +rm test_gamd_atdyn/*/*/error* >& /dev/null +rm test_gamd_atdyn/*/*/out.gamd >& /dev/null +rm -r test_gamd_atdyn/*/*/output.?* >& /dev/null + +rm test_gamd_spdyn/*/*/test* >& /dev/null +rm test_gamd_spdyn/*/*/error* >& /dev/null +rm test_gamd_spdyn/*/*/out.gamd >& /dev/null +rm -r test_gamd_spdyn/*/*/output.?* >& /dev/null + +rm test_fep/*/*/test* >& /dev/null +rm test_fep/*/*/error* >& /dev/null +rm test_fep/*/*/out*.fepout >& /dev/null +rm -r test_fep/*/*/output.?* >& /dev/null + +rm test_parallel_IO/*/*/error* >& /dev/null +rm test_parallel_IO/*/*/test* >& /dev/null +rm test_parallel_IO/*/*/log* >& /dev/null +rm test_parallel_IO/*/*/ref* >& /dev/null +rm test_parallel_IO/*/*/*.rst >& /dev/null +rm -r test_parallel_IO/*/*/cache >& /dev/null + +cd test_analysis +./cleanup.sh >& /dev/null +cd .. +cd test_spana +./cleanup.sh >& /dev/null +cd .. 
+","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_fep.py",".py","9770","320","#!/usr/bin/env python +# coding: utf-8 + +# +# A python script for GENESIS regression tests +# +# (c) Copyright 2022 RIKEN. All rights reserved. +# + +import subprocess +import os +import os.path +import sys +import copy +import random +import glob +import shutil +import re +from genesis import * +from fep import * + + +############### DEFINITION ################################## +def getdirs(path): + test_dirs = [] + for forcefield_dir in os.listdir(path): + forcefield_dir_path = os.path.join(path,forcefield_dir) + if os.path.isdir(forcefield_dir_path): + for test_dir in os.listdir(forcefield_dir_path): + test_dir_path = os.path.join(forcefield_dir_path,test_dir) + if os.path.isdir(test_dir_path): + if os.path.exists(test_dir_path + ""/inp"") and (os.path.exists(test_dir_path + ""/ref"") or os.path.exists(test_dir_path + ""/ref1"")): + test_dirs.append(test_dir_path) + test_dirs.sort() + return test_dirs + +############### MAIN ######################################## + +###### initialization + +#os.environ[""OMP_NUM_THREADS""] = ""1"" +tolerance = 1.0e-8 # relative energy difference (diff/abs(e)) +tolerance_fujitsu = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_gpu_respa = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_fujitsu_weak = 1.0e-5 # relative energy difference (diff/abs(e)) +tolerance_single = 1.0e-4 # relative energy difference (diff/abs(e)) +virial_ratio = 1.0e2 + +ipassed = 0 +ifailed = 0 +iaborted = 0 +itried = 0 + +is_atdyn = False +is_spdyn = False +is_parallelio = False +is_gpu = False +is_fugaku = False + +test_dirs = [] + +###### parse command line + +if len(sys.argv) == 1: + genesis_command = 'mpirun -np 8 spdyn' +elif len(sys.argv) == 2: + genesis_command = sys.argv[1] +else: + genesis_command = sys.argv[1] + is_number = re.compile(r'^[+-]?(\d*\.\d+|\d+\.?\d*)([eE][+-]?\d+|)\Z') + if 
is_number.match(sys.argv[2]): + tolerance = float(sys.argv[2]) + elif sys.argv[2] == ""gpu"": + is_gpu = True +# if len(sys.argv) == 4: +# if is_number.match(sys.argv[3]): +# tolerance = float(sys.argv[3]) + elif sys.argv[2] == ""fugaku"": + mpiexec_command = genesis_command.split(' ',2)[0] + genesis_path = genesis_command.split(' ',2)[1] + is_fugaku = True + if len(sys.argv) == 4: + if is_number.match(sys.argv[3]): + tolerance = float(sys.argv[3]) + else: + test_dirs = sys.argv[2:] + +if (genesis_command == ""-h"") or (genesis_command == ""--help""): + print("""""" usage: + $ ./test.py [""genesis command""] [parallel_io or tolerance_value or directories] + + examples: + # run tests using the default command (""mpirun -np 8 spdyn"") + $ ./test_fep.py + + # run spdyn tests + $ ./test_fep.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" + + # run gpu tests + $ ./test_fep.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" gpu + + # run with specfic tolerance value + $ ./test_fep.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" 0.1 + + """""") + sys.exit(3) + +genesis_command_split = genesis_command.split() +genesis_command_last = genesis_command_split[-1] +if not os.path.exists(os.path.expanduser(genesis_command_last)): + print(""Error: %s does not exist"" % genesis_command_last) + sys.exit(3) + +if (is_fugaku): + genesis_mpi_number = 8 +else: + is_mpi=False + genesis_mpi_number = -1 + for options in genesis_command_split: + if is_mpi: + genesis_mpi_number = int(options) + is_mpi=False + break + if options == ""-n"": + is_mpi = True + if options == ""-np"": + is_mpi = True + + if int(genesis_mpi_number) < 0: + genesis_mpi_number = genesis_command_split[-2] + + if not int(genesis_mpi_number) == 8: + print(""Error: Number of MPI processes %d should be 8"" % int(genesis_mpi_number)) + sys.exit(3) + +# if given path is relpath, change it to abspath +genesis_command_split[-1] = os.path.abspath(os.path.expanduser(genesis_command_last)) +genesis_command = "" 
"".join(genesis_command_split) + +if genesis_command[-5:] == ""spdyn"": + is_spdyn = True +if genesis_command[-5:] == ""atdyn"": + print(""FEP is not available in atdyn. Use spdyn"") + sys.exit(3) + +###### setup test directories + +if is_spdyn: + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_fep"") + +for dir in test_dirs: + if not os.path.exists(dir): + print(""Error: %s, this test directory does not exist"" % dir) + sys.exit(3) + +###### run tests +if (is_spdyn): + print(""======================================================================="") + print("" Regression tests for FEP"") + print(""======================================================================="") + + cwdname = os.getcwd() + for test_each in test_dirs: + dirname = test_each + # skip CUTOFF and minimization if gpu is on + if ""CUTOFF"" in dirname or ""MIN"" in dirname: + if is_gpu: + continue + if ""REMD"" in dirname: + if is_fugaku: + continue + os.chdir(cwdname) + if not os.path.isdir(dirname) : + continue + os.chdir(dirname) + itried = itried + 1 + + # run MD + print(""-----------------------------------------------------------------------"") + print(""Running %s..."" % (dirname + ""/"")) + + for fl in glob.glob(""test*""): + os.remove(fl) + for fl in glob.glob(""out*.fepout""): + os.remove(fl) + + inputname = ""inp"" + testname = ""test"" + if (is_fugaku): + commandline = '%s sh -c \""%s %s 1> %s 2> error\""' % (mpiexec_command, genesis_path, inputname, testname) + else: + commandline = '%s %s 1> %s 2> error' % (genesis_command, inputname, testname) + print(""$ %s"" % commandline) + status = subprocess.getstatusoutput(commandline) + + if (status[0] > 0) and (status[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + + # parse the result + #if status[0] == 0: + print() + print(""Checking %s"" % test_each) + print() + + test = Genesis() + is_empty = os.stat(testname).st_size == 0 + if (is_empty): + print() + 
print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + + + test.read(testname) + refname = ""ref"" + tolerance_cur = tolerance + if test.is_single and is_spdyn: + tolerance_cur = tolerance_single + if test.is_fujitsu: + tolerance_cur = tolerance_fujitsu + if test.is_fugaku and not test.is_single: + tolerance_cur = tolerance_fujitsu + + tolerance_cur_virial = tolerance_cur*virial_ratio + + if ""REMD"" in dirname or ""FEPREST"" in dirname: + num = 1 + ipassed_replica = 0 + ifailed_replica = 0 + + while num < 5: + ref = Genesis() + refname = ""ref%d"" % num + ref.read(refname) + testname = ""test%d"" % num + test = Genesis() + test.read(testname) + + # check the result + print() + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + #ref.test_diff_energies(test, tolerance) + ref.test_diff(test, tolerance_cur, tolerance_cur_virial) + print() + + if ref.is_passed: + ipassed_replica = ipassed_replica + 1 + else: + ifailed_replica = ifailed_replica + 1 + + num += 1 + # post-cleaning + #os.remove(testname) + + if (ifailed_replica == 0): + ipassed = ipassed + 1 + else: + ifailed = ifailed + 1 + + else: + ref = Genesis() + ref.read(refname) + + # check the result + print() + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + ref.test_diff(test, tolerance_cur, tolerance_cur_virial) + print() + + if os.path.exists(""ref.fepout""): + test_fepout = Fep() + testname = ""out.fepout"" + test_fepout.read(testname) + tolerance_cur = tolerance + if test.is_single and is_spdyn: + tolerance_cur = tolerance_single + + refname = ""ref.fepout"" + ref_fepout = Fep() + ref_fepout.read(refname) + + # check the fepout result + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + ref_fepout.test_diff(test_fepout, tolerance_cur) + print() + if ref.is_passed and ref_fepout.is_passed: + ipassed = ipassed + 1 + else: + ifailed = ifailed + 1 + else: + if ref.is_passed: + ipassed = ipassed + 1 + else: + 
ifailed = ifailed + 1 + + +###### finalization +if (itried > 0): + print(""-----------------------------------------------------------------------"") + print(""Passed %d / %d"" % (ipassed, itried)) + print(""Failed %d / %d"" % (ifailed, itried)) + print(""Aborted %d / %d"" % (iaborted, itried)) + print(""-----------------------------------------------------------------------"") + +if iaborted > 0: + sys.exit(2) +elif ifailed > 0: + sys.exit(1) +else: + sys.exit(0) + +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_gamd.py",".py","7498","253","#!/usr/bin/env python +# coding: utf-8 + +# +# A python script for GENESIS regression tests +# +# (c) Copyright 2022 RIKEN. All rights reserved. +# + +import subprocess +import os +import os.path +import sys +import copy +import random +import glob +import shutil +import re +from genesis import * + + +############### DEFINITION ################################## +def getdirs(path): + test_dirs = [] + for forcefield_dir in os.listdir(path): + forcefield_dir_path = os.path.join(path,forcefield_dir) + if os.path.isdir(forcefield_dir_path): + for test_dir in os.listdir(forcefield_dir_path): + test_dir_path = os.path.join(forcefield_dir_path,test_dir) + if os.path.isdir(test_dir_path): + if os.path.exists(test_dir_path + ""/inp"") and os.path.exists(test_dir_path + ""/ref""): + test_dirs.append(test_dir_path) + test_dirs.sort() + return test_dirs + +############### MAIN ######################################## + +###### initialization + +#os.environ[""OMP_NUM_THREADS""] = ""1"" +tolerance = 1.0e-8 # relative energy difference (diff/abs(e)) +tolerance_fujitsu = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_gpu_respa = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_fujitsu_weak = 1.0e-5 # relative energy difference (diff/abs(e)) +tolerance_single = 3.0e-5 # relative energy difference (diff/abs(e)) +virial_ratio = 1.0e2 + +ipassed = 0 +ifailed = 0 +iaborted = 0 +itried = 0 
+num = 0 + +is_atdyn = False +is_spdyn = False +is_parallelio = False +is_gpu = False +is_fugaku = False + +test_dirs = [] + +###### parse command line + +if len(sys.argv) == 1: + genesis_command = 'mpirun -np 8 atdyn' +elif len(sys.argv) == 2: + genesis_command = sys.argv[1] +else: + genesis_command = sys.argv[1] + is_number = re.compile(r'^[+-]?(\d*\.\d+|\d+\.?\d*)([eE][+-]?\d+|)\Z') + if is_number.match(sys.argv[2]): + tolerance = float(sys.argv[2]) + elif sys.argv[2] == ""parallel_io"": + is_parallelio = True + elif sys.argv[2] == ""fugaku"": + mpiexec_command = genesis_command.split(' ',2)[0] + genesis_path = genesis_command.split(' ',2)[1] + is_fugaku = True + if len(sys.argv) == 4: + if is_number.match(sys.argv[3]): + tolerance = float(sys.argv[3]) + else: + test_dirs = sys.argv[2:] + +if (genesis_command == ""-h"") or (genesis_command == ""--help""): + print("""""" usage: + $ ./test.py [""genesis command""] [parallel_io or tolerance_value or directories] + + examples: + # run tests using the default command (""mpirun -np 8 atdyn"") + $ ./test_gamd.py + + # run atdyn tests + $ ./test_gamd.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" + + # run spdyn tests + $ ./test_gamd.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" + + # run with specfic tolerance value + $ ./test_gamd.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" 0.1 + + """""") + sys.exit(3) + +genesis_command_split = genesis_command.split() +genesis_command_last = genesis_command_split[-1] +if not os.path.exists(os.path.expanduser(genesis_command_last)): + print(""Error: %s does not exist"" % genesis_command_last) + sys.exit(3) + +if (is_fugaku): + genesis_mpi_number = 8 +else: + is_mpi=False + genesis_mpi_number = -1 + for options in genesis_command_split: + if is_mpi: + genesis_mpi_number = int(options) + is_mpi=False + break + if options == ""-n"": + is_mpi = True + if options == ""-np"": + is_mpi = True + + if int(genesis_mpi_number) < 0: + genesis_mpi_number = genesis_command_split[-2] + + if not 
int(genesis_mpi_number) == 8: + print(""Error: Number of MPI processes %d should be 8"" % int(genesis_mpi_number)) + sys.exit(3) + +# if given path is relpath, change it to abspath +genesis_command_split[-1] = os.path.abspath(os.path.expanduser(genesis_command_last)) +genesis_command = "" "".join(genesis_command_split) + +if genesis_command[-5:] == ""atdyn"": + is_atdyn = True +elif genesis_command[-5:] == ""spdyn"": + is_spdyn = True + +###### setup test directories + +if is_atdyn: + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_gamd_atdyn"") +elif is_spdyn: + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_gamd_spdyn"") + +for dir in test_dirs: + if not os.path.exists(dir): + print(""Error: %s, this test directory does not exist"" % dir) + sys.exit(3) + +###### run tests +if (is_atdyn or is_spdyn): + print(""======================================================================="") + print("" Regression tests for GaMD"") + print(""======================================================================="") + + cwdname = os.getcwd() + for test_each in test_dirs: + itried = itried + 1 + os.chdir(cwdname) + dirname = test_each + if not os.path.isdir(dirname) : + continue + os.chdir(dirname) + + # run MD + print(""-----------------------------------------------------------------------"") + print(""Running %s..."" % (dirname + ""/"")) + + for fl in glob.glob(""test*""): + os.remove(fl) + if os.path.exists(""out.gamd""): + os.remove(""out.gamd"") + + inputname = ""inp"" + testname = ""test"" + if (is_fugaku): + commandline = '%s sh -c \""%s %s 1> %s 2> error\""' % (mpiexec_command, genesis_path, inputname, testname) + else: + commandline = '%s %s 1> %s 2> error' % (genesis_command, inputname, testname) + print(""$ %s"" % commandline) + status = subprocess.getstatusoutput(commandline) + + if (status[0] > 0) and (status[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + + # 
parse the result + #if status[0] == 0: + print() + print(""Checking %s"" % test_each) + print() + + test = Genesis() + is_empty = os.stat(testname).st_size == 0 + if (is_empty): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + + test.read(testname) + refname = ""ref"" + tolerance_cur = tolerance + if test.is_single and is_spdyn: + tolerance_cur = tolerance_single + if test.is_fujitsu: + tolerance_cur = tolerance_fujitsu + if test.is_fugaku and not test.is_single: + tolerance_cur = tolerance_fujitsu + + tolerance_cur_virial = tolerance_cur*virial_ratio + + ref = Genesis() + ref.read(refname) + + # check the result + print() + print(""Checking %s"" % test_each) + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + ref.test_diff(test, tolerance_cur, tolerance_cur_virial) + print() + if ref.is_passed: + ipassed = ipassed + 1 + else: + ifailed = ifailed + 1 + # post-cleaning + #os.remove(testname) + +###### finalization +if (itried > 0): + print(""-----------------------------------------------------------------------"") + print(""Passed %d / %d"" % (ipassed, itried)) + print(""Failed %d / %d"" % (ifailed, itried)) + print(""Aborted %d / %d"" % (iaborted, itried)) + print(""-----------------------------------------------------------------------"") + +if iaborted > 0: + sys.exit(2) +elif ifailed > 0: + sys.exit(1) +else: + sys.exit(0) + +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_rpath.py",".py","8437","280","#!/usr/bin/env python +# coding: utf-8 + +# +# A python script for GENESIS regression tests +# +# (c) Copyright 2022 RIKEN. All rights reserved. 
+# + +import subprocess +import os +import os.path +import sys +import copy +import random +import glob +import shutil +import re +from genesis import * + + +############### DEFINITION ################################## +def getdirs(path): + test_dirs = [] + for test_dir in os.listdir(path): + test_dir_path = os.path.join(path,test_dir) + if os.path.isdir(test_dir_path): + if os.path.exists(test_dir_path + ""/inp"") and os.path.exists(test_dir_path + ""/ref1""): + test_dirs.append(test_dir_path) + test_dirs.sort() + return test_dirs + +############### MAIN ######################################## + +###### initialization + +#os.environ[""OMP_NUM_THREADS""] = ""1"" +tolerance = 1.0e-8 # relative energy difference (diff/abs(e)) +tolerance_fujitsu = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_gpu_respa = 5.0e-6 # relative energy difference (diff/abs(e)) +tolerance_fujitsu_weak = 1.0e-5 # relative energy difference (diff/abs(e)) +tolerance_single = 3.0e-5 # relative energy difference (diff/abs(e)) +virial_ratio = 1.0e2 + +ipassed = 0 +ifailed = 0 +iaborted = 0 +itried = 0 +num = 0 + +is_atdyn = False +is_spdyn = False +is_parallelio = False +is_gpu = False +is_fugaku = False + +test_dirs = [] + +###### parse command line + +if len(sys.argv) == 1: + genesis_command = 'mpirun -np 8 atdyn' +elif len(sys.argv) == 2: + genesis_command = sys.argv[1] +else: + genesis_command = sys.argv[1] + is_number = re.compile(r'^[+-]?(\d*\.\d+|\d+\.?\d*)([eE][+-]?\d+|)\Z') + if is_number.match(sys.argv[2]): + tolerance = float(sys.argv[2]) + elif sys.argv[2] == ""parallel_io"": + is_parallelio = True + elif sys.argv[2] == ""fugaku"": + mpiexec_command = genesis_command.split(' ',2)[0] + genesis_path = genesis_command.split(' ',2)[1] + is_fugaku = True + if len(sys.argv) == 4: + if is_number.match(sys.argv[3]): + tolerance = float(sys.argv[3]) + else: + test_dirs = sys.argv[2:] + +if (genesis_command == ""-h"") or (genesis_command == ""--help""): + print("""""" usage: + $ 
./test.py [""genesis command""] [parallel_io or tolerance_value or directories] + + examples: + # run tests using the default command (""mpirun -np 8 atdyn"") + $ ./test_rpath.py + + # run atdyn tests + $ ./test_rpath.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" + + # run spdyn tests + $ ./test_rpath.py ""mpirun -np 8 /path/to/genesis/bin/spdyn"" + + # run with specfic tolerance value + $ ./test_rpath.py ""mpirun -np 8 /path/to/genesis/bin/atdyn"" 0.1 + + """""") + sys.exit(3) + +genesis_command_split = genesis_command.split() +genesis_command_last = genesis_command_split[-1] +if not os.path.exists(os.path.expanduser(genesis_command_last)): + print(""Error: %s does not exist"" % genesis_command_last) + sys.exit(3) + +if (is_fugaku): + genesis_mpi_number = 64 +else: + is_mpi=False + genesis_mpi_number = -1 + for options in genesis_command_split: + if is_mpi: + genesis_mpi_number = int(options) + is_mpi=False + break + if options == ""-n"": + is_mpi = True + if options == ""-np"": + is_mpi = True + + if int(genesis_mpi_number) < 0: + genesis_mpi_number = genesis_command_split[-2] + + if not int(genesis_mpi_number)%8 == 0: + print(""Error: %d should be multiplier of 8"" % int(genesis_mpi_number)) + sys.exit(3) + +# if given path is relpath, change it to abspath +genesis_command_split[-1] = os.path.abspath(os.path.expanduser(genesis_command_last)) +genesis_command = "" "".join(genesis_command_split) + +if genesis_command[-5:] == ""atdyn"": + is_atdyn = True +elif genesis_command[-5:] == ""spdyn"": + is_spdyn = True + +###### setup test directories + +if is_atdyn: + #test_dirs = getdirs(""test_rpath_atdyn"") + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_rpath_atdyn"") + + if (not int(genesis_mpi_number) == 8): + print(""WARNING: test of MEP is not available, please execute # of MPI=8"") + +elif is_spdyn: + #test_dirs = getdirs(""test_rpath_spdyn"") + test_dirs = getdirs(os.path.dirname(os.path.abspath(__file__)) + ""/test_rpath_spdyn"") + 
+for dir in test_dirs: + if not os.path.exists(dir): + print(""Error: %s, this test directory does not exist"" % dir) + sys.exit(3) + +###### run tests +if (is_atdyn or is_spdyn): + print(""======================================================================="") + print("" Regression tests for RPATH"") + print(""======================================================================="") + + cwdname = os.getcwd() + for test_each in test_dirs: + itried = itried + 1 + os.chdir(cwdname) + dirname = test_each + if not os.path.isdir(dirname) : + continue + if ((""tim"" in dirname) and int(genesis_mpi_number) != 8) : + continue + + os.chdir(dirname) + + # run MD + print(""-----------------------------------------------------------------------"") + print(""Running %s..."" % (dirname + ""/"")) + + for fl in glob.glob(""test*""): + os.remove(fl) + + inputname = ""inp"" + outname = ""log"" + if (is_fugaku): + commandline = '%s sh -c \""%s %s 1> %s 2> error\""' % (mpiexec_command, genesis_path, inputname, outname) + else: + commandline = '%s %s 1> %s 2> error' % (genesis_command, inputname, outname) + print(""$ %s"" % commandline) + status = subprocess.getstatusoutput(commandline) + + if (status[0] > 0) and (status[0] != 1024): + print() + print(""Aborted..."") + print() + iaborted = iaborted + 1 + continue + + # parse the result + #if status[0] == 0: + print() + print(""Checking %s"" % test_each) + print() + + num = 1 + ipassed_replica = 0 + ifailed_replica = 0 + + out = Genesis() + out.read(outname) + tolerance_cur = tolerance + if out.is_single and is_spdyn: + tolerance_cur = tolerance_single + if not out.is_single and is_spdyn and is_gpu and (""VRES"" in dirname): + tolerance_cur = tolerance_gpu_respa + if out.is_fujitsu: + tolerance_cur = tolerance_fujitsu + if out.is_fugaku and not out.is_single: + tolerance_cur = tolerance_fujitsu + if out.is_fujitsu and is_spdyn and (""tip4"" in dirname): + tolerance_cur = tolerance_fujitsu_weak + if out.is_fugaku and is_spdyn and 
(""tip4"" in dirname): + tolerance_cur = tolerance_fujitsu_weak + + tolerance_cur_virial = tolerance_cur*virial_ratio + nref=sum(1 for fname in glob.iglob(""./ref*"") if os.path.isfile(fname)) + + while num <= nref: + ref = Genesis() + refname = ""ref%d"" % num + ref.read(refname) + testname = ""test%d"" % num + test = Genesis() + test.read(testname) + is_empty = os.stat(testname).st_size == 0 + if (is_empty): + print() + print(""Aborted..."") + print() + ifailed_replica = ifailed_replica + 1 + continue + + # check the result + print() + print(""Checking diff between %s and %s..."" % (refname, testname)) + print() + #ref.test_diff_energies(test, tolerance) + ref.test_diff(test, tolerance_cur, tolerance_cur_virial) + print() + + if ref.is_passed: + ipassed_replica = ipassed_replica + 1 + else: + ifailed_replica = ifailed_replica + 1 + + num += 1 + # post-cleaning + #os.remove(testname) + + if (ifailed_replica == 0): + ipassed = ipassed + 1 + else: + ifailed = ifailed + 1 + + +###### finalization +if (itried > 0): + print(""-----------------------------------------------------------------------"") + print(""Passed %d / %d"" % (ipassed, itried)) + print(""Failed %d / %d"" % (ifailed, itried)) + print(""Aborted %d / %d"" % (iaborted, itried)) + print(""-----------------------------------------------------------------------"") + +if iaborted > 0: + sys.exit(2) +elif ifailed > 0: + sys.exit(1) +else: + sys.exit(0) + +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_atdyn/br/script/runGau.sh",".sh","262","16","#!/bin/bash + +# ----------------------------------------------- +# restart from given log and Fchk file +# ----------------------------------------------- + +QMINP=$1 +QMOUT=$2 +MOL=${QMINP%.*} + +if [ -e ${MOL}.Fchk ]; then + cp ${MOL}.Fchk gaussian.Fchk + exit 0 +fi + +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_atdyn/br/script/runqchem.sh",".sh","261","15","#!/bin/bash + +# 
----------------------------------------------- +# restart from given log and Fchk file +# ----------------------------------------------- + +QMINP=$1 +QMOUT=$2 +MOL=${QMINP%.*} + +if [ -e ${MOL}_efield.dat ]; then + cp ${MOL}_efield.dat efield.dat +fi + +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_atdyn/br/script/runTC.sh",".sh","189","12","#!/bin/bash + +# ----------------------------------------------- +# restart from given log and Fchk file +# ----------------------------------------------- + +QMINP=$1 +QMOUT=$2 +MOL=${QMINP%.*} + + +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_atdyn/br/script/runDFTB.sh",".sh","314","17","#!/bin/bash + +# ----------------------------------------------- +# restart from given log and Fchk file +# ----------------------------------------------- + +QMINP=$1 +QMOUT=$2 +MOL=${QMINP%.*} + +if [ -e ${MOL}_charges.bin ]; then + cp ${MOL}_charges.bin charges.bin + cp ${MOL}_detailed.out detailed.out + exit 0 +fi + +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_analysis/test_analysis.py",".py","12666","397","#!/usr/bin/env python + +# +# A python script for GENESIS analysis tool regression tests +# +# (c) Copyright 2014 RIKEN. All rights reserved. +# + +# Usage: +# +# $ ./test_analysis.py /home/user/genesis/bin/ +# +# With the default settings, this script scans the directory where +# it is located for folders starting with ""test_"" and generates +# a separate test class for each folder. +# The analysis program to run is determined from the folder name +# after removing the ""test_"" prefix. +# Each test class then searches for folders that contain no subdirectories +# and generates a separate test for each one of them. +# By default, a test folder must have an ""int"" input script +# and a ""ref"" file with the expected analysis output. 
+# An ""out"" output file must be produced after the execution +# of the analysis program inside the test folder, +# which is then compared with the ""ref"" file. +# Divergence tolerance, analysis program execution parameters +# and environmental variables can be set in ""config.ini"" files +# located inside the test folder or any of its parent folders. +# When same options are set in several folders, +# values set in children folders overwrite those set in parent folders. +# The ""config.ini"" format is identical to that of ""defaults.ini"". +# By default, the script will only search the test folder for +# analysis program executables, so directories containing +# analysis programs should be provided via command line arguments +# or the program_folder option in the configuration files. + +import unittest +import sys +import os +import os.path +import subprocess + +# python3 compatibility +try: + from ConfigParser import SafeConfigParser as ConfigParser +except ImportError: + from configparser import ConfigParser + + +# subclass ConfigParser to preserve case in option names +class CaseConfigParser(ConfigParser): + + def optionxform(self, option): + return option + +# a metaclass for TestAnalysisProgramBase that generates +# separate test_ methods for each bottommost directory under self.topdir +class TestAnalysisProgramMeta(type): + + def __new__(mcs, name, bases, dict): + + def generate_folder_test(folder): + + def test_folder(self): + + self.run_test_in_folder(folder) + + return test_folder + + # self.topdir will be defined dynamically + + if ""topdir"" in dict: + + topdir = dict[""topdir""] + + # generate a test for each bottommost folder + + for path, folders, files in os.walk(topdir): + + if len(folders) == 0: + + module_name = ""test_folder_"" + path + + dict[module_name] = generate_folder_test(path) + + + return type.__new__(mcs, name, bases, dict) + +# python3 metaclass compatibility +try: + exec(""""""class TestAnalysisProgramPreBase( + 
metaclass=TestAnalysisProgramMeta): pass"""""") +except SyntaxError: + class TestAnalysisProgramPreBase(object): + __metaclass__ = TestAnalysisProgramMeta + +class TestAnalysisProgramBase(TestAnalysisProgramPreBase): + + # folders where tests will look for analysis program executables + program_folders = [] + + # search for executables inside program_folders and current folder + @classmethod + def find_executable(cls, folder, config): + + section_settings = ""settings"" + + exe_name = config.get(section_settings, ""program_name"") + + search_paths = [folder] + cls.program_folders + + program_folder = config.get(section_settings, ""program_folder"") + if program_folder: + search_path.append(program_folder) + + for search_path in search_paths: + + search_path = os.path.abspath(search_path) + + path = os.path.join(search_path, exe_name) + if os.path.isfile(path) and os.access(path, os.X_OK): + return path + + raise Exception(""Executable '%s' not found. "" % (exe_name,) + + ""Did you set program_folder correctly by passing "" + + ""folder with analysis programs to "" + + ""%s or setting it in %s?"" % (sys.argv[0], cls.defaults_ini)) + + # compare numeric values in reference and output files + def check_output_file(self, folder, config): + + section_name = ""settings"" + ref_name = config.get(section_name, ""reference_name"") + out_name = config.get(section_name, ""output_name"") + + ref_path = os.path.join(folder, ref_name) + out_path = os.path.join(folder, out_name) + + ref_fobj = open(ref_path, ""r"") + out_fobj = open(out_path, ""r"") + + ref_lines = ref_fobj.readlines() + out_lines = out_fobj.readlines() + + ref_fobj.close() + out_fobj.close() + + self.assertEqual(len(ref_lines), len(out_lines), + ""Output file '%s' "" % (out_path,) + + ""has different number of lines than reference file"") + + tolerance = config.get(section_name, ""tolerance"").split() + tolerance = tuple(map(float, tolerance)) + minimal_tolerance = min(tolerance) + + for irow, (rline, oline) in 
enumerate(zip(ref_lines, out_lines)): + rdata = rline.split() + odata = oline.split() + + self.assertEqual(len(rdata), len(odata), + ""Different number of columns in "" + + ""'%s' at row %d"" % ( out_path, irow)) + + for icol, (rstr, ostr) in enumerate(zip(rdata, odata)): + + try: + rval = float(rstr) + except ValueError: + rval = None + + try: + oval = float(ostr) + except ValueError: + oval = None + + if oval is None: + self.assertEqual(rval, oval, + ""Could not convert to float at "" + + ""row %d, column %d"" % (irow, icol) + + "" in file '%s'"" % (out_path,)) + continue + + if rval is None: + continue + + try: + error = tolerance[icol] + except IndexError: + error = minimal_tolerance + + diff = abs(oval - rval) + self.assertTrue(diff <= error, + ""Divergence larger than tolerance "" + + ""(|%g - %g| = %g > %g) "" % (oval, rval, diff, error) + + ""at row %d, column %d "" % (irow, icol) + + ""in file '%s'"" % (out_path,)) + + + @classmethod + def get_configuration(cls, folder): + + # cls.defaults_ini will be appended dynamically + defaults = CaseConfigParser() + defaults.read(cls.defaults_ini) + + config_name = defaults.get(""settings"", ""config_name"") + + # store all possible configuration file paths in config_inis + config_inis = [] + path = folder + while True: + config_inis.insert(0, os.path.join(path, config_name)) + if os.path.samefile(path, cls.topdir): break + path = os.path.dirname(path) + config_inis.insert(0, cls.defaults_ini) + + config = CaseConfigParser() + config.read(config_inis) + + # set program name if blank + section_name = ""settings"" + option_name = ""program_name"" + + test_prefix = config.get(section_name, ""test_folder_prefix"") + + # determine program name from topdir if not explicitly set + if not config.get(section_name, option_name): + program_name = os.path.basename(cls.topdir) + if program_name.startswith(test_prefix): + program_name = program_name[len(test_prefix):] + config.set(section_name, option_name, program_name) + + 
return config + + # check if input and reference files are inside folder + @staticmethod + def check_test_folder(folder, config): + + section_name = ""settings"" + option_names = [""input_name"", ""reference_name""] + + for option_name in option_names: + file_name = config.get(section_name, option_name) + path = os.path.join(folder, file_name) + if not os.path.isfile(path): + raise Exception( + ""File '%s' does not exist in %s"" % (file_name, folder)) + + # remove output files from test folder, because most GENESIS analysis + # programs can't overwrite them + @staticmethod + def prepare_test_folder(folder, config): + + out_name = config.get(""settings"", ""output_name"") + out_path = os.path.join(folder, out_name) + + if os.path.exists(out_path): os.remove(out_path) + + + def execute_analysis_program_in_folder(self, folder, config): + + exe = self.find_executable(folder, config) + + # set program environment according to config + env = os.environ.copy() + section_env = ""environ"" + for option in config.options(section_env): + + # delete option if value is an empty string, + # set to vale otherwise + value = config.get(section_env, option) + if value: + env[option] = config.get(section_env, option) + else: + if option in env: del env[option] + + section_settings = ""settings"" + inp_name = config.get(section_settings, ""input_name"") + log_name = config.get(section_settings, ""stdout_name"") + err_name = config.get(section_settings, ""stderr_name"") + + log_path = os.path.join(folder, log_name) + err_path = os.path.join(folder, err_name) + + log_fobj = open(log_path, ""w"") + err_fobj = open(err_path, ""w"") + + args = [exe, inp_name] + + mpi_name = config.get(section_settings, ""mpi_name"") + + # use mpi execution if mpi_name is not an empty string + if mpi_name: + + mpi_num = config.get(section_settings, ""mpi_process_number"") + + args[0:0] = [mpi_name, ""-n"", mpi_num] + + proc = subprocess.Popen( + args, + stdout=log_fobj, stderr=err_fobj, + cwd=folder, + 
env=env) + + ret = proc.wait() + + log_fobj.close() + err_fobj.close() + + self.assertEqual(ret, 0, """"""Analysis program did not terminate normally. +Execution arguments: %s\n"""""" % ("" "".join(args),)) + + + # this method will be called by all dynamically generated test_ methods + def run_test_in_folder(self, folder): + + config = self.get_configuration(folder) + + self.check_test_folder(folder, config) + + self.prepare_test_folder(folder, config) + + self.execute_analysis_program_in_folder(folder, config) + + self.check_output_file(folder, config) + +# generates separate test class for each test_ folder +def generate_test_classes(): + + # use the folder where test.py is located + work_dir = os.path.dirname(os.path.abspath(os.path.realpath(__file__))) + + # default values are stored in default.ini + defaults_ini = os.path.join(work_dir, ""defaults.ini"") + + defaults = CaseConfigParser() + defaults.read(defaults_ini) + + test_prefix = defaults.get(""settings"", ""test_folder_prefix"") + + # create separate class for each folder + path, folders, files = next(os.walk(work_dir)) + + for folder in folders: + if not folder.startswith(test_prefix): continue + test_name = folder[len(test_prefix):] + class_name = ""Test_"" + test_name + globals()[class_name] = type(class_name, + (TestAnalysisProgramBase, unittest.TestCase), + dict( + topdir=os.path.join(path, folder), + defaults_ini=defaults_ini, + )) + +generate_test_classes() +# delete just in case, so it can never be called again +del generate_test_classes + +def main(args=None): + + import optparse + + usage = """"""Usage: %prog [-h] [program_folder ...] [-- unittest_arguments ...] 
+Unittest help: %prog -- -h + +Positional arguments: + program_folder folder where analysis programs are stored"""""" + + parser = optparse.OptionParser(usage) + + # arguments before ""--"" are for optparse + # arguments after ""--"" are for unittest + if args is None: + try: + idx = sys.argv.index(""--"") + except ValueError: + idx = len(sys.argv) + + # arguments for optparse + args = sys.argv[1:idx] + + # arguments for unittest + sys.argv = [sys.argv[0]] + sys.argv[idx+1:] + + # make varbose by default + if len(sys.argv) == 1: sys.argv.append(""-v"") + + (options, args) = parser.parse_args(args) + + for folder in args: + TestAnalysisProgramBase.program_folders.append( + os.path.abspath(os.path.realpath(folder))) + + unittest.main() + +if __name__ == ""__main__"": + main() +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_analysis/cleanup.sh",".sh","649","28","#!/bin/sh + +rm ./*/*/out +rm ./*/*/err +rm ./*/*/log + +rm ./*/*/*/out +rm ./*/*/*/err +rm ./*/*/*/log + +rm test_remd_convert/APP/*.log +rm test_remd_convert/APP/*.dcd +rm test_kmeans_clustering/BPTI/output* +rm test_flccrd_analysis/BPTI/output* +rm test_eigmat_analysis/APP/output* +rm test_rpath_generator/AdK/*.rst +rm test_rpath_generator/AdK/*.pdb +rm test_pathcv_analysis/path/*.pathcv +rm test_*/*/*.dat +rm test_spheres_generator/BPTI/*.* +rm test_add_ions/BPTI/*.* +rm test_add_metabolites/BPTI/*.* +rm test_ca_fitting/BPTI/*.* +rm test_collision_canceller/BPTI/*.* +rm test_solvate/BPTI/*.* +rm test_spheres_generator/BPTI/*.* +rm trajectories/BPTI_cssb/BPTI/*.* +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_analysis/trajectories/villin_crowding/toppar/toppar_c36_feb16/toppar/silicates/code/crystal.f",".f","3355","134","c +c +c ""crystal"" is a utility program which converts between +c fractional and Cartesian coordinates, and can generate +c full unit cells from asymmetric units +c +c + subroutine crystal(n,name,valence,x,y,z) + 
implicit none + + integer mxatm + parameter (mxatm=1000) + integer n + integer iout + integer valence(mxatm) + double precision radian + parameter (radian=57.295780d0) + double precision alpha,beta,gamma + double precision adim,bdim,cdim + double precision aleng(3),bleng(3),cleng(3) + double precision x(mxatm),y(mxatm),z(mxatm) + logical orthogonal,triclinic + character*4 name(mxatm) +c + call cell(adim,bdim,cdim,alpha,beta,gamma) +c +c + iout = 6 + + orthogonal = .false. + triclinic = .false. + + if (alpha.eq. 90.0d0 .and. beta .eq. 90.0d0 + $ .and. gamma.eq.90.0d0) then + orthogonal = .true. + else + triclinic = .true. + end if +c +c determina as coordenadas de cada crystal system +c + if (orthogonal) then + aleng(1) = adim + aleng(2) = 0.0d0 + aleng(3) = 0.0d0 + bleng(1) = 0.0d0 + bleng(2) = bdim + bleng(3) = 0.0d0 + cleng(1) = 0.0d0 + cleng(2) = 0.0d0 + cleng(3) = cdim + else if (triclinic) then + aleng(1) = adim*sin(alpha/radian) + aleng(2) = adim*cos(alpha/radian) + aleng(3) = 0.0d0 + bleng(1) = bdim*cos(gamma/radian) + bleng(2) = bdim*sin(gamma/radian) + bleng(3) = 0.0d0 + cleng(1) = 0.0d0 + cleng(2) = 0.0d0 + cleng(3) = cdim + end if +c +c print out the initial cell dimensions to be used +c + write (iout,10) adim,bdim,cdim,alpha,beta,gamma + 10 format (/,'Unit Cell Dimensions :' + $ /,' a =',f10.4, + $ /,' b =',f10.4, + $ /,' c =',f10.4, + $ /,' alpha =',f10.4, + $ /,' beta =',f10.4, + $ /,' gamma =',f10.4) +c +c replicate the unit cell to make a block of unit cells +c + call replicate + $ (n,name,valence,x,y,z,aleng,bleng,cleng) +c +c + return + end +c + subroutine replicate + $ (n,name,valence,x,y,z,aleng,bleng,cleng) + implicit none + + integer mxatm + parameter (mxatm=1000) + integer i,n,nunit !nunit number of atoms in unit cell PEML + integer valence(mxatm) + character*4 name(mxatm) + double precision aleng(3),bleng(3),cleng(3) + double precision x(mxatm),y(mxatm),z(mxatm) +c +c translate along XX +c + nunit = n + do i = 1, nunit + n = n + 1 + x(n) = 
x(i) + aleng(1) + y(n) = y(i) + aleng(2) + z(n) = z(i) + aleng(3) + + name(n) = name(i) + valence(n) = valence(i) + + end do +c +c translate along YY +c + do i = 1, nunit + n = n + 1 + x(n) = x(i) + bleng(1) + y(n) = y(i) + bleng(2) + z(n) = z(i) + bleng(3) + name(n) = name(i) + valence(n) = valence(i) + end do +c +c translate along XY +c + do i = 1, nunit + n = n + 1 + x(n) = x(i) + aleng(1) + bleng(1) + y(n) = y(i) + aleng(2) + bleng(2) + z(n) = z(i) + aleng(3) + bleng(3) + name(n) = name(i) + valence(n) = valence(i) + end do + + return + end +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_analysis/trajectories/villin_crowding/toppar/toppar_c36_feb16/toppar/silicates/code/patchfind.f",".f","20412","669","C ****************************************************************************** +C DISCLAIMER +C Everything that is free comes with NO warranty!!! +C Pedro E M Lopes +C ****************************************************************************** + program patchfind + implicit none + + integer mxatm + parameter (mxatm=1000) + integer mxcon + parameter (mxcon=8) + integer i,j,k,l + integer n,nunit + integer ia,ib,ic,id + integer ncryst + integer valence(mxatm) + integer ncon(mxatm),nval(mxatm) + integer icon(mxatm,mxcon),cel(mxatm,mxcon) + integer nbnd,nang,ntor + integer ibond(2000,2),bcel(2000,2) + integer iangl(2000,3),acel(2000,3) + integer itor(2000,4),tcel(2000,4) + integer indcel,jtrans + integer indx + integer celind(2000,8) + integer nprim,nxx,nxy,nyy,nxxyy,nxxxy,nyyxy,nxxyyxy + double precision radii(mxatm) + double precision dist, rcut + double precision x(mxatm),y(mxatm),z(mxatm) + character*4 name(mxatm) + logical primitive,similar + logical xx,yy,xy + logical skip,skip1,skip2,skip3,skip4 +c +c read the unit cell coordinates +c + open(3,file='input.xyz',status='old') + + read(3,*) n + do i = 1, n + read(3,*) name(i),x(i),y(i),z(i) + end do +c +c create a supercell by replicating the unit cell along X, Y and XY 
PEML +c + nunit = n ! NUNIT is the number of atoms in the unit cell PEML + + call crystal(n,name,valence,x,y,z) ! CRYSTAL replicates the unit cell along X, Y and XY PEML + + ncryst = n + + n = nunit + +c do i = 1, ncryst +c write(6,*) name(i),x(i),y(i),z(i) +c end do + +c +c determine valences of atoms +c + do i = 1, n + if ((name(i)(1:1) .eq. 'S') .or. (name(i)(1:1) .eq. 's')) + $ valence(i) = 4 + if ((name(i)(1:1) .eq. 'O') .or. (name(i)(1:1) .eq. 'o')) + $ valence(i) = 2 + if ((name(i)(1:1) .eq. 'H') .or. (name(i)(1:1) .eq. 'h')) + $ valence(i) = 1 + end do +c + do i = 1, ncryst + if ((name(i)(1:1) .eq. 'S') .or. (name(i)(1:1) .eq. 's')) + $ radii(i) = 1.09d0 + if ((name(i)(1:1) .eq. 'O') .or. (name(i)(1:1) .eq. 'o')) + $ radii(i) = 0.66d0 + if ((name(i)(1:1) .eq. 'H') .or. (name(i)(1:1) .eq. 'h')) + $ radii(i) = 0.30d0 + end do +c +c + do i = 1, mxatm + ncon(i) = 0 + nval(i) = 0 + do j = 1, mxcon + icon(i,j) = 0 + end do + end do +c + primitive = .false. + similar = .false. + do i = 1, n-1 + do j = i+1, n + rcut = 1.35d0*(radii(i)+radii(j)) + dist =sqrt((x(i)-x(j))**2 + (y(i)-y(j))**2 + (z(i)-z(j))**2) + if (dist .le. rcut) then + ncon(i) = ncon(i) + 1 + nval(i) = nval(i) + 1 + ncon(j) = ncon(j) + 1 + nval(j) = nval(j) + 1 + icon(i,ncon(i)) = j + cel(i,ncon(i)) = 1 + icon(j,ncon(j)) = i + cel(j,ncon(j)) = 1 + end if + end do + end do +c + do i = 1, n + do j = n+1, ncryst + rcut = 1.35d0*(radii(i)+radii(j)) + dist =sqrt((x(i)-x(j))**2 + + $ (y(i)-y(j))**2 + (z(i)-z(j))**2) + if (dist .le. 
rcut) then + indcel = int(j/n) + 1 + jtrans = j - (indcel-1)*n + ncon(i) = ncon(i) + 1 + nval(i) = nval(i) + 1 +c ncon(jtrans) = ncon(jtrans) + 1 +c nval(jtrans) = nval(jtrans) + 1 + icon(i,ncon(i)) = jtrans + cel(i,ncon(i)) = indcel +c icon(jtrans,ncon(jtrans)) = i +c cel(jtrans,ncon(jtrans)) = 1 + end if + end do + end do +c + do i = 1, 2000 + ibond(i,1) = 0 + ibond(i,2) = 0 + iangl(i,1) = 0 + iangl(i,2) = 0 + iangl(i,3) = 0 + itor(i,1) = 0 + itor(i,2) = 0 + itor(i,3) = 0 + itor(i,4) = 0 + end do +c + nbnd = 0 + do i = 1, n + ia = i + if (ncon(ia) .gt. 0) then + do j = 1, ncon(ia) + ib = icon(ia,j) + nbnd = nbnd + 1 + ibond(nbnd,1) = ia + ibond(nbnd,2) = ib + bcel(nbnd,1) = 1 + bcel(nbnd,2) = cel(ia,j) + + if (nbnd .ge. 2) then + skip = .false. + skip1 = .false. + skip2 = .false. + do k = 1, nbnd-1 + if ((ibond(k,1) .eq. ibond(nbnd,2)) .and. + $ (bcel(k,1) .eq. bcel(nbnd,2))) skip1 = .true. + if ((ibond(k,2) .eq. ibond(nbnd,1)) .and. + $ (bcel(k,2) .eq. bcel(nbnd,1))) skip2 = .true. + if (skip1 .and. skip2) skip = .true. + if (skip) then + nbnd = nbnd - 1 + skip = .false. + skip1 = .false. + skip2 = .false. + goto 550 + end if + end do + skip = .false. + skip1 = .false. + skip2 = .false. + do k = 1, nbnd-1 + if ((ibond(k,1) .eq. ibond(nbnd,1)) .and. + $ (bcel(k,1) .eq. bcel(nbnd,1))) skip1 = .true. + if ((ibond(k,2) .eq. ibond(nbnd,2)) .and. + $ (bcel(k,2) .eq. bcel(nbnd,2))) skip2 = .true. + if (skip1 .and. skip2) skip = .true. + if (skip) then + nbnd = nbnd - 1 + skip = .false. + skip1 = .false. + skip2 = .false. + goto 550 + end if + end do + 550 continue + end if + end do + end if + end do +c + nang = 0 + do i = 1, nbnd + ia = ibond(i,1) + ib = ibond(i,2) + if (ncon(ib) .gt. 0) then + do j = 1, ncon(ib) + ic = icon(ib,j) + if (ic .ne. 
ia) then + nang = nang + 1 + iangl(nang,1) = ia + iangl(nang,2) = ib + iangl(nang,3) = ic + acel(nang,1) = bcel(i,1) + acel(nang,2) = bcel(i,2) + acel(nang,3) = bcel(i,2) + cel(ib,j) - 1 + end if + end do + end if +c + if (ncon(ia) .gt. 0) then + do j = 1, ncon(ia) + ic = icon(ia,j) + if (ic .ne. ib) then + nang = nang + 1 + iangl(nang,1) = ic + iangl(nang,2) = ia + iangl(nang,3) = ib + acel(nang,1) = bcel(i,1) + cel(ia,j) - 1 + acel(nang,2) = bcel(i,1) + acel(nang,3) = bcel(i,2) + end if + end do + end if + end do +c + do i = 1, nang-1 + do k = i+1, nang + skip = .false. + skip1 = .false. + skip2 = .false. + skip3 = .false. + if ((iangl(i,1) .eq. iangl(k,1)) .and. + $ (acel(i,1) .eq. acel(k,1))) skip1 = .true. + if ((iangl(i,2) .eq. iangl(k,2)) .and. + $ (acel(i,2) .eq. acel(k,2))) skip2 = .true. + if ((iangl(i,3) .eq. iangl(k,3)) .and. + $ (acel(i,3) .eq. acel(k,3))) skip3 = .true. + if (skip1 .and. skip2 .and. skip3) skip = .true. + if (skip) then + do l = k, nang-1 + iangl(l,1) = iangl(l+1,1) + acel(l,1) = acel(l+1,1) + iangl(l,2) = iangl(l+1,2) + acel(l,2) = acel(l+1,2) + iangl(l,3) = iangl(l+1,3) + acel(l,3) = acel(l+1,3) + end do + nang = nang - 1 + end if + end do + end do + do i = 1, nang-1 + do k = i+1, nang + skip = .false. + skip1 = .false. + skip2 = .false. + skip3 = .false. + if ((iangl(i,1) .eq. iangl(k,3)) .and. + $ (acel(i,1) .eq. acel(k,3))) skip1 = .true. + if ((iangl(i,2) .eq. iangl(k,2)) .and. + $ (acel(i,2) .eq. acel(k,2))) skip2 = .true. + if ((iangl(i,3) .eq. iangl(k,1)) .and. + $ (acel(i,3) .eq. acel(k,1))) skip3 = .true. + if (skip1 .and. skip2 .and. skip3) skip = .true. + if (skip) then + do l = k, nang-1 + iangl(l,1) = iangl(l+1,1) + acel(l,1) = acel(l+1,1) + iangl(l,2) = iangl(l+1,2) + acel(l,2) = acel(l+1,2) + iangl(l,3) = iangl(l+1,3) + acel(l,3) = acel(l+1,3) + end do + nang = nang - 1 + end if + end do + end do +c + ntor = 0 + do i = 1, nang + ia = iangl(i,1) + ib = iangl(i,2) + ic = iangl(i,3) + if (ncon(ic) .gt. 
0) then + do j = 1, ncon(ic) + id = icon(ic,j) + if (id .ne. ib) then + ntor = ntor + 1 + itor(ntor,1) = ia + itor(ntor,2) = ib + itor(ntor,3) = ic + itor(ntor,4) = id + tcel(ntor,1) = acel(i,1) + tcel(ntor,2) = acel(i,2) + tcel(ntor,3) = acel(i,3) + tcel(ntor,4) = acel(i,3) + cel(ic,j) - 1 + end if + end do + end if +c + if (ncon(ia) .gt. 0) then + do j = 1, ncon(ia) + id = icon(ia,j) + if (id .ne. ib) then + ntor = ntor + 1 + itor(ntor,1) = id + itor(ntor,2) = ia + itor(ntor,3) = ib + itor(ntor,4) = ic + tcel(ntor,1) = acel(i,1) + cel(ia,j) - 1 + tcel(ntor,2) = acel(i,1) + tcel(ntor,3) = acel(i,2) + tcel(ntor,4) = acel(i,3) + end if + end do + end if + end do +c + do i = 1, ntor-1 + do k = i+1, ntor + skip = .false. + skip1 = .false. + skip2 = .false. + skip3 = .false. + skip4 = .false. + if ((itor(i,1) .eq. itor(k,1)) .and. + $ (tcel(i,1) .eq. tcel(k,1))) skip1 = .true. + if ((itor(i,2) .eq. itor(k,2)) .and. + $ (tcel(i,2) .eq. tcel(k,2))) skip2 = .true. + if ((itor(i,3) .eq. itor(k,3)) .and. + $ (tcel(i,3) .eq. tcel(k,3))) skip3 = .true. + if ((itor(i,4) .eq. itor(k,4)) .and. + $ (tcel(i,4) .eq. tcel(k,4))) skip4 = .true. + if (skip1 .and. skip2 .and. skip3 .and. skip4) skip = .true. + if (skip) then + do l = k, ntor-1 + itor(l,1) = itor(l+1,1) + tcel(l,1) = tcel(l+1,1) + itor(l,2) = itor(l+1,2) + tcel(l,2) = tcel(l+1,2) + itor(l,3) = itor(l+1,3) + tcel(l,3) = tcel(l+1,3) + itor(l,4) = itor(l+1,4) + tcel(l,4) = tcel(l+1,4) + end do + ntor = ntor - 1 + end if + end do + end do + do i = 1, ntor-1 + do k = i+1, ntor + skip = .false. + skip1 = .false. + skip2 = .false. + skip3 = .false. + skip4 = .false. + if ((itor(i,1) .eq. itor(k,4)) .and. + $ (tcel(i,1) .eq. tcel(k,4))) skip1 = .true. + if ((itor(i,2) .eq. itor(k,3)) .and. + $ (tcel(i,2) .eq. tcel(k,3))) skip2 = .true. + if ((itor(i,3) .eq. itor(k,2)) .and. + $ (tcel(i,3) .eq. tcel(k,2))) skip3 = .true. + if ((itor(i,4) .eq. itor(k,1)) .and. + $ (tcel(i,4) .eq. tcel(k,1))) skip4 = .true. + if (skip1 .and. 
skip2 .and. skip3 .and. skip4) skip = .true. + if (skip) then + do l = k, ntor-1 + itor(l,1) = itor(l+1,1) + tcel(l,1) = tcel(l+1,1) + itor(l,2) = itor(l+1,2) + tcel(l,2) = tcel(l+1,2) + itor(l,3) = itor(l+1,3) + tcel(l,3) = tcel(l+1,3) + itor(l,4) = itor(l+1,4) + tcel(l,4) = tcel(l+1,4) + end do + ntor = ntor - 1 + end if + end do + end do + +c +c PRINT SECTION +c +c +c print bonds +c + + write(6,'(///,a)')'Bonds inside unit cell' + do i = 1,nbnd + if (bcel(i,2) .eq. 1) then + write(6,200) ibond(i,1),ibond(i,2),bcel(i,1),bcel(i,2) + end if + end do + + write(6,'(//,a)')'Bonds along XX' + do i = 1,nbnd + if (bcel(i,2) .eq. 2) then + write(6,200)ibond(i,1),ibond(i,2),bcel(i,1),bcel(i,2) + end if + end do + write(6,'(/,a)')'Bonds along YY' + do i = 1,nbnd + if (bcel(i,2) .eq. 3) then + write(6,200)ibond(i,1),ibond(i,2),bcel(i,1),bcel(i,2) + end if + end do + write(6,'(/,a)')'Bonds along XY' + do i = 1,nbnd + if (bcel(i,2) .eq. 4) then + write(6,200)ibond(i,1),ibond(i,2),bcel(i,1),bcel(i,2) + end if + end do + 200 format (2i5,4x,2i3) + +c write(6,*)'Nang=',nang + + do i = 1, 2000 + celind(i,1) = 0 + celind(i,2) = 0 + celind(i,3) = 0 + celind(i,4) = 0 + celind(i,5) = 0 + celind(i,6) = 0 + celind(i,7) = 0 + celind(i,8) = 0 + end do + + nprim = 0 + nxx = 0 + nxy = 0 + nyy = 0 + nxxyy = 0 + nxxxy = 0 + nyyxy = 0 + do i = 1, nang + xx = .false. + xy = .false. + yy = .false. + do j = 1, 3 + if (acel(i,j) .eq. 2) xx = .true. + if (acel(i,j) .eq. 3) yy = .true. + if (acel(i,j) .eq. 4) xy = .true. + end do + + if (.not. xx .and. .not. yy .and. .not. xy) then + nprim = nprim + 1 + celind(nprim,1) = i + end if + + if (xx .and. .not. yy .and. .not. xy) then + nxx = nxx + 1 + celind(nxx,2) = i + end if + + if (yy .and. .not. xx .and. .not. xy) then + nyy = nyy + 1 + celind(nyy,3) = i + end if +c + if (xy .and. .not. xx .and. .not. yy) then + nxy = nxy + 1 + celind(nxy,4) = i + end if +c + if (xx .and. yy .and. .not. 
xy) then + nxxyy = nxxyy + 1 + celind(nxxyy,5) = i + end if +c + if (xx .and. .not. yy .and. xy) then + nxxxy = nxxxy + 1 + celind(nxxxy,6) = i + end if +c + if (.not. xx .and. yy .and. xy) then + nyyxy = nyyxy + 1 + celind(nyyxy,7) = i + end if + end do +c +c print angles +c + +c write(6,'(///,a)')'Angles inside unit cell' +c do i = 1, nprim +c indx = celind(i,1) +c write(6,201)(iangl(indx,k),k=1,3),(acel(indx,k),k=1,3) +c end do + + write(6,'(//,a)')'Angles along XX' + do i = 1, nxx + indx = celind(i,2) + write(6,201)(iangl(indx,k),k=1,3),(acel(indx,k),k=1,3) + end do + write(6,'(/,a)')'Angles along YY' + do i = 1, nyy + indx = celind(i,3) + write(6,201)(iangl(indx,k),k=1,3),(acel(indx,k),k=1,3) + end do + write(6,'(/,a)')'Angles along XY' + do i = 1, nxy + indx = celind(i,4) + write(6,201)(iangl(indx,k),k=1,3),(acel(indx,k),k=1,3) + end do + write(6,'(/,a)')'Angles along XX and YY' + do i = 1, nxxyy + indx = celind(i,5) + write(6,201)(iangl(indx,k),k=1,3),(acel(indx,k),k=1,3) + end do + write(6,'(/,a)')'Angles along XX and XY' + do i = 1, nxxxy + indx = celind(i,6) + write(6,201)(iangl(indx,k),k=1,3),(acel(indx,k),k=1,3) + end do + write(6,'(/,a)')'Angles along YY and XY' + do i = 1, nyyxy + indx = celind(i,7) + write(6,201)(iangl(indx,k),k=1,3),(acel(indx,k),k=1,3) + end do + + 201 format (3i5,4x,3i3) + + nang = nang - (nprim+nxx+nyy+nxy+nxxyy+nxxxy+nyyxy) + + if (nang .eq. 0) then + write(6,'(/,a)') 'All angles are accounted for' + else + write(6,202)'Angles missing: ',nang + write(6,'(/,a)')'You have to change the code, print all angles' + write(6,'(a)')'and see which ones are missing' + end if + 202 format (/,a16,i6) + +c write(6,*)'ntor=',ntor + + do i = 1, 2000 + celind(i,1) = 0 + celind(i,2) = 0 + celind(i,3) = 0 + celind(i,4) = 0 + celind(i,5) = 0 + celind(i,6) = 0 + celind(i,7) = 0 + celind(i,8) = 0 + end do + + nprim = 0 + nxx = 0 + nxy = 0 + nyy = 0 + nxxyy = 0 + nxxxy = 0 + nyyxy = 0 + nxxyyxy = 0 + do i = 1, ntor + xx = .false. + yy = .false. 
+ xy = .false. + do j = 1, 4 + if (tcel(i,j) .eq. 2) xx = .true. + if (tcel(i,j) .eq. 3) yy = .true. + if (tcel(i,j) .eq. 4) xy = .true. + end do +c + if (.not. xx .and. .not. yy .and. .not. xy) then + nprim = nprim + 1 + celind(nprim,1) = i + end if +c + if (xx .and. .not. yy .and. .not. xy) then + nxx = nxx + 1 + celind(nxx,2) = i + end if + + if (yy .and. .not. xx .and. .not. xy) then + nyy = nyy + 1 + celind(nyy,3) = i + end if +c + if (xy .and. .not. xx .and. .not. yy) then + nxy = nxy + 1 + celind(nxy,4) = i + end if +c + if (xx .and. yy .and. .not. xy) then + nxxyy = nxxyy + 1 + celind(nxxyy,5) = i + end if +c + if (xx .and. .not. yy .and. xy) then + nxxxy = nxxxy + 1 + celind(nxxxy,6) = i + end if +c + if (.not. xx .and. yy .and. xy) then + nyyxy = nyyxy + 1 + celind(nyyxy,7) = i + end if +c + if (xx .and. yy .and. xy) then + nxxyyxy = nxxyyxy + 1 + celind(nxxyyxy,8) = i + end if + end do + +c +c print torsions +c + +c write(6,'(///,a)')'Torsions inside unit cell' +c do i = 1, nprim +c indx = celind(i,1) +c write(6,203)(itor(indx,k),k=1,4),(tcel(indx,k),k=1,4) +c end do + + write(6,'(//,a)')'Torsions along XX' + do i = 1, nxx + indx = celind(i,2) + write(6,203)(itor(indx,k),k=1,4),(tcel(indx,k),k=1,4) + end do + write(6,'(/,a)')'Torsions along YY' + do i = 1, nyy + indx = celind(i,3) + write(6,203)(itor(indx,k),k=1,4),(tcel(indx,k),k=1,4) + end do + write(6,'(/,a)')'Torsions along XY' + do i = 1, nxy + indx = celind(i,4) + write(6,203)(itor(indx,k),k=1,4),(tcel(indx,k),k=1,4) + end do + write(6,'(/,a)')'Torsions along XX and YY' + do i = 1, nxxyy + indx = celind(i,5) + write(6,203)(itor(indx,k),k=1,4),(tcel(indx,k),k=1,4) + end do + write(6,'(/,a)')'Torsions along XX and XY' + do i = 1, nxxxy + indx = celind(i,6) + write(6,203)(itor(indx,k),k=1,4),(tcel(indx,k),k=1,4) + end do + write(6,'(/,a)')'Torsions along YY and XY' + do i = 1, nyyxy + indx = celind(i,7) + write(6,203)(itor(indx,k),k=1,4),(tcel(indx,k),k=1,4) + end do + write(6,'(/,a)')'Torsions along 
XX and XY and XY' + do i = 1, nxxyyxy + indx = celind(i,8) + write(6,203)(itor(indx,k),k=1,4),(tcel(indx,k),k=1,4) + end do + + 203 format (4i5,4x,4i3) + + ntor = ntor - (nprim+nxx+nyy+nxy+nxxyy+nxxxy+nyyxy+nxxyyxy) + + if (ntor .eq. 0) then + write(6,'(//,a)') 'All torsions are accounted for' + else + write(6,204)'Torsions missing: ',ntor + write(6,'(/,a)')'You have to change the code, print all angles' + write(6,'(a)')'and see which ones are missing' + end if + 204 format (//,a18,i6) + + end +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_analysis/trajectories/villin_crowding/toppar/toppar_c36_feb16/toppar/silicates/code/cell.f",".f","1187","41"," subroutine cell(alen,blen,clen,alpha,beta,gamma) + implicit none +c + double precision alpha,beta,gamma + double precision alen,blen,clen + integer iout +c +c initialize periodic lengths and angles +c + iout = 6 + + alen = 0.0d0 + blen = 0.0d0 + clen = 0.0d0 + alpha = 0.0d0 + beta = 0.0d0 + gamma = 0.0d0 + + open (11,file='crystal.dat',status='old') + read(11,*) alen,blen,clen,alpha,beta,gamma + close (11) +c +c checck unspecified periodic lengths and angles +c + if (alen .eq. 0.0d0) write(iout,'(/,a)') 'a lenght not set' + if (blen .eq. 0.0d0) write(iout,'(/,a)') 'b lenght not set' + if (clen .eq. 0.0d0) write(iout,'(/,a)') 'c lenght not set' + if (alpha .eq. 0.0d0) write(iout,'(/,a)') 'alpha not set' + if (beta .eq. 0.0d0) write(iout,'(/,a)') 'beta not set' + if (gamma .eq. 0.0d0) write(iout,'(/,a)') 'gamma not set' +c +c stop if lengths or angles are nought +c + if (alen.eq.0.0d0 .or. blen.eq.0.0d0 + & .or. clen.eq.90.0d0) stop + if (alpha.eq.0.0d0 .or. beta.eq.0.0d0 + & .or. 
gamma.eq.90.0d0) stop + + return + end +","Fortran" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_analysis/trajectories/umbrella_1d/convert_file.m",".m","534","20","center = 0:3:180; + +for i = 1:numel(center) + filename1 = sprintf('old/run_%d.dat', center(i)); + filename2 = sprintf('%d.dat', i); + command = sprintf('cp %s %s', filename1, filename2) + system(command); + + filename1 = sprintf('old/run_%d.out', center(i)); + filename2 = sprintf('%d.out', i); + command = sprintf('cp %s %s', filename1, filename2) + system(command); + + filename1 = sprintf('old/run_%d.nc', center(i)); + filename2 = sprintf('%d.dcd', i); + [trj, box] = readnetcdf(filename1); + writedcd(filename2, trj, box); +end + +","MATLAB" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_analysis/trajectories/umbrella_1d/generate_cv.m",".m","341","17","index = [9 15 17 19]; +center = 0:3:180; + +for i = 1:numel(center) + filename = sprintf('%d.dcd', i) + trj = readdcd(filename); + phi = calcdihedral(trj, index).*180./pi; + + filename = sprintf('%d.dat', i); + fid = fopen(filename, 'w'); + for istep = 1:numel(phi) + fprintf(fid, '%d %f\n', istep*5000, phi(istep)); + end + fclose(fid); +end + +","MATLAB" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_rpath_atdyn/tim_ImageParallel/script/runDFTB.sh",".sh","314","17","#!/bin/bash + +# ----------------------------------------------- +# restart from given log and Fchk file +# ----------------------------------------------- + +QMINP=$1 +QMOUT=$2 +MOL=${QMINP%.*} + +if [ -e ${MOL}_charges.bin ]; then + cp ${MOL}_charges.bin charges.bin + cp ${MOL}_detailed.out detailed.out + exit 0 +fi + +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_rpath_atdyn/tim_NEB/script/runDFTB.sh",".sh","314","17","#!/bin/bash + +# ----------------------------------------------- +# restart from given log and Fchk file +# ----------------------------------------------- + +QMINP=$1 
+QMOUT=$2 +MOL=${QMINP%.*} + +if [ -e ${MOL}_charges.bin ]; then + cp ${MOL}_charges.bin charges.bin + cp ${MOL}_detailed.out detailed.out + exit 0 +fi + +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_rpath_atdyn/tim_STRING/script/runDFTB.sh",".sh","314","17","#!/bin/bash + +# ----------------------------------------------- +# restart from given log and Fchk file +# ----------------------------------------------- + +QMINP=$1 +QMOUT=$2 +MOL=${QMINP%.*} + +if [ -e ${MOL}_charges.bin ]; then + cp ${MOL}_charges.bin charges.bin + cp ${MOL}_detailed.out detailed.out + exit 0 +fi + +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_spana/test_spana.py",".py","12666","397","#!/usr/bin/env python + +# +# A python script for GENESIS analysis tool regression tests +# +# (c) Copyright 2014 RIKEN. All rights reserved. +# + +# Usage: +# +# $ ./test_analysis.py /home/user/genesis/bin/ +# +# With the default settings, this script scans the directory where +# it is located for folders starting with ""test_"" and generates +# a separate test class for each folder. +# The analysis program to run is determined from the folder name +# after removing the ""test_"" prefix. +# Each test class then searches for folders that contain no subdirectories +# and generates a separate test for each one of them. +# By default, a test folder must have an ""int"" input script +# and a ""ref"" file with the expected analysis output. +# An ""out"" output file must be produced after the execution +# of the analysis program inside the test folder, +# which is then compared with the ""ref"" file. +# Divergence tolerance, analysis program execution parameters +# and environmental variables can be set in ""config.ini"" files +# located inside the test folder or any of its parent folders. +# When same options are set in several folders, +# values set in children folders overwrite those set in parent folders. 
+# The ""config.ini"" format is identical to that of ""defaults.ini"". +# By default, the script will only search the test folder for +# analysis program executables, so directories containing +# analysis programs should be provided via command line arguments +# or the program_folder option in the configuration files. + +import unittest +import sys +import os +import os.path +import subprocess + +# python3 compatibility +try: + from ConfigParser import SafeConfigParser as ConfigParser +except ImportError: + from configparser import ConfigParser + + +# subclass ConfigParser to preserve case in option names +class CaseConfigParser(ConfigParser): + + def optionxform(self, option): + return option + +# a metaclass for TestAnalysisProgramBase that generates +# separate test_ methods for each bottommost directory under self.topdir +class TestAnalysisProgramMeta(type): + + def __new__(mcs, name, bases, dict): + + def generate_folder_test(folder): + + def test_folder(self): + + self.run_test_in_folder(folder) + + return test_folder + + # self.topdir will be defined dynamically + + if ""topdir"" in dict: + + topdir = dict[""topdir""] + + # generate a test for each bottommost folder + + for path, folders, files in os.walk(topdir): + + if len(folders) == 0: + + module_name = ""test_folder_"" + path + + dict[module_name] = generate_folder_test(path) + + + return type.__new__(mcs, name, bases, dict) + +# python3 metaclass compatibility +try: + exec(""""""class TestAnalysisProgramPreBase( + metaclass=TestAnalysisProgramMeta): pass"""""") +except SyntaxError: + class TestAnalysisProgramPreBase(object): + __metaclass__ = TestAnalysisProgramMeta + +class TestAnalysisProgramBase(TestAnalysisProgramPreBase): + + # folders where tests will look for analysis program executables + program_folders = [] + + # search for executables inside program_folders and current folder + @classmethod + def find_executable(cls, folder, config): + + section_settings = ""settings"" + + exe_name = 
config.get(section_settings, ""program_name"") + + search_paths = [folder] + cls.program_folders + + program_folder = config.get(section_settings, ""program_folder"") + if program_folder: + search_path.append(program_folder) + + for search_path in search_paths: + + search_path = os.path.abspath(search_path) + + path = os.path.join(search_path, exe_name) + if os.path.isfile(path) and os.access(path, os.X_OK): + return path + + raise Exception(""Executable '%s' not found. "" % (exe_name,) + + ""Did you set program_folder correctly by passing "" + + ""folder with analysis programs to "" + + ""%s or setting it in %s?"" % (sys.argv[0], cls.defaults_ini)) + + # compare numeric values in reference and output files + def check_output_file(self, folder, config): + + section_name = ""settings"" + ref_name = config.get(section_name, ""reference_name"") + out_name = config.get(section_name, ""output_name"") + + ref_path = os.path.join(folder, ref_name) + out_path = os.path.join(folder, out_name) + + ref_fobj = open(ref_path, ""r"") + out_fobj = open(out_path, ""r"") + + ref_lines = ref_fobj.readlines() + out_lines = out_fobj.readlines() + + ref_fobj.close() + out_fobj.close() + + self.assertEqual(len(ref_lines), len(out_lines), + ""Output file '%s' "" % (out_path,) + + ""has different number of lines than reference file"") + + tolerance = config.get(section_name, ""tolerance"").split() + tolerance = tuple(map(float, tolerance)) + minimal_tolerance = min(tolerance) + + for irow, (rline, oline) in enumerate(zip(ref_lines, out_lines)): + rdata = rline.split() + odata = oline.split() + + self.assertEqual(len(rdata), len(odata), + ""Different number of columns in "" + + ""'%s' at row %d"" % ( out_path, irow)) + + for icol, (rstr, ostr) in enumerate(zip(rdata, odata)): + + try: + rval = float(rstr) + except ValueError: + rval = None + + try: + oval = float(ostr) + except ValueError: + oval = None + + if oval is None: + self.assertEqual(rval, oval, + ""Could not convert to float at 
"" + + ""row %d, column %d"" % (irow, icol) + + "" in file '%s'"" % (out_path,)) + continue + + if rval is None: + continue + + try: + error = tolerance[icol] + except IndexError: + error = minimal_tolerance + + diff = abs(oval - rval) + self.assertTrue(diff <= error, + ""Divergence larger than tolerance "" + + ""(|%g - %g| = %g > %g) "" % (oval, rval, diff, error) + + ""at row %d, column %d "" % (irow, icol) + + ""in file '%s'"" % (out_path,)) + + + @classmethod + def get_configuration(cls, folder): + + # cls.defaults_ini will be appended dynamically + defaults = CaseConfigParser() + defaults.read(cls.defaults_ini) + + config_name = defaults.get(""settings"", ""config_name"") + + # store all possible configuration file paths in config_inis + config_inis = [] + path = folder + while True: + config_inis.insert(0, os.path.join(path, config_name)) + if os.path.samefile(path, cls.topdir): break + path = os.path.dirname(path) + config_inis.insert(0, cls.defaults_ini) + + config = CaseConfigParser() + config.read(config_inis) + + # set program name if blank + section_name = ""settings"" + option_name = ""program_name"" + + test_prefix = config.get(section_name, ""test_folder_prefix"") + + # determine program name from topdir if not explicitly set + if not config.get(section_name, option_name): + program_name = os.path.basename(cls.topdir) + if program_name.startswith(test_prefix): + program_name = program_name[len(test_prefix):] + config.set(section_name, option_name, program_name) + + return config + + # check if input and reference files are inside folder + @staticmethod + def check_test_folder(folder, config): + + section_name = ""settings"" + option_names = [""input_name"", ""reference_name""] + + for option_name in option_names: + file_name = config.get(section_name, option_name) + path = os.path.join(folder, file_name) + if not os.path.isfile(path): + raise Exception( + ""File '%s' does not exist in %s"" % (file_name, folder)) + + # remove output files from test 
folder, because most GENESIS analysis + # programs can't overwrite them + @staticmethod + def prepare_test_folder(folder, config): + + out_name = config.get(""settings"", ""output_name"") + out_path = os.path.join(folder, out_name) + + if os.path.exists(out_path): os.remove(out_path) + + + def execute_analysis_program_in_folder(self, folder, config): + + exe = self.find_executable(folder, config) + + # set program environment according to config + env = os.environ.copy() + section_env = ""environ"" + for option in config.options(section_env): + + # delete option if value is an empty string, + # set to vale otherwise + value = config.get(section_env, option) + if value: + env[option] = config.get(section_env, option) + else: + if option in env: del env[option] + + section_settings = ""settings"" + inp_name = config.get(section_settings, ""input_name"") + log_name = config.get(section_settings, ""stdout_name"") + err_name = config.get(section_settings, ""stderr_name"") + + log_path = os.path.join(folder, log_name) + err_path = os.path.join(folder, err_name) + + log_fobj = open(log_path, ""w"") + err_fobj = open(err_path, ""w"") + + args = [exe, inp_name] + + mpi_name = config.get(section_settings, ""mpi_name"") + + # use mpi execution if mpi_name is not an empty string + if mpi_name: + + mpi_num = config.get(section_settings, ""mpi_process_number"") + + args[0:0] = [mpi_name, ""-n"", mpi_num] + + proc = subprocess.Popen( + args, + stdout=log_fobj, stderr=err_fobj, + cwd=folder, + env=env) + + ret = proc.wait() + + log_fobj.close() + err_fobj.close() + + self.assertEqual(ret, 0, """"""Analysis program did not terminate normally. 
+Execution arguments: %s\n"""""" % ("" "".join(args),)) + + + # this method will be called by all dynamically generated test_ methods + def run_test_in_folder(self, folder): + + config = self.get_configuration(folder) + + self.check_test_folder(folder, config) + + self.prepare_test_folder(folder, config) + + self.execute_analysis_program_in_folder(folder, config) + + self.check_output_file(folder, config) + +# generates separate test class for each test_ folder +def generate_test_classes(): + + # use the folder where test.py is located + work_dir = os.path.dirname(os.path.abspath(os.path.realpath(__file__))) + + # default values are stored in default.ini + defaults_ini = os.path.join(work_dir, ""defaults.ini"") + + defaults = CaseConfigParser() + defaults.read(defaults_ini) + + test_prefix = defaults.get(""settings"", ""test_folder_prefix"") + + # create separate class for each folder + path, folders, files = next(os.walk(work_dir)) + + for folder in folders: + if not folder.startswith(test_prefix): continue + test_name = folder[len(test_prefix):] + class_name = ""Test_"" + test_name + globals()[class_name] = type(class_name, + (TestAnalysisProgramBase, unittest.TestCase), + dict( + topdir=os.path.join(path, folder), + defaults_ini=defaults_ini, + )) + +generate_test_classes() +# delete just in case, so it can never be called again +del generate_test_classes + +def main(args=None): + + import optparse + + usage = """"""Usage: %prog [-h] [program_folder ...] [-- unittest_arguments ...] 
+Unittest help: %prog -- -h + +Positional arguments: + program_folder folder where analysis programs are stored"""""" + + parser = optparse.OptionParser(usage) + + # arguments before ""--"" are for optparse + # arguments after ""--"" are for unittest + if args is None: + try: + idx = sys.argv.index(""--"") + except ValueError: + idx = len(sys.argv) + + # arguments for optparse + args = sys.argv[1:idx] + + # arguments for unittest + sys.argv = [sys.argv[0]] + sys.argv[idx+1:] + + # make varbose by default + if len(sys.argv) == 1: sys.argv.append(""-v"") + + (options, args) = parser.parse_args(args) + + for folder in args: + TestAnalysisProgramBase.program_folders.append( + os.path.abspath(os.path.realpath(folder))) + + unittest.main() + +if __name__ == ""__main__"": + main() +","Python" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_spana/cleanup.sh",".sh","115","12","#!/bin/sh + +rm ./*/*/out +rm ./*/*/err +rm ./*/*/log + +rm ./*/*/*/out +rm ./*/*/*/err +rm ./*/*/*/log + +rm test_*/*/*.dat +","Shell" +"Genesis","genesis-release-r-ccs/genesis","tests/regression_test/test_vib/script/runGau.sh",".sh","262","16","#!/bin/bash + +# ----------------------------------------------- +# restart from given log and Fchk file +# ----------------------------------------------- + +QMINP=$1 +QMOUT=$2 +MOL=${QMINP%.*} + +if [ -e ${MOL}.Fchk ]; then + cp ${MOL}.Fchk gaussian.Fchk + exit 0 +fi + +","Shell" +"Genesis","li-lab-genetics/IGES_2025_Education_Workshop","scripts/Tutorial_scripts_part3.1_Generate_Rare_Variant_Summary_Statistics.R",".R","13819","268","################################################################################ +# IGES 2025 Education Workshop: Navigating Biobank Data: +# Advanced Strategies for Genetic Epidemiology Research +# Session 4: Practical Tutorial – A Walkthrough of Cross-Biobank Analysis +# Part 3: Meta-analysis of sequencing data using MetaSTAARlite +# - Part 3.1: Generate rare variant summary statistics for each study +# 
- More tutorial: https://github.com/li-lab-genetics/MetaSTAARlite-tutorial +# - Date: August 2025 +################################################################################ + +rm(list=ls()) +gc() + +### Setup repository +setwd(""/data/williamsjacr/IGES_2025_Education_Workshop/data/"") + +### load required package +library(gdsfmt) +library(SeqArray) +library(SeqVarTools) +library(STAAR) +library(STAARpipeline) +library(MetaSTAAR) +library(MetaSTAARlite) +library(Matrix) + +## agds file +agds_dir <- ""1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel_annotated.gds"" +## Annotation_dir +Annotation_dir <- ""annotation/info/FunctionalAnnotation"" +## Annotation channel +Annotation_name_catalog <- read.csv(url(""https://raw.githubusercontent.com/xihaoli/STAARpipeline-Tutorial/refs/heads/main/FAVORannotator_csv/Annotation_name_catalog.csv"")) + +## channel name of the QC label in the GDS/aGDS file +QC_label <- ""annotation/info/QC_label"" +## variant type +variant_type <- ""SNV"" +## geno_missing_imputation +geno_missing_imputation <- ""mean"" +## genes info +genes <- genes_info +## extract phenotype sample.id +pheno_cov <- read.table(""integrated_call_samples_v3.20130502.ALL.panel"",header=TRUE) +phenotype.id <- as.vector(pheno_cov$sample) +length(phenotype.id) +# [1] 2504 + +## Use_annotation_weights +Use_annotation_weights <- TRUE +## Annotation name (for variant weighting using the MetaSTAAR method) +Annotation_name <- c(""CADD"",""LINSIGHT"",""FATHMM.XF"",""aPC.EpigeneticActive"",""aPC.EpigeneticRepressed"",""aPC.EpigeneticTranscription"", + ""aPC.Conservation"",""aPC.LocalDiversity"",""aPC.Mappability"",""aPC.TF"",""aPC.Protein"") + +########################################## +# load phenotype and ancestry PCs +########################################## +### phenotype +pheno <- read.csv(""phenotype_LDLR_coding_APOE_noncoding.csv"") +### PCs +PCs <- read.csv(""1000G_PCA.csv"") + +pheno <- 
dplyr::left_join(pheno,PCs[,2:12],by=c(""sample""=""id"")) + +########################################## +# fit null model for each ""study"" +########################################## +obj_nullmodel_AFR <- fit_nullmodel(Y~gender+PC1+PC2+PC3+PC4+PC5+PC6+PC7+PC8+PC9+PC10, + data = pheno[pheno$super_pop==""AFR"",], kins = NULL, id = ""sample"", + family = gaussian(link = ""identity""), verbose=T) +obj_nullmodel_AMR <- fit_nullmodel(Y~gender+PC1+PC2+PC3+PC4+PC5+PC6+PC7+PC8+PC9+PC10, + data = pheno[pheno$super_pop==""AMR"",], kins = NULL, id = ""sample"", + family = gaussian(link = ""identity""), verbose=T) +obj_nullmodel_EAS <- fit_nullmodel(Y~gender+PC1+PC2+PC3+PC4+PC5+PC6+PC7+PC8+PC9+PC10, + data = pheno[pheno$super_pop==""EAS"",], kins = NULL, id = ""sample"", + family = gaussian(link = ""identity""), verbose=T) +obj_nullmodel_EUR <- fit_nullmodel(Y~gender+PC1+PC2+PC3+PC4+PC5+PC6+PC7+PC8+PC9+PC10, + data = pheno[pheno$super_pop==""EUR"",], kins = NULL, id = ""sample"", + family = gaussian(link = ""identity""), verbose=T) +obj_nullmodel_SAS <- fit_nullmodel(Y~gender+PC1+PC2+PC3+PC4+PC5+PC6+PC7+PC8+PC9+PC10, + data = pheno[pheno$super_pop==""SAS"",], kins = NULL, id = ""sample"", + family = gaussian(link = ""identity""), verbose=T) + + +##################################################### +# For each study, generate rare variant +# summary statistics using MetaSTAARlite worker +# Gene-Centric Coding: LDLR +##################################################### + +### run coding mask of LDLR +gene_name <- ""LDLR"" + +## genotype: chr +chr <- 19 +gds.path <- agds_dir +genofile <- seqOpen(gds.path) + +genes <- genes_info + +## AFR +coding_sumstat <- list() +coding_cov <- list() +results_coding_AFR <- coding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_AFR,gene=genes, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + 
Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +coding_sumstat[[gene_name]] <- results_coding_AFR$summary_stat_list +coding_cov[[gene_name]] <- results_coding_AFR$GTSinvG_rare_list + +save(coding_sumstat,file=""AFR_LDLR_coding_sumstat.Rdata"",compress = ""xz"") +save(coding_cov,file=""AFR_LDLR_coding_cov.Rdata"",compress = ""xz"") + +## AMR +coding_sumstat <- list() +coding_cov <- list() +results_coding_AMR <- coding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_AMR,gene=genes, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +coding_sumstat[[gene_name]] <- results_coding_AMR$summary_stat_list +coding_cov[[gene_name]] <- results_coding_AMR$GTSinvG_rare_list + +save(coding_sumstat,file=""AMR_LDLR_coding_sumstat.Rdata"",compress = ""xz"") +save(coding_cov,file=""AMR_LDLR_coding_cov.Rdata"",compress = ""xz"") + +## EAS +coding_sumstat <- list() +coding_cov <- list() +results_coding_EAS <- coding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_EAS,gene=genes, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +coding_sumstat[[gene_name]] <- results_coding_EAS$summary_stat_list +coding_cov[[gene_name]] <- results_coding_EAS$GTSinvG_rare_list + +save(coding_sumstat,file=""EAS_LDLR_coding_sumstat.Rdata"",compress = ""xz"") +save(coding_cov,file=""EAS_LDLR_coding_cov.Rdata"",compress = ""xz"") + +## EUR +coding_sumstat <- list() +coding_cov <- list() +results_coding_EUR <- 
coding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_EUR,gene=genes, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +coding_sumstat[[gene_name]] <- results_coding_EUR$summary_stat_list +coding_cov[[gene_name]] <- results_coding_EUR$GTSinvG_rare_list + +save(coding_sumstat,file=""EUR_LDLR_coding_sumstat.Rdata"",compress = ""xz"") +save(coding_cov,file=""EUR_LDLR_coding_cov.Rdata"",compress = ""xz"") + +## SAS +coding_sumstat <- list() +coding_cov <- list() +results_coding_SAS <- coding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_SAS,gene=genes, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +coding_sumstat[[gene_name]] <- results_coding_SAS$summary_stat_list +coding_cov[[gene_name]] <- results_coding_SAS$GTSinvG_rare_list + +save(coding_sumstat,file=""SAS_LDLR_coding_sumstat.Rdata"",compress = ""xz"") +save(coding_cov,file=""SAS_LDLR_coding_cov.Rdata"",compress = ""xz"") + +seqClose(genofile) + +######################################################## +#. 
For each study, generate rare variant +# summary statistics using MetaSTAARlite worker +# Gene-Centric Noncoding: APOE +######################################################## + +### run noncoding mask of APOE +gene_name <- ""APOE"" + +## genotype: chr +chr <- 19 +gds.path <- agds_dir +genofile <- seqOpen(gds.path) + +## AFR +noncoding_sumstat <- list() +noncoding_cov <- list() +results_noncoding_AFR <- noncoding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_AFR, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +noncoding_sumstat[[gene_name]] <- results_noncoding_AFR$summary_stat_list +noncoding_cov[[gene_name]] <- results_noncoding_AFR$GTSinvG_rare_list + +save(noncoding_sumstat,file=""AFR_APOE_noncoding_sumstat.Rdata"",compress = ""xz"") +save(noncoding_cov,file=""AFR_APOE_noncoding_cov.Rdata"",compress = ""xz"") + +## AMR +noncoding_sumstat <- list() +noncoding_cov <- list() +results_noncoding_AMR <- noncoding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_AMR, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +noncoding_sumstat[[gene_name]] <- results_noncoding_AMR$summary_stat_list +noncoding_cov[[gene_name]] <- results_noncoding_AMR$GTSinvG_rare_list + +save(noncoding_sumstat,file=""AMR_APOE_noncoding_sumstat.Rdata"",compress = ""xz"") +save(noncoding_cov,file=""AMR_APOE_noncoding_cov.Rdata"",compress = ""xz"") + +## EAS +noncoding_sumstat <- list() +noncoding_cov <- list() +results_noncoding_EAS <- 
noncoding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_EAS, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +noncoding_sumstat[[gene_name]] <- results_noncoding_EAS$summary_stat_list +noncoding_cov[[gene_name]] <- results_noncoding_EAS$GTSinvG_rare_list + +save(noncoding_sumstat,file=""EAS_APOE_noncoding_sumstat.Rdata"",compress = ""xz"") +save(noncoding_cov,file=""EAS_APOE_noncoding_cov.Rdata"",compress = ""xz"") + +## EUR +noncoding_sumstat <- list() +noncoding_cov <- list() +results_noncoding_EUR <- noncoding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_EUR, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +noncoding_sumstat[[gene_name]] <- results_noncoding_EUR$summary_stat_list +noncoding_cov[[gene_name]] <- results_noncoding_EUR$GTSinvG_rare_list + +save(noncoding_sumstat,file=""EUR_APOE_noncoding_sumstat.Rdata"",compress = ""xz"") +save(noncoding_cov,file=""EUR_APOE_noncoding_cov.Rdata"",compress = ""xz"") + +## SAS +noncoding_sumstat <- list() +noncoding_cov <- list() +results_noncoding_SAS <- noncoding_MetaSTAARlite_worker(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel_SAS, + cov_maf_cutoff=0.05, + QC_label=QC_label,variant_type=variant_type, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +noncoding_sumstat[[gene_name]] <- results_noncoding_SAS$summary_stat_list +noncoding_cov[[gene_name]] <- results_noncoding_SAS$GTSinvG_rare_list + 
+save(noncoding_sumstat,file=""SAS_APOE_noncoding_sumstat.Rdata"",compress = ""xz"") +save(noncoding_cov,file=""SAS_APOE_noncoding_cov.Rdata"",compress = ""xz"") + +seqClose(genofile) + +","R" +"Genesis","li-lab-genetics/IGES_2025_Education_Workshop","scripts/Tutorial_scripts_part1.1_CommonVariant_Prep.R",".R","3992","86","################################################################################ +# IGES 2025 Education Workshop: Navigating Biobank Data: +# Advanced Strategies for Genetic Epidemiology Research +# Session 4: Practical Tutorial – A Walkthrough of Cross-Biobank Analysis +# Part 4: Common Variant PRS using summary statistics +# - Part 4.1: Obtaining common variants and setting up summary statistics +# - Date: August 2025 +################################################################################ + +rm(list=ls()) + +library(bigsnpr) +library(dplyr) +library(stringr) + +gc() + +setwd(""/data/williamsjacr/IGES_2025_Education_Workshop/data/"") + +## extract phenotype sample.id +pheno_cov <- read.table(""integrated_call_samples_v3.20130502.ALL.panel"",header=TRUE) +phenotype.id <- as.vector(pheno_cov$sample) +## write a keep file to filter vcf +write.table(phenotype.id,file = paste0(""Keep.txt""),row.names = FALSE,col.names = FALSE,quote = FALSE) +## filter vcf to the 2504 participants and MAF >= 0.05 +system(""/data/williamsjacr/software/plink2 --vcf 1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel.vcf.gz --keep Keep.txt --maf 0.05 --make-bed --out chr19"") + +## create a rds file and bk file with bigsnpr +if(file.exists(""chr19.rds"")){ + file.remove(""chr19.rds"") + file.remove(""chr19.bk"") +} +snp_readBed(""chr19.bed"") + +## using the bim file generate create a simulated response and a summary statistics file +chr19_bim <- read.delim(""chr19.bim"", header=FALSE) + +obj.bigSNP <- snp_attach(""chr19.rds"") +G <- obj.bigSNP$genotypes # genotype matrix + +## create a simulated response +set.seed(1440) +simulated_beta <- 
rep(0, length.out = nrow(chr19_bim)) +simulated_beta[sample(1:nrow(chr19_bim),5000)] <- rnorm(5000) +simulated_beta <- data.frame(SNP = chr19_bim[,2],pos = chr19_bim[,4], ref = chr19_bim[,5], alt = chr19_bim[,6], beta = simulated_beta,se = runif(nrow(chr19_bim),0.5,5),pval = runif(nrow(chr19_bim),0,1), n = 2504) + +Trait_of_Interest <- big_prodVec(G, simulated_beta$beta) +chr19_fam <- read.delim(""chr19.fam"", header=FALSE) +Phenotype_Data <- data.frame(IID = chr19_fam[,2],Y = Trait_of_Interest) + +write.csv(Phenotype_Data,file = ""Phenotype_CommonVariant.csv"",row.names = FALSE) + +## create a fake external biobank summary statistics + +SNPinfo <- read.csv(""SNPInfo_GrCH38.csv"") + +SNPinfo_38 <- data.frame(rsid = SNPinfo$rsid,unique_id1 = paste0(SNPinfo$chr,""_"",SNPinfo$pos38,""_"",SNPinfo$allele1_38,""_"",SNPinfo$allele2_38), + unique_id2 = paste0(SNPinfo$chr,""_"",SNPinfo$pos38,""_"",SNPinfo$allele2_38,""_"",SNPinfo$allele1_38)) +rm(SNPinfo) + +gc() + +## use rsids instead of GrCH38 + +simulated_beta$unique_id <- paste0(19,""_"",simulated_beta$pos,""_"",simulated_beta$ref,""_"",simulated_beta$alt) +sumstats_modified <- left_join(simulated_beta,SNPinfo_38,by = c(""unique_id"" = ""unique_id1"")) +sumstats_modified$SNP[!is.na(sumstats_modified$rsid)] <- sumstats_modified$rsid[!is.na(sumstats_modified$rsid)] +sumstats_modified <- subset(sumstats_modified,select = -c(rsid)) +sumstats_modified <- left_join(sumstats_modified,SNPinfo_38,by = c(""unique_id"" = ""unique_id2"")) +sumstats_modified$SNP[!is.na(sumstats_modified$rsid)] <- sumstats_modified$rsid[!is.na(sumstats_modified$rsid)] +sumstats_modified <- sumstats_modified[str_detect(sumstats_modified$SNP,""rs""),] +sumstats_modified <- sumstats_modified[,c(""SNP"",""ref"",""alt"",""beta"",""se"",""pval"",""n"")] +colnames(sumstats_modified) <- c(""RSID"",""REF"",""ALT"",""BETA"",""SE"",""PVAL"",""N"") +sumstats_modified <- sumstats_modified[!duplicated(sumstats_modified$RSID),] + +## swap some alleles for 
illustrative purposes + +swap_indexes <- sample(1:nrow(sumstats_modified),5000) +sumstats_crossbiobank <- sumstats_modified +sumstats_crossbiobank$ALT[swap_indexes] <- sumstats_modified$REF[swap_indexes] +sumstats_crossbiobank$REF[swap_indexes] <- sumstats_modified$ALT[swap_indexes] +sumstats_crossbiobank$BETA[swap_indexes] <- (-1)*sumstats_modified$BETA[swap_indexes] + +write.csv(sumstats_crossbiobank,file = ""SumStats_Biobank.csv"",row.names = FALSE) + +","R" +"Genesis","li-lab-genetics/IGES_2025_Education_Workshop","scripts/Tutorial_scripts_part2.2_Example_Simulated_Phenotype.R",".R","8781","295","################################################################################ +# IGES 2025 Education Workshop: Navigating Biobank Data: +# Advanced Strategies for Genetic Epidemiology Research +# Session 4: Practical Tutorial – A Walkthrough of Cross-Biobank Analysis +# Part 2: Rare variant analysis of sequencing data using STAARpipeline +# - Part 2.2: Simulated Phenotype (mimicking LDL-cholesterol) +# - Date: August 2025 +################################################################################ + +rm(list=ls()) +gc() + +### Setup repository +setwd(""/data/williamsjacr/IGES_2025_Education_Workshop/data/"") + +### load required package +library(gdsfmt) +library(SeqArray) +library(SeqVarTools) +library(TxDb.Hsapiens.UCSC.hg38.knownGene) +library(STAAR) +library(STAARpipeline) + +## gds file +agds_dir <- ""1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel_annotated.gds"" +## Annotation_dir +Annotation_dir <- ""annotation/info/FunctionalAnnotation"" +## Annotation channel +Annotation_name_catalog <- read.csv(url(""https://raw.githubusercontent.com/xihaoli/STAARpipeline-Tutorial/refs/heads/main/FAVORannotator_csv/Annotation_name_catalog.csv"")) + +## channel name of the QC label in the GDS/aGDS file +QC_label <- ""annotation/info/QC_label"" +## variant type +variant_type <- ""SNV"" +## geno_missing_imputation +geno_missing_imputation <- ""mean"" 
+## genes info +genes <- genes_info +## extract phenotype sample.id +pheno_cov <- read.table(""integrated_call_samples_v3.20130502.ALL.panel"",header=TRUE) +phenotype.id <- as.vector(pheno_cov$sample) +length(phenotype.id) +# [1] 2504 + +####################################################### +# Chr 19, LDLR, plof_ds +####################################################### + +## gene name +gene_name <- ""LDLR"" + +## genotype: chr +chr <- 19 +gds.path <- agds_dir +genofile <- seqOpen(gds.path) + +############ genotype +## get SNV id, position, REF, ALT (whole genome) +filter <- seqGetData(genofile, QC_label) +if(variant_type==""variant"") +{ + SNVlist <- filter == ""PASS"" +} + +if(variant_type==""SNV"") +{ + SNVlist <- (filter == ""PASS"") & isSNV(genofile) +} + +if(variant_type==""Indel"") +{ + SNVlist <- (filter == ""PASS"") & (!isSNV(genofile)) +} + +position <- as.numeric(seqGetData(genofile, ""position"")) +variant.id <- seqGetData(genofile, ""variant.id"") + +rm(filter) +gc() + + +### Gene +kk <- which(genes[,1]==gene_name) + +sub_start_loc <- genes[kk,3] +sub_end_loc <- genes[kk,4] + +is.in <- (SNVlist)&(position>=sub_start_loc)&(position<=sub_end_loc) +variant.id.gene <- variant.id[is.in] + +seqSetFilter(genofile,variant.id=variant.id.gene,sample.id=phenotype.id) + +## plof_ds +## Gencode_Exonic +GENCODE.EXONIC.Category <- seqGetData(genofile, paste0(Annotation_dir,Annotation_name_catalog$dir[which(Annotation_name_catalog$name==""GENCODE.EXONIC.Category"")])) +## Gencode +GENCODE.Category <- seqGetData(genofile, paste0(Annotation_dir,Annotation_name_catalog$dir[which(Annotation_name_catalog$name==""GENCODE.Category"")])) +## Meta.SVM.Pred +MetaSVM_pred <- seqGetData(genofile, paste0(Annotation_dir,Annotation_name_catalog$dir[which(Annotation_name_catalog$name==""MetaSVM"")])) + +variant.id.gene <- seqGetData(genofile, ""variant.id"") +lof.in.plof <- 
(GENCODE.EXONIC.Category==""stopgain"")|(GENCODE.EXONIC.Category==""stoploss"")|(GENCODE.Category==""splicing"")|(GENCODE.Category==""exonic;splicing"")|(GENCODE.Category==""ncRNA_splicing"")|(GENCODE.Category==""ncRNA_exonic;splicing"")|((GENCODE.EXONIC.Category==""nonsynonymous SNV"")&(MetaSVM_pred==""D"")) +variant.id.gene <- variant.id.gene[lof.in.plof] + +seqSetFilter(genofile,variant.id=variant.id.gene,sample.id=phenotype.id) + +## genotype id +id.genotype <- seqGetData(genofile,""sample.id"") +# id.genotype.match <- rep(0,length(id.genotype)) + +id.genotype.merge <- data.frame(id.genotype,index=seq(1,length(id.genotype))) +phenotype.id.merge <- data.frame(phenotype.id) +phenotype.id.merge <- dplyr::left_join(phenotype.id.merge,id.genotype.merge,by=c(""phenotype.id""=""id.genotype"")) +id.genotype.match <- phenotype.id.merge$index + +## Genotype +Geno <- seqGetData(genofile, ""$dosage"") +Geno <- Geno[id.genotype.match,,drop=FALSE] + +## impute missing +if(!is.null(dim(Geno))) +{ + if(dim(Geno)[2]>0) + { + if(geno_missing_imputation==""mean"") + { + Geno <- matrix_flip_mean(Geno)$Geno + } + if(geno_missing_imputation==""minor"") + { + Geno <- matrix_flip_minor(Geno)$Geno + } + } +} + +Geno_LDLR <- Geno +AF_LDLR <- apply(Geno_LDLR,2,mean)/2 +MAF_LDLR <- pmin(AF_LDLR,1-AF_LDLR) + +############ LDLR effect +c0_LDLR <- 0.25 +beta_LDLR <- -c0_LDLR * log10(MAF_LDLR) + +LDLR_Effect <- Geno_LDLR %*% beta_LDLR + +seqClose(genofile) + + +####################################################### +# Chr 19, APOE, enhancer_DHS +####################################################### + +## gene name +gene_name <- ""APOE"" + +## genotype: chr +chr <- 19 +gds.path <- agds_dir +genofile <- seqOpen(gds.path) + +############ genotype +## Enhancer +varid <- seqGetData(genofile, ""variant.id"") + +# Now extract the GeneHancer with rOCRs Signal Overlay +genehancerAnno <- seqGetData(genofile, 
paste0(Annotation_dir,Annotation_name_catalog$dir[which(Annotation_name_catalog$name==""GeneHancer"")])) +genehancer <- genehancerAnno!="""" + +rOCRsAnno <- seqGetData(genofile, paste0(Annotation_dir,Annotation_name_catalog$dir[which(Annotation_name_catalog$name==""DHS"")])) +rOCRs <- rOCRsAnno!="""" +rOCRsGeneHancervt <- rOCRsAnno!=""""&genehancerAnno!="""" +rOCRsGeneHanceridx <- which(rOCRsGeneHancervt,useNames=TRUE) +seqSetFilter(genofile,variant.id=varid[rOCRsGeneHanceridx]) + +# Variants that covered by whole GeneHancer without rOCRs overlap +genehancerSet <- seqGetData(genofile, paste0(Annotation_dir,Annotation_name_catalog$dir[which(Annotation_name_catalog$name==""GeneHancer"")])) +enhancerGene <- unlist(lapply(strsplit(genehancerSet,""=""),`[[`,4)) +enhancer2GENE <- unlist(lapply(strsplit(enhancerGene,"";""),`[[`,1)) +enhancervchr <- as.numeric(seqGetData(genofile,""chromosome"")) +enhancervpos <- as.numeric(seqGetData(genofile,""position"")) +enhancervref <- as.character(seqGetData(genofile,""$ref"")) +enhancervalt <- as.character(seqGetData(genofile,""$alt"")) +dfHancerrOCRsVarGene <- data.frame(enhancervchr,enhancervpos,enhancervref,enhancervalt,enhancer2GENE) + +rm(varid) +gc() + +## get SNV id +filter <- seqGetData(genofile, QC_label) +if(variant_type==""variant"") +{ + SNVlist <- filter == ""PASS"" +} + +if(variant_type==""SNV"") +{ + SNVlist <- (filter == ""PASS"") & isSNV(genofile) +} + +if(variant_type==""Indel"") +{ + SNVlist <- (filter == ""PASS"") & (!isSNV(genofile)) +} + +variant.id <- seqGetData(genofile, ""variant.id"") +variant.id.SNV <- variant.id[SNVlist] + +dfHancerrOCRsVarGene.SNV <- dfHancerrOCRsVarGene[SNVlist,] +dfHancerrOCRsVarGene.SNV$enhancervpos <- as.character(dfHancerrOCRsVarGene.SNV$enhancervpos) +dfHancerrOCRsVarGene.SNV$enhancervref <- as.character(dfHancerrOCRsVarGene.SNV$enhancervref) +dfHancerrOCRsVarGene.SNV$enhancervalt <- as.character(dfHancerrOCRsVarGene.SNV$enhancervalt) + +seqResetFilter(genofile) + 
+rm(dfHancerrOCRsVarGene) +gc() + +### Gene +is.in <- which(dfHancerrOCRsVarGene.SNV[,5]==gene_name) +variant.is.in <- variant.id.SNV[is.in] + +seqSetFilter(genofile,variant.id=variant.is.in,sample.id=phenotype.id) + +## genotype id +id.genotype <- seqGetData(genofile,""sample.id"") +# id.genotype.match <- rep(0,length(id.genotype)) + +id.genotype.merge <- data.frame(id.genotype,index=seq(1,length(id.genotype))) +phenotype.id.merge <- data.frame(phenotype.id) +phenotype.id.merge <- dplyr::left_join(phenotype.id.merge,id.genotype.merge,by=c(""phenotype.id""=""id.genotype"")) +id.genotype.match <- phenotype.id.merge$index + +## Genotype +Geno <- seqGetData(genofile, ""$dosage"") +Geno <- Geno[id.genotype.match,,drop=FALSE] + +## impute missing +if(!is.null(dim(Geno))) +{ + if(dim(Geno)[2]>0) + { + if(geno_missing_imputation==""mean"") + { + Geno <- matrix_flip_mean(Geno)$Geno + } + if(geno_missing_imputation==""minor"") + { + Geno <- matrix_flip_minor(Geno)$Geno + } + } +} + +Geno_APOE <- Geno +AF_APOE <- apply(Geno_APOE,2,mean)/2 +MAF_APOE <- pmin(AF_APOE,1-AF_APOE) + +Geno_APOE <- Geno_APOE[,MAF_APOE<0.01] +MAF_APOE <- MAF_APOE[MAF_APOE<0.01] + + +############ APOE effect +c0 <- 0.22 +beta_APOE <- -c0 * log10(MAF_APOE) + +set.seed(6) +# Generate two covariates +beta_dir_APOE <- 2*(runif(length(beta_APOE),0,1)<0.5) - 1 +beta_APOE <- beta_APOE*beta_dir_APOE + +APOE_Effect <- Geno_APOE %*% beta_APOE + +seqClose(genofile) + + +######################################################## +# Simulated Phenotype +######################################################## + +alpha0 <- 0 +alpha1 <- 0.5 + +set.seed(666) +# Generate two covariates +N <- length(phenotype.id) +sex <- pheno_cov$gender==""female"" + +# Generate error distributions +eps <- rnorm(N) + +Y <- alpha0 + alpha1 * sex + LDLR_Effect + APOE_Effect + eps + +pheno <- cbind(pheno_cov,Y) + +write.csv(pheno,file=""phenotype_LDLR_coding_APOE_noncoding.csv"",quote = FALSE,row.names = FALSE) +","R" 
+"Genesis","li-lab-genetics/IGES_2025_Education_Workshop","scripts/Tutorial_scripts_part1.3_METAL_CommonVariants.R",".R","1892","52","################################################################################ +# IGES 2025 Education Workshop: Navigating Biobank Data: +# Advanced Strategies for Genetic Epidemiology Research +# Session 4: Practical Tutorial – A Walkthrough of Cross-Biobank Analysis +# Part 4: Common Variant Meta-Analysis using Metal +# - Part 4.3: Creating summary statistics files to use with METAL +# - Date: August 2025 +################################################################################ + +rm(list=ls()) + +library(bigsnpr) +library(dplyr) + +gc() + +setwd(""/data/williamsjacr/IGES_2025_Education_Workshop/data/"") + +SumStats_Biobank1 <- read.csv(""SumStats_Biobank.csv"") + +SNPinfo <- read.csv(""SNPInfo_GrCH38.csv"") +SNPinfo <- data.frame(rsid = SNPinfo$rsid,chr = SNPinfo$chr, pos = SNPinfo$pos38) +SumStats_Biobank1 <- left_join(SumStats_Biobank1,SNPinfo, by = c(""RSID"" = ""rsid"")) + +SumStats_Biobank1 <- data.frame(SNP = SumStats_Biobank1$RSID, CHR = SumStats_Biobank1$chr, BP = SumStats_Biobank1$pos, + EA = SumStats_Biobank1$ALT,NEA = SumStats_Biobank1$REF, BETA = SumStats_Biobank1$BETA, + SE = SumStats_Biobank1$SE, P = SumStats_Biobank1$PVAL, N = SumStats_Biobank1$N) + +SumStats_Biobank2 <- SumStats_Biobank1[sample(1:nrow(SumStats_Biobank1),round(0.8*nrow(SumStats_Biobank1))),] + +write.table(SumStats_Biobank1,file = ""SumStats_Biobank1.txt"",sep = ""\t"",quote = FALSE,row.names = FALSE) +write.table(SumStats_Biobank2,file = ""SumStats_Biobank2.txt"",sep = ""\t"",quote = FALSE,row.names = FALSE) + +metal_script <- c( + ""SCHEME STDERR"", + ""MARKER SNP"", + ""ALLELE EA NEA"", + ""EFFECT BETA"", + ""STDERR SE"", + ""PVAL P"", + ""WEIGHT N"", + """", + ""PROCESS SumStats_Biobank1.txt"", + ""PROCESS SumStats_Biobank2.txt"", + """", + ""OUTFILE meta_results_ .tbl"", + ""ANALYZE"" +) + +writeLines(metal_script, 
""metal_script.txt"") + +# system(""./metal metal_script.txt"")","R" +"Genesis","li-lab-genetics/IGES_2025_Education_Workshop","scripts/Tutorial_scripts_part2.3_Examples_STAARpipeline.R",".R","7488","161","################################################################################ +# IGES 2025 Education Workshop: Navigating Biobank Data: +# Advanced Strategies for Genetic Epidemiology Research +# Session 4: Practical Tutorial – A Walkthrough of Cross-Biobank Analysis +# Part 2: Rare variant analysis of sequencing data using STAARpipeline +# - Part 2.3: WGS rare variant association analysis using STAARpipeline +# - More tutorial: https://github.com/xihaoli/STAARpipeline-tutorial +# - Date: August 2025 +################################################################################ + +rm(list=ls()) +gc() + +### Setup repository +setwd(""/data/williamsjacr/IGES_2025_Education_Workshop/data/"") + +### load required package +library(gdsfmt) +library(SeqArray) +library(SeqVarTools) +library(STAAR) +library(STAARpipeline) +library(STAARpipelineSummary) + +## agds file +agds_dir <- ""1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel_annotated.gds"" +## Annotation_dir +Annotation_dir <- ""annotation/info/FunctionalAnnotation"" +## Annotation channel +Annotation_name_catalog <- read.csv(url(""https://raw.githubusercontent.com/xihaoli/STAARpipeline-Tutorial/refs/heads/main/FAVORannotator_csv/Annotation_name_catalog.csv"")) + +## channel name of the QC label in the GDS/aGDS file +QC_label <- ""annotation/info/QC_label"" +## variant type +variant_type <- ""SNV"" +## geno_missing_imputation +geno_missing_imputation <- ""mean"" +## genes info +genes <- genes_info +## extract phenotype sample.id +pheno_cov <- read.table(""integrated_call_samples_v3.20130502.ALL.panel"",header=TRUE) +phenotype.id <- as.vector(pheno_cov$sample) +length(phenotype.id) +# [1] 2504 + +## Use_annotation_weights +Use_annotation_weights <- TRUE +## Annotation name (for variant 
weighting using the STAAR method) +Annotation_name <- c(""CADD"",""LINSIGHT"",""FATHMM.XF"",""aPC.EpigeneticActive"",""aPC.EpigeneticRepressed"",""aPC.EpigeneticTranscription"", + ""aPC.Conservation"",""aPC.LocalDiversity"",""aPC.Mappability"",""aPC.TF"",""aPC.Protein"") +## Annotation name (for variants info in summary) +Annotation_name_info <- c(""rs_num"",""GENCODE.Category"",""GENCODE.Info"",""GENCODE.EXONIC.Category"", + ""MetaSVM"",""GeneHancer"",""CAGE"",""DHS"",""CADD"",""LINSIGHT"",""FATHMM.XF"", + ""aPC.EpigeneticActive"",""aPC.EpigeneticRepressed"",""aPC.EpigeneticTranscription"", + ""aPC.Conservation"",""aPC.LocalDiversity"",""aPC.Mappability"",""aPC.TF"",""aPC.Protein"") + +## known loci: rs7412 (APOE), rs429358 (APOE), rs35136575 (APOE), rs12151108 (LDLR), rs688 (LDLR), rs6511720 (LDLR) +known_loci <- read.csv(""known_loci_info.csv"") +known_loci + +########################################## +# load phenotype and ancestry PCs +########################################## +### phenotype +pheno <- read.csv(""phenotype_LDLR_coding_APOE_noncoding.csv"") +### PCs +PCs <- read.csv(""1000G_PCA.csv"") + +pheno <- dplyr::left_join(pheno,PCs[,2:12],by=c(""sample""=""id"")) + +########################################## +# fit null model +########################################## +obj_nullmodel <- fit_nullmodel(Y~gender+super_pop+PC1+PC2+PC3+PC4+PC5+PC6+PC7+PC8+PC9+PC10, + data = pheno, kins = NULL, id = ""sample"", + family = gaussian(link = ""identity""), verbose=T) + + +########################################## +# Gene-Centric Coding: LDLR +########################################## + +### run coding mask of LDLR +gene_name <- ""LDLR"" + +## genotype: chr +chr <- 19 +gds.path <- agds_dir +genofile <- seqOpen(gds.path) + +results_coding <- Gene_Centric_Coding(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel, + rare_maf_cutoff=0.01,rv_num_cutoff=2, + 
QC_label=QC_label,variant_type=variant_type,geno_missing_imputation=geno_missing_imputation, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +results_coding + +## Conditional Analysis +category <- ""plof_ds"" +results_coding_cond <- Gene_Centric_Coding_cond(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel,category=category, + known_loci=known_loci,rare_maf_cutoff=0.01,rv_num_cutoff=2, + QC_label=QC_label,variant_type=variant_type,geno_missing_imputation=geno_missing_imputation, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +results_coding_cond + +## variants info for summary +results_coding_info <- Gene_Centric_Coding_Info(category=category,chr=chr,genofile=genofile,obj_nullmodel=obj_nullmodel,gene_name=gene_name,known_loci=known_loci, + QC_label=QC_label,variant_type=variant_type,geno_missing_imputation=geno_missing_imputation, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog,Annotation_name=Annotation_name_info) + +dim(results_coding_info) +# [1] 17 33 +head(results_coding_info) + +seqClose(genofile) + +########################################## +# Gene-Centric Noncoding: APOE +########################################## + +### run noncoding mask of APOE +gene_name <- ""APOE"" + +## genotype: chr +chr <- 19 +gds.path <- agds_dir +genofile <- seqOpen(gds.path) + +results_noncoding <- Gene_Centric_Noncoding(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel, + rare_maf_cutoff=0.01,rv_num_cutoff=2, + QC_label=QC_label,variant_type=variant_type,geno_missing_imputation=geno_missing_imputation, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + 
+results_noncoding + +## Conditional Analysis +category <- ""enhancer_DHS"" +results_noncoding_cond <- Gene_Centric_Noncoding_cond(chr=chr,gene_name=gene_name,genofile=genofile,obj_nullmodel=obj_nullmodel,category=category, + known_loci=known_loci,rare_maf_cutoff=0.01,rv_num_cutoff=2, + QC_label=QC_label,variant_type=variant_type,geno_missing_imputation=geno_missing_imputation, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) + +results_noncoding_cond + +## variants info for summary +results_noncoding_info <- Gene_Centric_Noncoding_Info(category=category,chr=chr,genofile=genofile,obj_nullmodel=obj_nullmodel,gene_name=gene_name,known_loci=known_loci, + QC_label=QC_label,variant_type=variant_type,geno_missing_imputation=geno_missing_imputation, + Annotation_dir=Annotation_dir,Annotation_name_catalog=Annotation_name_catalog,Annotation_name=Annotation_name_info) + +dim(results_noncoding_info) +# [1] 50 33 +head(results_noncoding_info) + +seqClose(genofile) + +","R" +"Genesis","li-lab-genetics/IGES_2025_Education_Workshop","scripts/Tutorial_scripts_part1.2_CommonVariants_PRS.R",".R","3396","76","################################################################################ +# IGES 2025 Education Workshop: Navigating Biobank Data: +# Advanced Strategies for Genetic Epidemiology Research +# Session 4: Practical Tutorial – A Walkthrough of Cross-Biobank Analysis +# Part 4: Common Variant PRS using different summary statistics +# - Part 4.2: Flipping summary statistics and testing if it worked +# - Date: August 2025 +################################################################################ + +rm(list=ls()) + +library(bigsnpr) +library(dplyr) + +gc() + +setwd(""/data/williamsjacr/IGES_2025_Education_Workshop/data/"") + +## create a rds file and bk file with bigsnpr +if(file.exists(""chr19.rds"")){ + file.remove(""chr19.rds"") + file.remove(""chr19.bk"") 
+} +snp_readBed(""chr19.bed"") + +SumStats_CrossBiobank <- read.csv(""SumStats_Biobank.csv"") + +Phenotype_CommonVariant <- read.csv(""Phenotype_CommonVariant.csv"") + +obj.bigSNP <- snp_attach(""chr19.rds"") +G <- obj.bigSNP$genotypes # genotype matrix + +## Fails as the files are not the same size +## In plink this would fail as the ids are not the same +# Test <- big_prodVec(G, SumStats_CrossBiobank$beta) + +## Use the bim file to generate a NULL summary statistics file +chr19_bim <- read.delim(""chr19.bim"", header=FALSE) +sumstats <- data.frame(SNP = chr19_bim[,2],pos = chr19_bim[,4], ref = chr19_bim[,5], alt = chr19_bim[,6], beta = 0) + +## need to join the biobanks summary statistics with our null summary file + +## first add chr and pos +SNPinfo <- read.csv(""SNPInfo_GrCH38.csv"") +SNPinfo <- data.frame(rsid = SNPinfo$rsid,chr = SNPinfo$chr, pos = SNPinfo$pos38) + +SumStats_CrossBiobank <- left_join(SumStats_CrossBiobank,SNPinfo, by = c(""RSID"" = ""rsid"")) + +## Next create a unique id to merge the null file and the biobank sumstats +sumstats$unique_id <- paste0(19,""_"",sumstats$pos,""_"",sumstats$ref,""_"",sumstats$alt) +SumStats_CrossBiobank$unique_id1 <- paste0(19,""_"",SumStats_CrossBiobank$pos,""_"",SumStats_CrossBiobank$REF,""_"",SumStats_CrossBiobank$ALT) +SumStats_CrossBiobank$unique_id2 <- paste0(19,""_"",SumStats_CrossBiobank$pos,""_"",SumStats_CrossBiobank$ALT,""_"",SumStats_CrossBiobank$REF) + +## Merge by the unique CHR_POS_REF_ALT ID to add the estimated coefficients to our NULL file +sumstats <- left_join(sumstats,SumStats_CrossBiobank[,c(""unique_id1"",""BETA"")],by = c(""unique_id"" = ""unique_id1"")) +sumstats$beta[!is.na(sumstats$BETA)] <- sumstats$BETA[!is.na(sumstats$BETA)] +sumstats <- subset(sumstats,select = -c(BETA)) +sumstats <- left_join(sumstats,SumStats_CrossBiobank[,c(""unique_id2"",""BETA"")],by = c(""unique_id"" = ""unique_id2"")) +sumstats$beta[!is.na(sumstats$BETA)] <- sumstats$BETA[!is.na(sumstats$BETA)] +sumstats <- 
subset(sumstats,select = -c(BETA)) + +## Calculate the PRS +Test <- big_prodVec(G, sumstats$beta) +all.equal(Test,Phenotype_CommonVariant$Y) +cor(Test,Phenotype_CommonVariant$Y) + +## However, the PRS above did not account for the mismatched alleles, if the reference allele is different (target data vs training data) we also have to flip beta's +sumstats$beta[sumstats$unique_id %in% SumStats_CrossBiobank$unique_id2] <- (-1)*sumstats$beta[sumstats$unique_id %in% SumStats_CrossBiobank$unique_id2] + +## This improves are accuracy with our response +Test <- big_prodVec(G, sumstats$beta) +all.equal(Test,Phenotype_CommonVariant$Y) +cor(Test,Phenotype_CommonVariant$Y) + +write.csv(sumstats,file = ""SumStats_Aligned.csv"", row.names = FALSE) +","R" +"Genesis","li-lab-genetics/IGES_2025_Education_Workshop","scripts/Tutorial_scripts_part3.2_Rare_Variant_Meta_Analysis.R",".R","5168","121","################################################################################ +# IGES 2025 Education Workshop: Navigating Biobank Data: +# Advanced Strategies for Genetic Epidemiology Research +# Session 4: Practical Tutorial – A Walkthrough of Cross-Biobank Analysis +# Part 3: Meta-analysis of sequencing data using MetaSTAARlite +# - Part 3.2: Rare variant meta-analysis using MetaSTAARlite +# - More tutorial: https://github.com/li-lab-genetics/MetaSTAARlite-tutorial +# - Date: August 2025 +################################################################################ + +rm(list=ls()) +gc() + +### Setup repository +setwd(""/data/williamsjacr/IGES_2025_Education_Workshop/data/"") + +### load required package +library(gdsfmt) +library(SeqArray) +library(SeqVarTools) +library(STAAR) +library(STAARpipeline) +library(MetaSTAAR) +library(MetaSTAARlite) +library(Matrix) + +##################################################### +# User Input +##################################################### +## Sample sizes of participating studies +sample.sizes <- c(661,347,504,503,489) + +## variant_type 
+variant_type <- ""SNV"" +## cov_maf_cutoff +cov_maf_cutoff <- c(0.05,0.05,0.05,0.05,0.05) + +## Use_annotation_weights +Use_annotation_weights <- TRUE +## Annotation name (for variant weighting using the MetaSTAAR method) +Annotation_name <- c(""CADD"",""LINSIGHT"",""FATHMM.XF"",""aPC.EpigeneticActive"",""aPC.EpigeneticRepressed"",""aPC.EpigeneticTranscription"", + ""aPC.Conservation"",""aPC.LocalDiversity"",""aPC.Mappability"",""aPC.TF"",""aPC.Protein"") + + +##################################################### +# Rare variant meta-analysis using MetaSTAARlite +# Gene-Centric Coding: LDLR +##################################################### +chr <- 19 + +### run coding mask of LDLR +gene_name <- ""LDLR"" + +## Directories of the study-specific summary statistics file folders +file.dir <- c("""","""","""","""","""") +file.prefix <- c(""AFR_LDLR_coding"", + ""AMR_LDLR_coding"", + ""EAS_LDLR_coding"", + ""EUR_LDLR_coding"", + ""SAS_LDLR_coding"") + +sumstat.file.path <- paste0(file.dir,file.prefix,""_sumstat.Rdata"") +cov.file.path <- paste0(file.dir,file.prefix,""_cov.Rdata"") +coding_sumstat_list <- sapply(sumstat.file.path, function(x) mget(load(x)), simplify = TRUE) +coding_cov_list <- sapply(cov.file.path, function(x) mget(load(x)), simplify = TRUE) + +genes <- genes_info + +coding_sumstat_gene_list <- lapply(sumstat.file.path, function(x) { + coding_sumstat_list[[paste0(x,"".coding_sumstat"")]][[gene_name]] +}) +coding_cov_gene_list <- lapply(cov.file.path, function(x) { + coding_cov_list[[paste0(x,"".coding_cov"")]][[gene_name]] +}) +results_coding_meta <- coding_MetaSTAARlite(chr=chr,gene_name=gene_name,genes=genes, + sample.sizes=sample.sizes,coding_sumstat_gene_list=coding_sumstat_gene_list, + coding_cov_gene_list=coding_cov_gene_list, + cov_maf_cutoff=cov_maf_cutoff, + rare_maf_cutoff=0.01,rv_num_cutoff=2, + variant_type=variant_type, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) +results_coding_meta + 
+##################################################### +# Rare variant meta-analysis using MetaSTAARlite +# Gene-Centric Noncoding: APOE +##################################################### +chr <- 19 + +### run noncoding mask of APOE +gene_name <- ""APOE"" + +## Directories of the study-specific summary statistics file folders +file.dir <- c("""","""","""","""","""") +file.prefix <- c(""AFR_APOE_noncoding"", + ""AMR_APOE_noncoding"", + ""EAS_APOE_noncoding"", + ""EUR_APOE_noncoding"", + ""SAS_APOE_noncoding"") + +sumstat.file.path <- paste0(file.dir,file.prefix,""_sumstat.Rdata"") +cov.file.path <- paste0(file.dir,file.prefix,""_cov.Rdata"") +noncoding_sumstat_list <- sapply(sumstat.file.path, function(x) mget(load(x)), simplify = TRUE) +noncoding_cov_list <- sapply(cov.file.path, function(x) mget(load(x)), simplify = TRUE) + +noncoding_sumstat_gene_list <- lapply(sumstat.file.path, function(x) { + noncoding_sumstat_list[[paste0(x,"".noncoding_sumstat"")]][[gene_name]] +}) +noncoding_cov_gene_list <- lapply(cov.file.path, function(x) { + noncoding_cov_list[[paste0(x,"".noncoding_cov"")]][[gene_name]] +}) +results_noncoding_meta <- noncoding_MetaSTAARlite(chr=chr,gene_name=gene_name, + sample.sizes=sample.sizes,noncoding_sumstat_gene_list=noncoding_sumstat_gene_list, + noncoding_cov_gene_list=noncoding_cov_gene_list, + cov_maf_cutoff=cov_maf_cutoff, + rare_maf_cutoff=0.01,rv_num_cutoff=2, + variant_type=variant_type, + Use_annotation_weights=Use_annotation_weights,Annotation_name=Annotation_name) +results_noncoding_meta + +","R" +"Genesis","li-lab-genetics/IGES_2025_Education_Workshop","scripts/Tutorial_scripts_part2.1_1000G_WGS_aGDS_Preparation.R",".R","3916","96","################################################################################ +# IGES 2025 Education Workshop: Navigating Biobank Data: +# Advanced Strategies for Genetic Epidemiology Research +# Session 4: Practical Tutorial – A Walkthrough of Cross-Biobank Analysis +# Part 2: Rare variant 
analysis of sequencing data using STAARpipeline +# - Part 2.1: Preparing 1000 Genome WGS annotated GDS files +# - Date: August 2025 +################################################################################ + +### Setup repository +setwd(""/data/williamsjacr/IGES_2025_Education_Workshop/data/"") +getOption('timeout') +options(timeout=200) + +### Download 1000 Genomes Project (1kGP) high-coverage Illumina integrated phased panel +# ETA: 1 minute +download.file(url = ""https://ftp.1000genomes.ebi.ac.uk/vol1/ftp/data_collections/1000G_2504_high_coverage/working/20220422_3202_phased_SNV_INDEL_SV/1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel.vcf.gz"", + destfile = ""1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel.vcf.gz"") + +### Convert 1000G WGS Data VCF files to Genomic Data Structure (GDS) files +library(gdsfmt) +library(SeqArray) + +# input_dir +vcf.fn <- ""1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel.vcf.gz"" + +# output_dir +out.fn <- ""1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel.gds"" + +# modify the header +h <- seqVCF_Header(vcf.fn) +# h$info +h$info$Number[h$info$ID==""SOURCE""] <- ""."" + +# ETA: 10 minutes +seqVCF2GDS(vcf.fn, out.fn, header = h, genotype.var.name = ""GT"", info.import=NULL, fmt.import=NULL, + ignore.chr.prefix=""chr"", raise.error=TRUE, verbose=TRUE) + +### Functionally annotate 1000G WGS Data GDS files to annotated GDS (aGDS) files +### using FAVOR database (https://favor.genohub.org/) and FAVORannotator +### The following scripts are used for the favorannotator app in DNAnexus +### There are also offline scripts for favorannotator available for use: +### (https://github.com/xihaoli/STAARpipeline-Tutorial#generate-annotated-gds-agds-file-using-favorannotator) + +# Upload 1000G WGS GDS files to DNAnexus RAP Cloud (Drag the files) + +#------------------------------------------------------------------- +# The following are dx-toolkit command 
line scripts, not R scripts +# Install DNAnexus Platform SDK (MacOS) +tar -xzf dx-toolkit-0.398.0.tar.gz +source dx-toolkit-0.398.0/environment + +##### Usage +cd dx-toolkit-0.398.0/bin +#dx upgrade + +##### Log in +dx login +#dx ls (here we assume to have a project named ukbb_lilab) + +# Clone this github repo to some directory: +git clone https://github.com/li-lab-genetics/favorannotator-rap.git + +# Navigate to a relevant directory within the project directory on the DNAnexus platform +dx cd UKB_PRS:/ + +# Compile the source code: +dx build -f favorannotator-rap + +# Create a new folder (UKB_PRS/IGES_2025_Education_Workshop/1000G/aGDS) +# on DNAnexus under the project directory +# Run favorannotator on DNAnexus (ETA: 26 minutes) +dx run UKB_PRS:/favorannotator \ +-igds_file=UKB_PRS:/IGES_2025_Education_Workshop/1000G/GDS/1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel.gds \ +-ichromosome=19 \ +-iuse_compression=YES \ +-ioutfile=1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel_annotated \ +--destination=UKB_PRS:/IGES_2025_Education_Workshop/1000G/aGDS --yes +#------------------------------------------------------------------- + +### Adds QC_label with all ""PASS"" to a post-QC aGDS file (R scripts) +gds.path <- ""1kGP_high_coverage_Illumina.chr19.filtered.SNV_INDEL_SV_phased_panel_annotated.gds"" +genofile <- seqOpen(gds.path, readonly = FALSE) +#genofile +position <- as.integer(seqGetData(genofile, ""position"")) +length(position) +Anno.folder <- index.gdsn(genofile, ""annotation/info"") +add.gdsn(Anno.folder, ""QC_label"", val=factor(rep(""PASS"", length(position))), compress=""LZMA_ra"", closezip=TRUE) +#genofile +seqClose(genofile) + + + + + +","R" +"Genesis","noinil/genesis_cg_tool","tools/interaction_check/cg_model_topology_check.jl",".jl","4153","130","#!/usr/bin/env julia + +using Printf +using ArgParse + +include(""../../src/lib/constants.jl"") +include(""../../src/lib/biomath.jl"") 
+include(""../../src/lib/topology.jl"") +include(""../../src/lib/conformation.jl"") +include(""../../src/lib/parser_top.jl"") +include(""../../src/lib/parser_crd.jl"") + +function main(args) + + top_filename = get(args, ""top"", """") + crd_filename = get(args, ""crd"", """") + + # ================================ + # Read in topology and coordinates + # ================================ + + mytop = read_grotop(top_filename) + mycrd = read_grocrd(crd_filename) + + # =============== + # Structure check + # =============== + + # ----------- + # bond length + # ----------- + println(""===================================================================================================="") + for bond in mytop.top_bonds + if bond.r0 < 3.0 + @printf(""Short bond length (< 3.0Å): %10d %3s %10d %3s - %6.3fÅ \n"", + bond.i, mytop.top_atoms[bond.i].atom_type, + bond.j, mytop.top_atoms[bond.j].atom_type, + bond.r0) + end + if bond.r0 > 6.0 + @printf(""Long bond length (> 6.0Å): %10d %3s %10d %3s - %6.3fÅ \n"", + bond.i, mytop.top_atoms[bond.i].atom_type, + bond.j, mytop.top_atoms[bond.j].atom_type, + bond.r0) + end + + end + + # ----- + # angle + # ----- + println(""===================================================================================================="") + for angle in mytop.top_angles + if angle.function_type < 20 && ! ( 15.0 < angle.a0 < 165.0 ) + @printf(""Large angle (close to 0 or π): %10d %3s %10d %3s %10d %3s - %6.3f° \n"", + angle.i, mytop.top_atoms[angle.i].atom_type, + angle.j, mytop.top_atoms[angle.j].atom_type, + angle.k, mytop.top_atoms[angle.k].atom_type, + angle.a0) + elseif angle.function_type == 21 && ! 
(0.0129 < angle.a0 > 0.7471) + coor_i = mycrd.coors[:, angle.i] + coor_j = mycrd.coors[:, angle.j] + coor_k = mycrd.coors[:, angle.k] + ang = compute_angle(coor_i, coor_j, coor_k) + @printf(""Large angle (close to π): %10d %3s %10d %3s %10d %3s - %6.3f° \n"", + angle.i, mytop.top_atoms[angle.i].atom_type, + angle.j, mytop.top_atoms[angle.j].atom_type, + angle.k, mytop.top_atoms[angle.k].atom_type, + ang) + end + end + + # --------------- + # native contacts + # --------------- + println(""===================================================================================================="") + for contact in mytop.top_pairs + if contact.r0 < 4.0 + @printf(""Short contact (< 4.0Å): %10d %3s %10d %3s - %6.3fÅ \n"", + contact.i, mytop.top_atoms[contact.i].atom_type, + contact.j, mytop.top_atoms[contact.j].atom_type, + contact.r0) + end + if contact.r0 > 18.0 + @printf(""Long contact (> 18.0Å): %10d %3s %10d %3s - %6.3fÅ \n"", + contact.i, mytop.top_atoms[contact.i].atom_type, + contact.j, mytop.top_atoms[contact.j].atom_type, + contact.r0) + end + end + + println(""===================================================================================================="") +end + +# ============================= +# Parsing Commandline Arguments +# ============================= +function parse_commandline() + s = ArgParseSettings() + + @add_arg_table s begin + + ""--top"", ""-t"" + help = ""Topology file name (gromacs style)."" + required = true + arg_type = String + + ""--crd"", ""-c"" + help = ""Coordinate file name (gromacs style)."" + required = true + arg_type = String + + ""--debug"" + help = ""DEBUG."" + action = :store_true + end + + return parse_args(s) +end + + + +if abspath(PROGRAM_FILE) == @__FILE__ + + args = parse_commandline() + + main(args) + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/analysis/genesis_log.jl",".jl","7536","244","#!/usr/bin/env julia + +using Printf +using ArgParse + +function 
read_genesis_log(genesis_log_filename::AbstractString) + + # ---------------------------- + # read in lines from data file + # ---------------------------- + # + info_line = """" + data_lines = [] + for line in eachline(genesis_log_filename) + if startswith(line, ""INFO:"") + if length(info_line) > 0 + push!(data_lines, line) + else + keywords = split(line) + if !isdigit(keywords[1][1]) + info_line = line + end + end + end + end + + num_steps = length(data_lines) + + # ---------------- + # Extract keywords + # ---------------- + keywords = split(info_line)[2:end] + + num_keywords = length(keywords) + keyword_dict = Dict(keywords .=> [i for i = 1 : num_keywords]) + + # println(keyword_dict) + + # ------------------------ + # Extract time series data + # ------------------------ + data_matrix = zeros(Float64, (num_steps, num_keywords)) + + for ( i, line ) in enumerate( data_lines ) + words = split(line)[2:end] + new_vec = [parse(Float64, w) for w in words] + data_matrix[i, :] = new_vec + end + + return (keyword_dict, data_matrix) +end + +function read_genesis_remd_log(genesis_remd_log_filename::AbstractString) + + par_dim_1 = [] + i_step = [] + exchange_data = [] + param_data = [] + replica_data = [] + + # ---------------------------- + # read in lines from data file + # ---------------------------- + # + rep2par_map_lines = [] + par2rep_map_lines = [] + exchange_info_lines = [] + + is_setup_block = false + is_log_block = false + is_exchange_block = false + for line in eachline(genesis_remd_log_filename) + if startswith(line, ""Setup_Remd>"") + is_setup_block = true + end + if startswith(line, ""[STEP5]"") + is_setup_block = false + is_log_block = true + end + if is_setup_block + words = split(line) + if length(words) > 0 && words[1] == ""Dim"" + par_dim_1 = [parse(Float64, w) for w in words[4:end]] + end + end + if is_log_block + words = split(line) + if length(words) < 1 + continue + end + if words[1] == ""RepIDtoParmID:"" + push!(rep2par_map_lines, line) + elseif 
words[1] == ""ParmIDtoRepID:"" + push!(par2rep_map_lines, line) + elseif words[1] == ""REMD>"" && words[2] == ""Step:"" + push!(i_step, parse(Int, words[3])) + elseif words[1] == ""Replica"" && words[2] == ""ExchangeTrial"" + is_exchange_block = true + exchange_info_lines = [] + continue + elseif words[1] == ""Parameter"" + is_exchange_block = false + end + if is_exchange_block + push!(exchange_info_lines, line) + end + end + end + + # ----------------- + # basic information + # ----------------- + num_frames = length(rep2par_map_lines) + num_params = length(par_dim_1) + + # ================ + # acceptance ratio + # ================ + exchange_from = Vector{Int}(undef, 0) + exchange_to = Vector{Int}(undef, 0) + exchange_ratio = Vector{Float64}(undef, 0) + for line in exchange_info_lines + words = split(line) + e_from = parse(Int, words[2]) + e_to = parse(Int, words[4]) + e_ratio = parse(Float64, words[6]) / parse(Float64, words[8]) + if e_from < e_to + push!(exchange_from, e_from) + push!(exchange_to, e_to) + push!(exchange_ratio, e_ratio) + end + end + sorted_indx = sortperm(exchange_from) + push!(exchange_data, exchange_from[sorted_indx]) + push!(exchange_data, exchange_to[sorted_indx]) + push!(exchange_data, exchange_ratio[sorted_indx]) + + # =============================== + # specific param on different rep + # =============================== + param_data = zeros(Int, (num_frames, 1 + num_params)) + for ( i, line ) in enumerate( par2rep_map_lines ) + words = split(line)[2:end] + new_vec = [parse(Int, w) for w in words] + param_data[i, 1] = i_step[i] + param_data[i, 2:end] = new_vec[:] + end + + # ===================================== + # specific replica with different param + # ===================================== + replica_data = zeros(Int, (num_frames, 1 + num_params)) + for ( i, line ) in enumerate( rep2par_map_lines ) + words = split(line)[2:end] + new_vec = [parse(Int, w) for w in words] + replica_data[i, 1] = i_step[i] + replica_data[i, 2:end] = 
new_vec[:] + end + + return (par_dim_1, exchange_data, param_data, replica_data) + +end + +if abspath(PROGRAM_FILE) == @__FILE__ + using ArgParse + + function parse_commandline() + s = ArgParseSettings() + + @add_arg_table s begin + ""--remd"", ""-R"" + help = ""Analyze REMD results."" + action = :store_true + + ""--remd-main-log"" + help = ""File name of the master log of REMD."" + arg_type = String + default = """" + + ""--output"", ""-o"" + help = ""File name of the output."" + arg_type = String + default = """" + end + + return parse_args(s) + end + + args = parse_commandline() + + run_remd_analysis = get(args, ""remd"", false) + remd_main_log_fname = args[""remd-main-log""] + if length(args[""output""]) > 0 + remd_system_name = args[""output""] + else + remd_system_name = remd_main_log_fname[1:end-4] + end + + if run_remd_analysis + # read the master log file + (param_dim_1, exchange_data, param_data, replica_data) = read_genesis_remd_log(remd_main_log_fname) + + # -------------------- + # output exchange data + # -------------------- + remd_exchange_data_fname = remd_system_name * ""_exchange.dat"" + remd_exchange_of = open(remd_exchange_data_fname, ""w"") + for i in 1:length(exchange_data[1]) + @printf(remd_exchange_of, "" %3d <-> %3d : %3.1f %% \n"", exchange_data[1][i], exchange_data[2][i], exchange_data[3][i] * 100) + end + close(remd_exchange_of) + + # -------------------------------- + # output parameter on each replica + # -------------------------------- + remd_par_on_rep_fname = remd_system_name * ""_par_on_rep.dat"" + remd_par_on_rep_of = open(remd_par_on_rep_fname, ""w"") + for t in 1:size(replica_data)[1] + @printf(remd_par_on_rep_of, "" %16d "", replica_data[t, 1]) + for p in replica_data[t, 2:end] + @printf(remd_par_on_rep_of, "" %3d "", p) + end + @printf(remd_par_on_rep_of, "" \n"") + end + close(remd_par_on_rep_of) + + # --------------------------------- + # output replica for each parameter + # --------------------------------- + 
remd_rep_on_par_fname = remd_system_name * ""_rep_on_par.dat"" + remd_rep_on_par_of = open(remd_rep_on_par_fname, ""w"") + for t in 1:size(param_data)[1] + @printf(remd_rep_on_par_of, "" %16d "", param_data[t, 1]) + for p in param_data[t, 2:end] + @printf(remd_rep_on_par_of, "" %3d "", p) + end + @printf(remd_rep_on_par_of, "" \n"") + end + close(remd_rep_on_par_of) + + end + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/analysis/plot_remd_replica_ts_param_change.py",".py","2742","67","#!/usr/bin/env python3 + +import numpy as np +import matplotlib.pyplot as plt + +def main(remd_par_on_rep_fname, fig_all_in_one, rep_indices): + remd_rep_ts_data = np.loadtxt(remd_par_on_rep_fname) + + num_timesteps = remd_rep_ts_data[-1, 0] + num_replicas = np.shape(remd_rep_ts_data)[1] - 1 + + remd_timesteps = remd_rep_ts_data[:, 0] + remd_parameters = remd_rep_ts_data[:, 1:] + + if len(rep_indices) == 0: + rep_indices = [i for i in range(num_replicas)] + + # ================== + # axis tick settings + # ================== + ax_tk_dx = num_timesteps / 5 + ax_tk_power = int( np.log10(num_timesteps) ) + ax_tk_ticks = [i * ax_tk_dx for i in range(6)] + ax_tk_tick_lbs = [round( tk / 10**ax_tk_power, 3 ) for tk in ax_tk_ticks] + + # ========= + # Plotting! 
+ # ========= + if fig_all_in_one: + fig, ax = plt.subplots(1, 1, figsize=(9, 5), constrained_layout=True) + for i, j in enumerate( rep_indices ): + ax.plot(remd_timesteps, remd_parameters[:, j], c=[i / len(rep_indices), 0, 1 - i / len(rep_indices)]) + ax.set_xticks(ax_tk_ticks) + ax.set_xticklabels(ax_tk_tick_lbs, fontsize=12) + ax.set_xlim(0, num_timesteps) + ax.set_xlabel(r""MD steps ($10^{0}$)"".format(ax_tk_power), fontsize=16) + ax.set_ylim(1, num_replicas) + + figname = remd_par_on_rep_fname[:-4] + ""_all_in_one.svg"" + plt.savefig(figname) + else: + fig, axes = plt.subplots(len(rep_indices), 1, figsize=(9, 2 * len(rep_indices)**0.5), constrained_layout=True, sharex=True, sharey=False) + for i, j in enumerate( rep_indices ): + axes[i].plot(remd_timesteps, remd_parameters[:, j], c=[i / len(rep_indices), 0, 1 - i / len(rep_indices)]) + axes[i].set_xticks(ax_tk_ticks) + axes[i].set_xlim(0, num_timesteps) + axes[i].set_ylim(1, num_replicas) + axes[-1].set_xticklabels(ax_tk_tick_lbs, fontsize=12) + axes[-1].set_xlabel(r""MD steps ($10^{0}$)"".format(ax_tk_power), fontsize=16) + + figname = remd_par_on_rep_fname[:-4] + ""_share_x.svg"" + plt.savefig(figname) + +if __name__ == '__main__': + import argparse + + def parse_arguments(): + parser = argparse.ArgumentParser(description='Plot replica/parameter walk in param/rep space.') + parser.add_argument('filename', type=str, help=""file name of remd log"") + parser.add_argument('-A', '--all-in-one', help=""plot the all-in-one figure or not. 
(Default: no)"", action='store_true') + parser.add_argument('-n', '--rep-ID', metavar='N', type=int, nargs='+', help='indices of replicas to plot.', default=[]) + return parser.parse_args() + + args = parse_arguments() + + main(args.filename, args.all_in_one, args.rep_ID) +","Python" +"Genesis","noinil/genesis_cg_tool","tools/modeling/rna_artifact/cg_rna_structure_builder_HPS.jl",".jl","6349","228","#!/usr/bin/env julia + +using Random +using Printf +using ArgParse + +include(""../../../src/lib/biomath.jl"") +include(""../../../src/lib/molecule.jl"") +include(""../../../src/lib/topology.jl"") +include(""../../../src/lib/constants.jl"") +include(""../../../src/lib/selection.jl"") +include(""../../../src/lib/coarse_graining_subroutines.jl"") +include(""../../../src/lib/conformation.jl"") +include(""../../../src/lib/coarse_graining.jl"") +include(""../../../src/lib/parsers.jl"") + +function parse_commandline() + s = ArgParseSettings() + + @add_arg_table s begin + + ""--sequence"", ""-s"" + help = ""RNA sequence file."" + arg_type = String + default = """" + + ""--length"" + help = ""Number of nucleotides in the RNA with random-sequence."" + arg_type = Int + default = 100 + + ""--straightness"" + help = ""Angle threshold to create a (non)-straight chain."" + arg_type = Float64 + default = 45.0 + + end + + return parse_args(s) +end + + +function make_cg_RNA_structure(args) + + seq_name = get(args, ""sequence"", """") + + # non-straightness (because ideal straight chain could have problem...) 
+ threshold_angle = get( args, ""straightness"", 45.0) + + println(""============================================================"") + + AA_FULLNAME_DICT = Dict( + 'A' => ""RA"", + 'C' => ""RC"", + 'G' => ""RG"", + 'U' => ""RU"" + ) + MASS_DICT = Dict( + 'A' => 329.200, + 'C' => 305.200, + 'G' => 345.200, + 'U' => 306.200 + ) + + # ======================================================== + # RNA sequence (read from file or generate random one) + # ======================================================== + if length(seq_name) > 0 + println(""> Open sequence file:"", seq_name) + + mol_name = split(basename( seq_name ), '.')[1] * ""_cg"" + + RNA_seqence = """" + num_chain = 0 + for line in eachline(seq_name) + if length(line) == 0 + continue + end + if line[1] == '>' + num_chain += 1 + continue + end + if num_chain > 1 + error(""Only support single-chain RNA!"") + end + seq = strip(line) + if length(seq) == 0 + continue + end + for b in seq + if ! haskey(AA_FULLNAME_DICT, b) + error(""Wrong RNA sequence!"") + end + end + RNA_seqence *= seq + end + RNA_length = length(RNA_seqence) + else + println(""> Generating random RNA sequence:"") + + mol_name = ""random_RNA_cg"" + + RNA_length = args[""length""] + RNA_seqence = randstring(""ACGU"", RNA_length) + end + println(""> RNA sequence: ( Length: $RNA_length ) "") + println(""> "", RNA_seqence) + + # =================== + # Preparing RNA chain + # =================== + atom_types = Vector{String}(undef, RNA_length) + atom_names = Vector{String}(undef, RNA_length) + resi_names = Vector{String}(undef, RNA_length) + atom_masss = zeros(RNA_length) + atom_coors = zeros(3, RNA_length) + + theta = rand() * threshold_angle + for i in 1 : RNA_length + short_name = RNA_seqence[i] + residue_name = AA_FULLNAME_DICT[short_name] + a_mass = MASS_DICT[short_name] + + atom_types[i] = residue_name + atom_names[i] = ""RP"" + resi_names[i] = residue_name + atom_masss[i] = a_mass + + if i == 1 + atom_coors[:, i] = [0, 0, 0] + else + if mod(i, 
2) == 0 + theta = rand() * threshold_angle + phi = rand() * 360 + atom_coors[:, i] = atom_coors[:, i - 1] + [sind(theta) * cosd(phi), sind(theta) * sind(phi), cosd(theta)] * 5.0 + else + atom_coors[:, i] = atom_coors[:, i - 2] + [0, 0, cosd(theta)] * 5.0 * 2 + end + end + end + + # =============== + # Output topology + # =============== + # --- + # top + # --- + top_fname = mol_name * "".top"" + top_file = open(top_fname, ""w"") + + @printf(top_file, ""#include \""./param/atom_types.itp\"" \n"") + @printf(top_file, ""#include \""./param/flexible_local_angle.itp\"" \n"") + @printf(top_file, ""#include \""./param/flexible_local_dihedral.itp\"" \n"") + @printf(top_file, ""#include \""./param/pair_energy_MJ_96.itp\"" \n\n"") + @printf(top_file, ""#include \""./itp/%s.itp\"" \n\n"", mol_name) + + @printf(top_file, ""[ system ] \n"") + @printf(top_file, ""%s \n\n"", mol_name) + + @printf(top_file, ""[ molecules ] \n"") + @printf(top_file, ""%s 1 \n\n"", mol_name) + + @printf(top_file, ""; [ cg_ele_chain_pairs ]\n"") + @printf(top_file, ""; ON 1 - 2 : 3 - 4\n"") + + close(top_file) + + # --- + # itp + # --- + itp_fname = mol_name * "".itp"" + itp_file = open(itp_fname, ""w"") + + @printf(itp_file, ""[ moleculetype ]\n"") + @printf(itp_file, ""%s 3 \n"", mol_name) + + @printf(itp_file, ""\n[ atoms ]\n"") + @printf(itp_file, ""; +INFO+ CHAIN: 1 SEGNAME: RAND_RNA \n"") + for i in 1 : RNA_length + @printf(itp_file, ""%10d%5s%10d%5s%5s%5d %8.3f %8.3f\n"", + i, atom_types[i], i, resi_names[i], atom_names[i], 1, -1.000, atom_masss[i]) + end + + @printf(itp_file, ""\n[ bonds ]\n"") + for i in 1 : RNA_length - 1 + @printf(itp_file, ""%10d%10d%5d%18.4E%18.4E\n"", + i, i + 1, 1, 0.5, 2000.0) + end + + @printf(itp_file, ""\n[ cg_IDR_HPS_region ]\n"") + @printf(itp_file, ""%10d %10d\n"", 1, RNA_length) + + close(itp_file) + + # ================== + # Output coordinates + # ================== + crd_fname = mol_name * "".gro"" + crd_file = open(crd_fname, ""w"") + + @printf(crd_file, 
""CG model for %s, t = 0.000 \n"", mol_name) + @printf(crd_file, "" %10d \n"", RNA_length) + + for i in 1 : RNA_length + @printf(crd_file, ""%5d%5s%5s%5d %8.4f %8.4f %8.4f %8.4f %8.4f %8.4f \n"", + i, resi_names[i], atom_names[i], i, + atom_coors[1, i] * 0.1, + atom_coors[2, i] * 0.1, + atom_coors[3, i] * 0.1, + 0.0, 0.0, 0.0) + end + @printf(crd_file, ""%15.4f%15.4f%15.4f \n\n"", 0.0, 0.0, 0.0) + + close(crd_file) + + return 0 +end + + +function main() + args = parse_commandline() + make_cg_RNA_structure(args) +end + +if abspath(PROGRAM_FILE) == @__FILE__ + main() +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/modeling/duplication_modeling/duplication_generator.jl",".jl","5493","188","#!/usr/bin/env julia + +using Printf +using ArgParse +using Random + +include(""../../../src/lib/biomath.jl"") +include(""../../../src/lib/constants.jl"") +include(""../../../src/lib/topology.jl"") +include(""../../../src/lib/conformation.jl"") +include(""../../../src/lib/parser_crd.jl"") +include(""../../../src/lib/parser_top.jl"") + +function main(args) + + top_filename = get(args, ""top"", """") + crd_filename = get(args, ""crd"", """") + mol_name = get(args, ""output"", ""new_mol"") + + NUM_X = get(args, ""nx"", 1) + NUM_Y = get(args, ""ny"", 1) + NUM_Z = get(args, ""nz"", 1) + + PAD_X = get(args, ""xpadding"", 5.0) + PAD_Y = get(args, ""ypadding"", 5.0) + PAD_Z = get(args, ""zpadding"", 5.0) + + DENSE = get(args, ""density"", 1.0) + + ROT = get(args, ""random-rotation"", false) + + mol_top = read_grotop(top_filename) + mol_crd = read_grocrd(crd_filename) + + num_copies = Int( floor( NUM_X * NUM_Y * NUM_Z * DENSE ) ) + total_num_particles = mol_top.num_atom * num_copies + + REAL_INDICES = randperm(NUM_X * NUM_Y * NUM_Z)[1:num_copies] + + println(""System name:"", mol_top.system_name) + println(""Number of particles in top:"", mol_top.num_atom) + + # ============================== + # move single molecule to origin + # ============================== + mol_center = 
centroid(mol_crd.coors) + mol_orig_coors = mol_crd.coors .- mol_center + + # ================== + # make copies of mol + # ================== + max_coors = findmax(mol_orig_coors, dims=2)[1] + min_coors = findmin(mol_orig_coors, dims=2)[1] + MOL_SIZE = max_coors - min_coors + + gro_name = @sprintf(""%s_mul_%d_%d_%d_n_%d.gro"", mol_name, NUM_X, NUM_Y, NUM_Z, num_copies) + gro_file = open(gro_name, ""w"") + + @printf(gro_file, ""CG model %s, nx: %d, ny: %d, nz: %d, t = %16.3f \n"", mol_top.system_name, NUM_X, NUM_Y, NUM_Z, 0) + @printf(gro_file, ""%12d \n"", total_num_particles) + + @printf(""Duplicated system has %d x %d x %d x %d%% = %d copies, in total %d atoms \n"", + NUM_X, NUM_Y, NUM_Z, Int(floor(DENSE*100)), num_copies, total_num_particles) + + i_bead_global = 0 + i_mol_global = 0 + for ix in 1:NUM_X + for iy in 1:NUM_Y + for iz in 1:NUM_Z + i_mol_global += 1 + if !( i_mol_global in REAL_INDICES ) + continue + end + + if ROT + rot_matrix = generate_random_rotation() + mol_coors = rot_matrix * mol_orig_coors + else + mol_coors = mol_orig_coors + end + + shift_x = (ix - 1) * (MOL_SIZE[1] + PAD_X * 2) + shift_y = (iy - 1) * (MOL_SIZE[2] + PAD_Y * 2) + shift_z = (iz - 1) * (MOL_SIZE[3] + PAD_Z * 2) + for i_bead in 1 : mol_top.num_atom + i_bead_global += 1 + @printf(gro_file, ""%5d%5s%5s%5d %8.4f %8.4f %8.4f %8.4f %8.4f %8.4f \n"", + mol_top.top_atoms[i_bead].residue_index, + mol_top.top_atoms[i_bead].residue_name, + mol_top.top_atoms[i_bead].atom_name, + i_bead_global % 100000, + ( mol_coors[1,i_bead] + shift_x ) * 0.1, + ( mol_coors[2,i_bead] + shift_y ) * 0.1, + ( mol_coors[3,i_bead] + shift_z ) * 0.1, + 0.0, 0.0, 0.0) + end + end + end + end + @printf(gro_file, ""%15.4f%15.4f%15.4f \n\n"", 0.0, 0.0, 0.0) + + @printf(""Duplicated system size estimation (if no rotation): %16.3f x %16.3f x %16.3f \n"", + NUM_X * ( MOL_SIZE[1] + 2 * PAD_X ), + NUM_Y * ( MOL_SIZE[2] + 2 * PAD_Y ), + NUM_Z * ( MOL_SIZE[3] + 2 * PAD_Z )) + + close(gro_file) +end + +# 
============================= +# Parsing Commandline Arguments +# ============================= +function parse_commandline() + s = ArgParseSettings() + + @add_arg_table s begin + + ""--top"", ""-t"" + help = ""Topology file name (gromacs style)."" + required = true + arg_type = String + + ""--crd"", ""-c"" + help = ""Coordinate file name (gromacs style)."" + required = true + arg_type = String + + ""--output"", ""-o"" + help = ""Output file name."" + arg_type = String + default = ""BIGMOL"" + + ""--nx"" + help = ""Number of x copies."" + arg_type = Int + default = 1 + + ""--ny"" + help = ""Number of y copies."" + arg_type = Int + default = 1 + + ""--nz"" + help = ""Number of z copies."" + arg_type = Int + default = 1 + + ""--xpadding"" + help = ""Padding distance in x axis (A)."" + arg_type = Float64 + default = 5.0 + + ""--ypadding"" + help = ""Padding distance in y axis (A)."" + arg_type = Float64 + default = 5.0 + + ""--zpadding"" + help = ""Padding distance in z axis (A)."" + arg_type = Float64 + default = 5.0 + + ""--density"" + help = ""Density (probability) of molecules."" + arg_type = Float64 + default = 1.0 + + ""--random-rotation"" + help = ""Perform random rotation for each duplicate."" + action = :store_true + + ""--debug"" + help = ""DEBUG."" + action = :store_true + end + + return parse_args(s) +end + + +if abspath(PROGRAM_FILE) == @__FILE__ + + args = parse_commandline() + + main(args) + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/modeling/DNA_nanoparticle_constructor/20220519_icosahedron_construction.jl",".jl","23778","630","#!/usr/bin/env julia + +include(""../../../src/lib/gcj.jl"") +using ArgParse +using LinearAlgebra +using Printf + +function icosahedron_construction(r_P) + + println("" ------------------------------------------------------------ "") + println("" Constructing icosahedron..."") + + # key coordinate + ξ = (sqrt(5) + 1) / 2 + # circum-radius + r_c = sqrt(ξ * ξ + 1) + # scaling factor + s_r = r_P / r_c + + # -------- 
+ # vertices + # -------- + nodes = zeros(Float64, (3, 12)) + # nodes[:, 1] = [ 0, 1, ξ] + # nodes[:, 2] = [ 0, -1, ξ] + # nodes[:, 3] = [ ξ, 0, 1] + # nodes[:, 4] = [-ξ, 0, 1] + # nodes[:, 5] = [ 1, ξ, 0] + # nodes[:, 6] = [-1, ξ, 0] + # nodes[:, 7] = [ 1, -ξ, 0] + # nodes[:, 8] = [-1, -ξ, 0] + # nodes[:, 9] = [ ξ, 0, -1] + # nodes[:, 10] = [-ξ, 0, -1] + # nodes[:, 11] = [ 0, 1, -ξ] + # nodes[:, 12] = [ 0, -1, -ξ] + nodes[:, 1] = [ 0, 1, ξ] # 1 + nodes[:, 2] = [ 0, -1, -ξ] # 12 + nodes[:, 3] = [ 0, -1, ξ] # 2 + nodes[:, 4] = [ 0, 1, -ξ] # 11 + nodes[:, 5] = [ ξ, 0, 1] # 3 + nodes[:, 6] = [-ξ, 0, -1] # 10 + nodes[:, 7] = [-ξ, 0, 1] # 4 + nodes[:, 8] = [ ξ, 0, -1] # 9 + nodes[:, 9] = [ 1, ξ, 0] # 5 + nodes[:, 10] = [-1, -ξ, 0] # 8 + nodes[:, 11] = [-1, ξ, 0] # 6 + nodes[:, 12] = [ 1, -ξ, 0] # 7 + nodes .*= s_r + + # ----- + # edges + # ----- + edge_idx_list = [] + i_count = 1 + for i in 1:11 + for j in i + 1:12 + coor_1 = nodes[:, i] + coor_2 = nodes[:, j] + distance = norm(coor_1 - coor_2) + # 1.05 * edge_length = 1.05 * 2 * s_r + if distance < 2.1 * s_r + push!(edge_idx_list, [i, j]) + end + end + end + + # ----- + # faces + # ----- + face_idx_list = [] + i_count = 1 + for i_edge in 1:length(edge_idx_list) + i1 = edge_idx_list[i_edge][1] + i2 = edge_idx_list[i_edge][2] + for i3 in i2 + 1:12 + if [i1, i3] in edge_idx_list && [i2, i3] in edge_idx_list + push!(face_idx_list, [i1, i2, i3]) + end + end + end + + println("" > DONE! 
"") + + # return + return (nodes, edge_idx_list, face_idx_list) + +end + +function determine_subdivision_level(r_P) + + println("" ------------------------------------------------------------ "") + println("" Calculating subdivision level..."") + inv_ξ = 2 / (sqrt(5) + 1) + # estimated upper limit of d_0 is 15Å + n_s = Int(ceil(2 * atan(inv_ξ) * r_P / 15)) + d_0 = 2 * r_P * atan(inv_ξ) / n_s + sigma = d_0 / sqrt(3) * 2 + + @printf("" > Polyhedron radius : %8.3f Å | # subdivision level: %8d \n"", r_P, n_s) + @printf("" > Estimated d0 : %8.3f Å | σ : %8.3f Å \n"", d_0, sigma) + println("" > DONE! "") + + return n_s + +end + +function subdivided_polyhedron_construction(f_polyhedron_construction, r_P, DNA_density) + + println("" ============================================================ "") + + # construct polyhedron + (polyhedron_vertices_coors, edge_list, face_list) = f_polyhedron_construction(r_P) + n_poly_vert = size(polyhedron_vertices_coors)[2] + n_poly_edge = length(edge_list) + n_poly_face = length(face_list) + + # determine number of subdivision + n_subdivision = determine_subdivision_level(r_P) + + println("" ------------------------------------------------------------ "") + # calculate number of vertices + n_new_points_per_edge = n_subdivision - 1 + n_new_points_all_edge = n_new_points_per_edge * n_poly_edge + n_new_points_per_face = Int((n_subdivision - 1) * (n_subdivision - 2) / 2) + n_new_points_all_face = n_new_points_per_face * n_poly_face + n_nodes_total = n_poly_vert + n_new_points_all_edge + n_new_points_all_face + + @printf("" > Adding %d edge nodes and %d face nodes. \n"", n_new_points_all_edge, n_new_points_all_face) + @printf("" > Totally %d beads in the polyhedron. 
\n"", n_nodes_total) + + # =============== + # DATA structures + # =============== + # new coordinates + new_polyhedron_coors = zeros(Float64, (3, n_nodes_total)) + # flags for DNA-planting particles + new_polyhedron_flags = zeros(Int, n_nodes_total) + + # =========== + # subdivision + # =========== + # vertices coordinates (first n_poly_vert) + new_polyhedron_coors[:, 1:n_poly_vert] = polyhedron_vertices_coors + i_node = n_poly_vert + 1 + # ------------- + # step 1: edges + # ------------- + for i_edge in 1:length(edge_list) + v1 = edge_list[i_edge][1] + v2 = edge_list[i_edge][2] + coor_v1 = polyhedron_vertices_coors[:, v1] + coor_v2 = polyhedron_vertices_coors[:, v2] + e1 = (coor_v2 - coor_v1) ./ n_subdivision + for i in 1:n_subdivision - 1 + coor_new = coor_v1 .+ (i .* e1) + coor_new .*= r_P / norm(coor_new) + new_polyhedron_coors[:, i_node] = coor_new + i_node += 1 + end + end + + # ------------- + # step 2: faces + # ------------- + for i_face in 1:length(face_list) + v1 = face_list[i_face][1] + v2 = face_list[i_face][2] + v3 = face_list[i_face][3] + coor_v1 = polyhedron_vertices_coors[:, v1] + coor_v2 = polyhedron_vertices_coors[:, v2] + coor_v3 = polyhedron_vertices_coors[:, v3] + e1 = (coor_v2 - coor_v1) ./ n_subdivision + e2 = (coor_v3 - coor_v2) ./ n_subdivision + for i in 2:n_subdivision - 1 + for j in 1:i - 1 + coor_new = coor_v1 .+ (i .* e1) .+ (j .* e2) + coor_new .*= r_P / norm(coor_new) + new_polyhedron_coors[:, i_node] = coor_new + i_node += 1 + end + end + end + + + # ================================= + # Determine vertices to connect DNA + # ================================= + A_nanop = 4 * pi * r_P * r_P + N_DNA = Int(ceil(A_nanop * DNA_density)) + println("" ------------------------------------------------------------ "") + @printf("" > Marking %d DNA-connecting nodes in nanoparticle\n"", N_DNA) + if N_DNA <= n_poly_vert + new_polyhedron_flags[1:N_DNA] .= 1 + @printf("" >> %d vertices marked \n"", N_DNA) + else + 
new_polyhedron_flags[1:n_poly_vert] .= 1 + @printf("" >> %d vertices marked \n"", n_poly_vert) + # non-poly-vertex DNA=connecting particles + n_DNA_node_on_face_edge = N_DNA - n_poly_vert + n_DNA_node_on_face = Int(ceil(n_DNA_node_on_face_edge * n_new_points_all_face / (n_new_points_all_face + n_new_points_all_edge))) + n_DNA_node_on_edge = n_DNA_node_on_face_edge - n_DNA_node_on_face + @printf("" >> %d edge-nodes marked \n"", n_DNA_node_on_edge) + @printf("" >> %d face-nodes marked \n"", n_DNA_node_on_face) + # ---------------------------- + # put extra DNA nodes on edges + # ---------------------------- + n_DNA_per_edge, n_DNA_extra_edge = divrem(n_DNA_node_on_edge, n_poly_edge) + n_tmp_edge_node_count = n_poly_vert + for k_edge in 1:n_poly_edge + if k_edge <= n_DNA_extra_edge + n_tmp = n_DNA_per_edge + 1 + else + n_tmp = n_DNA_per_edge + end + if n_tmp >= 1 + n_interval_tmp = div(n_new_points_per_edge, n_tmp + 1) + new_polyhedron_flags[n_tmp_edge_node_count + n_interval_tmp:n_interval_tmp:n_tmp_edge_node_count + n_interval_tmp*n_tmp] .= 1 + n_tmp_edge_node_count += n_new_points_per_edge + end + end + # ---------------------------- + # put extra DNA nodes on faces + # ---------------------------- + n_DNA_per_face, n_DNA_extra_face = divrem(n_DNA_node_on_face, n_poly_face) + n_tmp_face_node_count = n_poly_vert + n_new_points_all_edge + for k_face in 1:n_poly_face + if k_face <= n_DNA_extra_face + n_tmp = n_DNA_per_face + 1 + else + n_tmp = n_DNA_per_face + end + if n_tmp >= 1 + n_interval_tmp = div(n_new_points_per_face, n_tmp + 1) + new_polyhedron_flags[n_tmp_face_node_count + n_interval_tmp:n_interval_tmp:n_tmp_face_node_count + n_interval_tmp*n_tmp] .= 1 + n_tmp_face_node_count += n_new_points_per_face + end + end + end + + println("" > DONE! 
"") + + return (new_polyhedron_coors, new_polyhedron_flags) + +end + + +function generate_nanoparticle_with_DNA(nanoP_coors, nanoP_flags, linker_length, DNA_name) + + println("" ------------------------------------------------------------ "") + println("" Constructing nanoparticle + DNA..."") + + if linker_length < 3 + println("" Please use a longer linker (at least 3 beads)!"") + exit() + end + + # ============================= + # Make a new topology structure + # ============================= + + top_default_params = GenTopDefault(0, 0, false, 0.0, 0.0) + top_default_atomtype = Vector{GenTopAtomType}(undef, 0) + top_default_CGDNA_bp = Vector{GenTopCGDNABasepairType}(undef, 0) + top_default_CGDNA_bs = Vector{GenTopCGDNABasestackType}(undef, 0) + top_default_CGDNA_cs = Vector{GenTopCGDNABasecrossType}(undef, 0) + top_default_CGDNA_exv = Vector{GenTopCGDNAExvType}(undef, 0) + top_default_CGPro_flx_angle = Vector{GenTopCGProAICGFlexAngleType}(undef, 0) + top_default_CGPro_flx_dihedral = Vector{GenTopCGProAICGFlexDihedralType}(undef, 0) + + global_index_2_local_index = Vector{Int}(undef, 0) + global_index_2_local_molid = Vector{Int}(undef, 0) + top_atoms = Vector{GenTopAtom}(undef, 0) + top_bonds = Vector{GenTopBond}(undef, 0) + top_angles = Vector{GenTopAngle}(undef, 0) + top_dihedrals = Vector{GenTopDihedral}(undef, 0) + top_pairs = Vector{GenTopPair}(undef, 0) + top_exclusions = Vector{GenTopExclusion}(undef, 0) + top_pwmcos = Vector{GenTopPWMcos}(undef, 0) + top_pwmcosns = Vector{GenTopPWMcos}(undef, 0) + top_idr_hps = Vector{GenTopRegion}(undef, 0) + top_idr_kh = Vector{GenTopRegion}(undef, 0) + top_mol_list = Vector{GenTopMolList}(undef, 0) + + # --------------------- + # read DNA top and coor + # --------------------- + tmp_top_name = @sprintf(""%s.top"", DNA_name) + tmp_crd_name = @sprintf(""%s.gro"", DNA_name) + println("" > Reading topology and coordinates from: "", tmp_top_name, "" and "", tmp_crd_name) + meta_DNA_top = read_grotop(tmp_top_name) + 
meta_DNA_cnf = read_grocrd(tmp_crd_name) + n_atom_DNA = meta_DNA_top.num_atom + + # ==================================== + # prepare coordinates of all particles + # ==================================== + N_DNA = count(>(0), nanoP_flags) + println("" > Number of DNAs to add: "", N_DNA) + n_atom_nanoP = length(nanoP_flags) + n_atom_total = 1 + n_atom_nanoP + N_DNA * (linker_length + n_atom_DNA) + println("" > Total number of CG beads:"", n_atom_total, "" : "") + @printf("" >> %12d for nanoparticle; \n"", 1 + n_atom_nanoP) + @printf("" >> %12d for linkers; \n"", N_DNA * linker_length) + @printf("" >> %12d for DNAs; \n"", N_DNA * n_atom_DNA) + + # ----------------- + # nanoP coordinates + # ----------------- + atom_coors = zeros(Float64, (3, n_atom_total)) + atom_coors[:, 1] = [0, 0, 0] # the central bead of nanoparticle! + atom_coors[:, 2:1 + n_atom_nanoP] = nanoP_coors[:, :] + + # ----------------------- + # adding linkers and DNAs + # ----------------------- + println("" > Adding coordinates of linkers and DNAs"") + i_atom = 1 + n_atom_nanoP + for (i_nanoP, f_nanoP) in enumerate(nanoP_flags) + if f_nanoP > 0 + anchor_coor = nanoP_coors[:, i_nanoP] + v0 = anchor_coor / norm(anchor_coor) # direction vector + if v0[3] > 0.5 + v_tmp = [v0[1], v0[3], -v0[2]] + else + v_tmp = [v0[2], -v0[1], v0[3]] + end + vx = cross(v_tmp, v0) # normal vector 1 + vx0 = vx / norm(vx) + vy = cross(v0, vx0) # normal vector 2 + vy0 = vy / norm(vy) + frame_local = [vx0 vy0 v0] + + # adding linker coordinates... + for j in 1:linker_length + i_atom += 1 + anchor_coor += v0 * 3.33 + atom_coors[:, i_atom] = anchor_coor + end + anchor_coor .+= v0 * 3.33 + + # adding DNA coordinates... 
+ dna_coors_0 = meta_DNA_cnf.coors .- meta_DNA_cnf.coors[:, 1] + dna_coors_1 = (frame_local * dna_coors_0) .+ anchor_coor + atom_coors[:, i_atom + 1:i_atom + n_atom_DNA] = dna_coors_1 + i_atom += n_atom_DNA + end + end + + + # ======================== + # prepare topology for all + # ======================== + println("" > Preparing topology of the whole structure..."") + # --------- + # [ atoms ] + # --------- + println("" >> adding atoms"") + for i_bead in 1 : 1 + n_atom_nanoP + # new_atom = GenTopAtom(i_bead, a_type, r_indx, r_name, a_name, f_type, charge, mass, c_id, s_name) + new_atom = GenTopAtom(i_bead, ""NP"", 1, ""NP"", ""NP"", AICG_ATOM_FUNC_NR, 0, 200, 1, ""NANOP"") + push!(top_atoms, new_atom) + push!(global_index_2_local_index, i_bead) + push!(global_index_2_local_molid, 1) + end + n_tmp_res = 1 + n_tmp_bead = 1 + n_atom_nanoP + cid = 1 + for i_DNA in 1:N_DNA + for j_bead in 1 : linker_length + n_tmp_res += 1 + n_tmp_bead += 1 + # new_atom = GenTopAtom(i_bead, a_type, r_indx, r_name, a_name, f_type, charge, mass, c_id, s_name) + new_atom = GenTopAtom(n_tmp_bead, ""LNK"", n_tmp_res, ""LNK"", ""LNK"", AICG_ATOM_FUNC_NR, 0, 100, 1, ""THIOL"") + push!(top_atoms, new_atom) + push!(global_index_2_local_index, j_bead) + push!(global_index_2_local_molid, 1) + end + n_dna_res = 0 + for k_bead in 1 : n_atom_DNA + n_tmp_bead += 1 + # new_atom = GenTopAtom(i_bead, a_type, r_indx, r_name, a_name, f_type, charge, mass, c_id, s_name) + my_atom = meta_DNA_top.top_atoms[k_bead] + n_dna_res = my_atom.residue_index + new_atom = GenTopAtom(n_tmp_bead, my_atom.atom_type, + n_tmp_res + n_dna_res, + my_atom.residue_name, + my_atom.atom_name, + AICG_ATOM_FUNC_NR, + my_atom.charge, + my_atom.mass, + cid + my_atom.chain_id, my_atom.seg_name) + push!(top_atoms, new_atom) + push!(global_index_2_local_index, k_bead) + push!(global_index_2_local_molid, cid + my_atom.chain_id) + end + n_tmp_res += n_dna_res + end + # --------- + # [ bonds ] + # --------- + println("" >> adding 
bonds"") + # nanoparticle + r_P = norm(atom_coors[:, 1] - atom_coors[:, 2]) + # @printf("" Radius of nanoparticle: %12.3f \n"", r_P) + # 1: central bead to all the other beads + for i_bead in 2:1 + n_atom_nanoP + new_bond = GenTopBond(1, i_bead, AICG_BOND_FUNC_TYPE, r_P, 80 * JOU2CAL) + push!(top_bonds, new_bond) + end + # 2: between every two neighboring beads + # 2.1: determine the minimum distance + coor1 = atom_coors[:, 2] + d_min = 1e10 + for j_nanop in 3:1+n_atom_nanoP + coor2 = atom_coors[:, j_nanop] + d_tmp = norm(coor1 - coor2) + d_min = d_tmp < d_min ? d_tmp : d_min + end + @printf("" >>> Distance between every two beads in nanoparticle = %12.3f Å \n"", d_min) + # 2.2: add nanop-nanop bonds + for j_bead in 2:1+n_atom_nanoP + coor1 = atom_coors[:, j_bead] + for k_bead in j_bead + 1:1+n_atom_nanoP + coor2 = atom_coors[:, k_bead] + d_tmp = norm(coor1 - coor2) + if d_tmp < 1.5 * d_min + new_bond = GenTopBond(j_bead, k_bead, AICG_BOND_FUNC_TYPE, d_tmp, 80 * JOU2CAL) + push!(top_bonds, new_bond) + end + end + end + # 3: B-L; L-L; L-L + n_DNA_connecting_node = 0 + for (i_nanoP, f_nanoP) in enumerate(nanoP_flags) + if f_nanoP > 0 + n_DNA_connecting_node += 1 + j_shift = 1 + n_atom_nanoP + (n_DNA_connecting_node - 1) * (linker_length + n_atom_DNA) + # add B-L + i_bead = 1 + i_nanoP + j_bead = j_shift + 1 + d_tmp = compute_distance(atom_coors[:, i_bead], atom_coors[:, j_bead]) + new_bond = GenTopBond(i_bead, j_bead, DNA3SPN_BOND_FUNC4_TYPE, d_tmp, 0.6 * JOU2CAL) + push!(top_bonds, new_bond) + # add L-L and L-S + for k in 1:linker_length + i_bead = j_shift + k + j_bead = i_bead + 1 + d_tmp = compute_distance(atom_coors[:, i_bead], atom_coors[:, j_bead]) + new_bond = GenTopBond(i_bead, j_bead, DNA3SPN_BOND_FUNC4_TYPE, d_tmp, 0.6 * JOU2CAL) + push!(top_bonds, new_bond) + end + end + end + # 4: DNA bonds + for i_dna in 1:N_DNA + i_shift = 1 + n_atom_nanoP + (i_dna - 1) * (linker_length + n_atom_DNA) + linker_length + n_bond_DNA = length(meta_DNA_top.top_bonds) + for 
i_bond in 1:n_bond_DNA + b = meta_DNA_top.top_bonds[i_bond] + new_bond = GenTopBond(i_shift + b.i, i_shift + b.j, b.function_type, b.r0, b.coef) + push!(top_bonds, new_bond) + end + end + + # ---------- + # [ angles ] + # ---------- + println("" >> adding angles"") + # 1: B-B-L; B-L-L; L-L-L; L-L-S; L-S-Base + n_DNA_connecting_node = 0 + for (i_nanoP, f_nanoP) in enumerate(nanoP_flags) + if f_nanoP > 0 + n_DNA_connecting_node += 1 + j_shift = 1 + n_atom_nanoP + (n_DNA_connecting_node - 1) * (linker_length + n_atom_DNA) + # add B-B-L + i_bead = 1 + i_nanoP + j_bead = j_shift + 1 + coor1 = atom_coors[:, i_bead] + for h_bead in 2:1+n_atom_nanoP + if h_bead == i_bead + continue + end + coor2 = atom_coors[:, h_bead] + d_tmp = norm(coor1 - coor2) + if d_tmp < 1.5 * d_min + new_angle = GenTopAngle(h_bead, i_bead, j_bead, DNA3SPN_ANG_FUNC_TYPE, 105.19, 200.0 * JOU2CAL, 0.0) + push!(top_angles, new_angle) + end + end + # add B-L-L + new_angle = GenTopAngle(i_bead, j_bead, j_bead + 1, DNA3SPN_ANG_FUNC_TYPE, 178.0, 200.0 * JOU2CAL, 0.0) + push!(top_angles, new_angle) + # add L-L-L + for k in 1:linker_length - 2 + j_bead = j_shift + k + new_angle = GenTopAngle(j_bead, j_bead + 1, j_bead + 2, DNA3SPN_ANG_FUNC_TYPE, 176.0, 200.0 * JOU2CAL, 0.0) + push!(top_angles, new_angle) + end + # add L-L-S + k = linker_length - 1 + j_bead = j_shift + k + new_angle = GenTopAngle(j_bead, j_bead + 1, j_bead + 2, DNA3SPN_ANG_FUNC_TYPE, 178.0, 200.0 * JOU2CAL, 0.0) + push!(top_angles, new_angle) + # add L-S-Base + k = linker_length + j_bead = j_shift + k + new_angle = GenTopAngle(j_bead, j_bead + 1, j_bead + 2, DNA3SPN_ANG_FUNC_TYPE, 103.28, 200.0 * JOU2CAL, 0.0) + push!(top_angles, new_angle) + # add L-S-P + new_angle = GenTopAngle(j_bead, j_bead + 1, j_bead + 3, DNA3SPN_ANG_FUNC_TYPE, 134.03, 200.0 * JOU2CAL, 0.0) + push!(top_angles, new_angle) + end + end + # 2: DNA + for i_dna in 1:N_DNA + i_shift = 1 + n_atom_nanoP + (i_dna - 1) * (linker_length + n_atom_DNA) + linker_length + n_angle_DNA = 
length(meta_DNA_top.top_angles) + for i_angle in 1:n_angle_DNA + a = meta_DNA_top.top_angles[i_angle] + new_angle = GenTopAngle(i_shift + a.i, i_shift + a.j, i_shift + a.k, DNA3SPN_ANG_FUNC_TYPE, a.a0, a.coef, 0.0) + push!(top_angles, new_angle) + end + end + + # ------------- + # [ dihedrals ] + # ------------- + println("" >> adding dihedrals"") + # DNA only... + for i_dna in 1:N_DNA + i_shift = 1 + n_atom_nanoP + (i_dna - 1) * (linker_length + n_atom_DNA) + linker_length + n_dihedral_DNA = length(meta_DNA_top.top_dihedrals) + for i_dihedral in 1:n_dihedral_DNA + d = meta_DNA_top.top_dihedrals[i_dihedral] + new_dihedral = GenTopDihedral(i_shift + d.i, i_shift + d.j, i_shift + d.k, i_shift + d.l, d.function_type, + d.d0, d.coef, d.w, d.n) + push!(top_dihedrals, new_dihedral) + end + end + + + # =================== + # Assemble everything + # =================== + println("" ------------------------------------------------------------ "") + println("" > Output files..."") + mol_name = @sprintf(""NANOPARTICLE_%s"", DNA_name) + mytop = GenTopology(mol_name, n_atom_total, + top_default_params, + top_default_atomtype, + top_default_CGDNA_bp, + top_default_CGDNA_bs, + top_default_CGDNA_cs, + top_default_CGDNA_exv, + top_default_CGPro_flx_angle, + top_default_CGPro_flx_dihedral, + global_index_2_local_index, + global_index_2_local_molid, + top_atoms, + top_bonds, + top_angles, + top_dihedrals, + top_pairs, + top_exclusions, + top_pwmcos, + top_pwmcosns, + top_idr_hps, + top_idr_kh, + top_mol_list) + myconf = Conformation(n_atom_total, atom_coors) + + # ----------------- + # output some files + # ----------------- + write_grotop(mytop, mol_name) + write_grocrd(mytop, myconf, mol_name) + write_mmCIF(mytop, myconf, mol_name) + + pdb_out_args = Dict(""cgconnect""=>true) + write_pdb(mytop, myconf, mol_name, pdb_out_args) + +end + + +if abspath(PROGRAM_FILE) == @__FILE__ + + + struct_args = ArgParseSettings() + + @add_arg_table struct_args begin + ""--r_P"", ""-r"" + help = 
""Radius of nanoparticle (Å)"" + arg_type = Float64 + default = 100.0 + + ""--DNA_density"", ""-d"" + help = ""Area density of DNA (nm^-2)"" + arg_type = Float64 + default = 0.2 + + ""--linker_length"", ""-l"" + help = ""Length of linker (>=3)"" + arg_type = Int + default = 5 + + ""--DNA_file_name"", ""-D"" + help = ""File name for DNA topology and coordinates (basename of .top, .gro)"" + arg_type = String + default = """" + + ""--debug"" + help = ""Debug mode"" + action = :store_true + end + + main_args = parse_args(struct_args) + + r_P = main_args[""r_P""] + DNA_density = main_args[""DNA_density""] * 0.01 # unit: Å^-2 + len_linker = main_args[""linker_length""] + DNA_name = get(main_args, ""DNA_file_name"", ""bdna_cg"") + + # ============= + # main function + # ============= + (my_polyhedron_coors, my_polyhedron_flags) = subdivided_polyhedron_construction(icosahedron_construction, r_P, DNA_density) + generate_nanoparticle_with_DNA(my_polyhedron_coors, my_polyhedron_flags, len_linker, DNA_name) + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/modeling/protein_artifact/cg_protein_structure_builder.jl",".jl","5865","193","#!/usr/bin/env julia + +using Random +using Printf +using ArgParse + +include(""../../../src/lib/gcj.jl"") + +function parse_commandline() + s = ArgParseSettings() + + @add_arg_table s begin + + ""--sequence"", ""-s"" + help = ""Protein sequence file."" + arg_type = String + default = """" + + ""--length"" + help = ""Number of amino acids in the protein with random-sequence."" + arg_type = Int + default = 100 + + ""--IDR-model"" + help = ""IDR-model: 1) HPS; 2) KH; 3) AICG2+ LOCAL"" + arg_type = Int + default = 1 + + ""--straightness"" + help = ""Angle threshold to create a (non)-straight chain."" + arg_type = Float64 + default = 45.0 + + ""--strategy"" + help = ""Strategy to construct the conformation of protein."" + arg_type = String + default = ""straight"" + + ""--force-field-protein"" + help = ""Force field for protein."" + 
arg_type = String + default = ""AICG2+"" + + end + + return parse_args(s) +end + + +function make_cg_protein_structure(args) + + seq_name = get(args, ""sequence"", """") + + # protein model + ff_protein_name = get(args, ""force-field-protein"", """") + if haskey(FF_PRO_DICT, ff_protein_name) + ff_pro = FF_PRO_DICT[ff_protein_name] + else + error(""Wrong force field for protein."") + end + + # non-straightness (because ideal straight chain could have problem...) + threshold_angle = get( args, ""straightness"", 45.0) + + idr_model = get(args, ""IDR-model"", 1) + + println(""============================================================"") + + AA_FULLNAME_DICT = Dict( + 'A' => ""ALA"", + 'R' => ""ARG"", + 'N' => ""ASN"", + 'D' => ""ASP"", + 'C' => ""CYS"", + 'Q' => ""GLN"", + 'E' => ""GLU"", + 'G' => ""GLY"", + 'H' => ""HIS"", + 'I' => ""ILE"", + 'L' => ""LEU"", + 'K' => ""LYS"", + 'M' => ""MET"", + 'F' => ""PHE"", + 'P' => ""PRO"", + 'S' => ""SER"", + 'T' => ""THR"", + 'W' => ""TRP"", + 'Y' => ""TYR"", + 'V' => ""VAL"" + ) + + chain_id_lib = ""_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"" + + # ======================================================== + # Protein sequence (read from file or generate random one) + # ======================================================== + if length(seq_name) > 0 + println(""> Open sequence file:"", seq_name) + + mol_name = split(basename( seq_name ), '.')[1] * ""_cg"" + + # ----------------------------------- + # read in protein sequence from fasta + # ----------------------------------- + num_chain, seq_list = read_fasta(seq_name) + protein_seqence = seq_list[1] + protein_length = length(protein_seqence) + + else + println(""> Generating random protein sequence:"") + + mol_name = ""random_protein_cg"" + + # -------------------------------- + # generate random protein sequence + # -------------------------------- + protein_length = args[""length""] + protein_seqence = randstring(""ARNDCQEGHILKMFPSTWYVX"", 
protein_length) + end + println(""> Protein sequence: ( Length: $protein_length ) "") + println(""> "", protein_seqence) + + # ======================================= + # Generating AAMolecule based on sequence + # ======================================= + atom_names = Vector{String}(undef, protein_length) + atom_coors = zeros(3, protein_length) + residues = Vector{AAResidue}(undef, protein_length) + chain0 = Vector{AAChain}(undef, 1) + + if args[""strategy""] == ""straight"" + theta = rand() * threshold_angle + for i in 1 : protein_length + aa_short_name = protein_seqence[i] + aa_residue_name = AA_FULLNAME_DICT[aa_short_name] + + atom_names[i] = ""CA"" + if i == 1 + atom_coors[:, i] = [0, 0, 0] + else + if mod(i, 2) == 0 + theta = rand() * threshold_angle + phi = rand() * 360 + atom_coors[:, i] = atom_coors[:, i - 1] + [sind(theta) * cosd(phi), sind(theta) * sind(phi), cosd(theta)] * 3.8 + else + atom_coors[:, i] = atom_coors[:, i - 2] + [0, 0, cosd(theta)] * 3.8 * 2 + end + end + residues[i] = AAResidue(aa_residue_name, [i]) + end + new_chain = AAChain(""""*chain_id_lib[1], rpad(mol_name, 6)[1:6], MOL_PROTEIN, [i for i = 1 : protein_length]) + + chain0[1] = new_chain + elseif args[""strategy""] == ""random-walk"" + println(""Self-avoiding random walk not support yet."") + end + + new_mol0 = AAMolecule(atom_names[1:protein_length], atom_coors[1:3, 1:protein_length], residues[1:protein_length], chain0) + + # =============================== + # coarse graining from AAMolecule + # =============================== + force_field = ForceFieldCG(ff_pro, 1, 1, 0, 0, 0) + if idr_model == 1 + args[""modeling-options""] = Dict(""IDR"" => Dict(""HPS_region"" => ""1 to $protein_length"")) + elseif idr_model == 2 + args[""modeling-options""] = Dict(""IDR"" => Dict(""KH_region"" => ""1 to $protein_length"")) + elseif idr_model == 3 + args[""modeling-options""] = Dict(""IDR"" => Dict(""AICG2p_IDR_local"" => ""1 to $protein_length"", + ""AICG2p_IDR_nonlocal"" => ""1 to 
$protein_length"")) + end + args[""cgconnect""] = true + + cg_top0, cg_conf0 = coarse_graining(new_mol0, force_field, args) + + write_grotop(cg_top0, mol_name, args) + write_grocrd(cg_top0, cg_conf0, mol_name, args) + write_pdb(cg_top0, cg_conf0, mol_name, args) + write_psf(cg_top0, mol_name, args) + + return 0 +end + + +function main() + args = parse_commandline() + make_cg_protein_structure(args) +end + +if abspath(PROGRAM_FILE) == @__FILE__ + main() +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/modeling/DNA_general/build_dna.jl",".jl","18933","473","#!/usr/bin/env julia + +using Printf +using ArgParse + +include(""../../../src/lib/gcj.jl"") + +function read_DNA_standard_base(fname) + pdb_atom_names = [] + pdb_coors = [] + for line in eachline(fname) + if startswith(line, ""ATOM"") + push!(pdb_atom_names, line[13:16]) + x = parse(Float64, line[31:38]) + y = parse(Float64, line[39:46]) + z = parse(Float64, line[47:54]) + push!(pdb_coors, [x, y, z]) + end + end + coor_array = zeros(Float64, (3, length(pdb_coors))) + for j in 1:length(pdb_coors) + coor_array[:, j] = pdb_coors[j][:] + end + return [pdb_atom_names, coor_array] +end + +function read_param_template(fname) + base_pair_params = Dict() + base_step_params = Dict() + for line in eachline(fname) + if startswith(line, ""#"") || length(strip(line)) == 0 + continue + else + words = split(line) + if length(words[1]) == 1 + base_pair_params[words[1][1]] = [words[2], [parse(Float64, words[i]) for i in 3:8]] + elseif length(words[1]) == 2 + base_step_params[words[1]] = [parse(Float64, words[i]) for i in 2:7] + end + end + end + return [base_pair_params, base_step_params] +end + +function read_DNA_sequence(fname) + dna_seq = """" + for line in eachline(fname) + if line[1] == '>' + continue + end + seq = strip(line) + for b in seq + if ! 
in(b, ""ACGT"") + error(""Wrong DNA sequence!"") + end + end + dna_seq *= seq + end + return dna_seq +end + +# main +function generate_NA_structure(args) + + par_fname = get(args, ""param"", """") + out_fname = get(args, ""output"", ""_DNA_constructed_"") + seq_fname = get(args, ""sequence"", """") + i_template = get(args, ""template"", 1) + do_debug = get(args, ""debug"", false) + + pdb_fname = out_fname * "".pdb"" + + bp_names = [] + bp_parms = [] + bs_parms = [] + + # ======================================= + # generate base-pair and base-step params + # ======================================= + # ---------------- + # read in template + # ---------------- + lib_path = @__DIR__ + if i_template == 0 # ideally regular B-type dsDNA + template_fname = lib_path * ""/lib/regular_B_DNA_params.dat"" + elseif i_template == 1 # averaged B-type dsDNA + template_fname = lib_path * ""/lib/average_B_DNA_params.dat"" + else + error(""TEMPLATE type not supported ..."") + end + template_base_pair_params, template_base_step_params = read_param_template(template_fname) + # -------------------- + # read in DNA sequence + # -------------------- + if length(par_fname) == 0 && length(seq_fname) == 0 + error(""Either sequence file or user-defined parameter file should be given!"") + end + if length(seq_fname) > 0 + seq_DNA = read_DNA_sequence(seq_fname) + for ( i, b ) in enumerate(seq_DNA) + # base-pair + tmp_bp_param = template_base_pair_params[b] + push!(bp_names, tmp_bp_param[1]) + push!(bp_parms, tmp_bp_param[2]) + # base-step + tmp_bs_param = zeros(Float64, 6) + if i > 1 + base_step = seq_DNA[i-1:i] + tmp_bs_param = template_base_step_params[base_step] + end + push!(bs_parms, tmp_bs_param) + end + end + + # ========================= + # user-specified parameters + # ========================= + # !!! NOTE !!! this part will overwrite the ""templated"" params!!! + if length(par_fname) > 0 + # println(""\e[0;31;40m ERROR: Please specify the DNA structural parameter file. 
\e[0m"") + # exit() + for line in eachline(par_fname) + if line[1] in ""ACGT"" + words = split(line) + local_bp_parms = [parse(Float64, words[i]) for i in 2:7] + local_bs_parms = [parse(Float64, words[i]) for i in 8:13] + push!(bp_names, words[1]) + push!(bp_parms, local_bp_parms) + push!(bs_parms, local_bs_parms) + end + end + end + + # ------------------ + # read in structures + # ------------------ + std_base_A = read_DNA_standard_base(lib_path * ""/lib/aa_A.pdb"") + std_base_C = read_DNA_standard_base(lib_path * ""/lib/aa_C.pdb"") + std_base_G = read_DNA_standard_base(lib_path * ""/lib/aa_G.pdb"") + std_base_T = read_DNA_standard_base(lib_path * ""/lib/aa_T.pdb"") + map_base_atom_coors = Dict('A' => std_base_A[2], + 'C' => std_base_C[2], + 'G' => std_base_G[2], + 'T' => std_base_T[2]) + map_base_atom_names = Dict('A' => std_base_A[1], + 'C' => std_base_C[1], + 'G' => std_base_G[1], + 'T' => std_base_T[1]) + + + ########################################################################### + # Build base-pair frames # + ########################################################################### + if do_debug + of_bp_frames = open(""DEBUG_basepair_reference_frames.dat"", ""w"") + of_base_frames = open(""DEBUG_base_reference_frames.dat"", ""w"") + end + + num_bp = length(bp_parms) + if do_debug + @printf(of_bp_frames, ""%5d base-pairs\n"", num_bp) + end + + bp_frame_orign = zeros(Float64, (3, num_bp)) + bp_frame_basis = zeros(Float64, (3, 3, num_bp)) + + # ---------------------- + # set the frame for BP 1 + # ---------------------- + bp_frame_basis[:, :, 1] = diagm([1., 1., 1.]) + + # -------------------- + # propagate to all bps + # -------------------- + for ibp in 2:num_bp + # parameters + tmp_shift = bs_parms[ibp][1] + tmp_slide = bs_parms[ibp][2] + tmp_rise = bs_parms[ibp][3] + tmp_tilt = bs_parms[ibp][4] + tmp_roll = bs_parms[ibp][5] + tmp_twist = bs_parms[ibp][6] + # angles + tmp_RollTilt = sqrt(tmp_tilt * tmp_tilt + tmp_roll * tmp_roll) + tmp_phi_sign = 
tmp_tilt > 0 ? 1 : -1 + if abs(tmp_RollTilt) < 1.e-10 + tmp_phi = 0 + else + tmp_phi = acosd(tmp_roll / tmp_RollTilt) * tmp_phi_sign + end + + # ----------------------------------------------- + # calculate frames with respect to the current bp + # ----------------------------------------------- + # basis of middle frame + local_middle_basis = rotation_matrix_around_z(tmp_twist/2 - tmp_phi) * rotation_matrix_around_y(tmp_RollTilt/2) * rotation_matrix_around_z(tmp_phi) + # basis of next bp frame + local_bp2_basis = rotation_matrix_around_z(tmp_twist/2 - tmp_phi) * rotation_matrix_around_y(tmp_RollTilt) * rotation_matrix_around_z(tmp_twist/2 + tmp_phi) + # origin of next bp + local_bp2_orign = local_middle_basis * [tmp_shift, tmp_slide, tmp_rise] + + # ---------------------------------- + # remap frames to global coordinates + # ---------------------------------- + global_bp1_orign = bp_frame_orign[:, ibp - 1] + global_bp1_basis = bp_frame_basis[:, :, ibp - 1] + global_bp2_orign = global_bp1_orign + global_bp1_basis * local_bp2_orign + global_bp2_basis = global_bp1_basis * local_bp2_basis + + bp_frame_orign[:, ibp] = global_bp2_orign + bp_frame_basis[:, :, ibp] = global_bp2_basis + end + + # --------------------------- + # output the reference frames + # --------------------------- + if do_debug + for ibp in 1:num_bp + @printf(of_bp_frames, ""... 
%5d %s ...\n"", ibp, bp_names[ibp]) + @printf(of_bp_frames, ""%10.4f%10.4f%10.4f\n"", bp_frame_orign[1,ibp], bp_frame_orign[2,ibp], bp_frame_orign[3,ibp]) + @printf(of_bp_frames, ""%10.4f%10.4f%10.4f\n"", bp_frame_basis[1,1,ibp], bp_frame_basis[1,2,ibp], bp_frame_basis[1,3,ibp]) + @printf(of_bp_frames, ""%10.4f%10.4f%10.4f\n"", bp_frame_basis[2,1,ibp], bp_frame_basis[2,2,ibp], bp_frame_basis[2,3,ibp]) + @printf(of_bp_frames, ""%10.4f%10.4f%10.4f\n"", bp_frame_basis[3,1,ibp], bp_frame_basis[3,2,ibp], bp_frame_basis[3,3,ibp]) + end + end + + + ########################################################################### + # Build DNA structures # + ########################################################################### + # -------------------------------- + # count number of atoms in strands + # -------------------------------- + # strand A and B + num_atom_strand_A = 0 + num_atom_strand_B = 0 + for ibp in 1:num_bp + num_atom_strand_A += length(map_base_atom_names[bp_names[ibp][1]]) + num_atom_strand_B += length(map_base_atom_names[bp_names[ibp][3]]) + end + coors_strand_A = zeros(Float64, (3, num_atom_strand_A)) + coors_strand_B = zeros(Float64, (3, num_atom_strand_B)) + aname_strand_A = ["""" for j in 1:num_atom_strand_A] # atom names + aname_strand_B = ["""" for j in 1:num_atom_strand_B] # atom names + rname_strand_A = ["""" for j in 1:num_atom_strand_A] # resid names + rname_strand_B = ["""" for j in 1:num_atom_strand_B] # resid names + resid_strand_A = [0 for j in 1:num_atom_strand_A] # resid index + resid_strand_B = [0 for j in 1:num_atom_strand_B] # resid index + + i_start_strand_A = 1 + i_end_strand_B = num_atom_strand_B + for ibp in 1:num_bp + # ======================= + # Build local base frames + # ======================= + # parameters + tmp_shear = bp_parms[ibp][1] + tmp_stretch = bp_parms[ibp][2] + tmp_stagger = bp_parms[ibp][3] + tmp_buckle = bp_parms[ibp][4] + tmp_twist = bp_parms[ibp][5] + tmp_opening = bp_parms[ibp][6] + # angles + if true # give 
the correct output as 3DNA v2.4 + tmp_BucTwist = sqrt(tmp_buckle * tmp_buckle + tmp_twist * tmp_twist) + tmp_phi_sign = tmp_buckle > 0 ? 1 : -1 + tmp_phi = acosd(tmp_twist / tmp_BucTwist) * tmp_phi_sign + else # OLD STRATEGY + # tmp_BuckOpen = sqrt(tmp_buckle * tmp_buckle + tmp_opening * tmp_opening) + # tmp_phi_sign = tmp_opening > 0 ? 1 : -1 + # tmp_phi = acosd(tmp_buckle / tmp_BuckOpen) * tmp_phi_sign + end + + # ----------------------------------------------- + # calculate frames with respect to the current bp + # ----------------------------------------------- + if true # give the same structure as 3DNA v2.4 + # basis of base 1 + local_base1_basis = rotation_matrix_around_z(-tmp_phi) * rotation_matrix_around_y(+tmp_BucTwist / 2) * rotation_matrix_around_z(+tmp_phi + tmp_opening / 2) + # basis of base 2 + local_base2_basis = rotation_matrix_around_z(-tmp_phi) * rotation_matrix_around_y(-tmp_BucTwist / 2) * rotation_matrix_around_z(+tmp_phi - tmp_opening / 2) + else # OLD STRATEGY + # local_base1_basis = rotation_matrix_around_y(-tmp_phi) * rotation_matrix_around_x( tmp_BuckOpen / 2) * rotation_matrix_around_y(tmp_phi + tmp_twist / 2) + # local_base2_basis = rotation_matrix_around_y(-tmp_phi) * rotation_matrix_around_x(-tmp_BuckOpen / 2) * rotation_matrix_around_y(tmp_phi - tmp_twist / 2) + end + # rotate base 2 around x-axis by 180 + local_base2_basis = local_base2_basis * diagm([1, -1, -1]) + + # origin of next bp + global_bp_orign = bp_frame_orign[:, ibp] + global_bp_basis = bp_frame_basis[:, :, ibp] + local_base1_orign = global_bp_basis * [tmp_shear, tmp_stretch, tmp_stagger] ./ 2 + + # ---------------------------------- + # remap frames to global coordinates + # ---------------------------------- + global_base1_orign = global_bp_orign + local_base1_orign + global_base1_basis = global_bp_basis * local_base1_basis + global_base2_orign = global_bp_orign - local_base1_orign + global_base2_basis = global_bp_basis * local_base2_basis + + if do_debug + 
@printf(of_base_frames, ""=============================================\n"") + @printf(of_base_frames, ""... %5d %s ...\n"", ibp, bp_names[ibp]) + @printf(of_base_frames, ""---------------------------------------------\n"") + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", global_base1_orign[1], global_base1_orign[2], global_base1_orign[3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", global_base1_basis[1,1], global_base1_basis[1,2], global_base1_basis[1,3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", global_base1_basis[2,1], global_base1_basis[2,2], global_base1_basis[2,3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", global_base1_basis[3,1], global_base1_basis[3,2], global_base1_basis[3,3]) + @printf(of_base_frames, ""---------------------------------------------\n"") + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", global_base2_orign[1], global_base2_orign[2], global_base2_orign[3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", global_base2_basis[1,1], global_base2_basis[1,2], global_base2_basis[1,3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", global_base2_basis[2,1], global_base2_basis[2,2], global_base2_basis[2,3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", global_base2_basis[3,1], global_base2_basis[3,2], global_base2_basis[3,3]) + end + + # ======================= + # fill in the coordinates + # ======================= + b_name_1 = bp_names[ibp][1] + b_name_2 = bp_names[ibp][3] + coor_std_base1 = map_base_atom_coors[b_name_1] + coor_std_base2 = map_base_atom_coors[b_name_2] + + coor_new_base1 = global_base1_basis * coor_std_base1 .+ global_base1_orign + coor_new_base2 = global_base2_basis * coor_std_base2 .+ global_base2_orign + + num_atom_base1 = length(map_base_atom_names[b_name_1]) + num_atom_base2 = length(map_base_atom_names[b_name_2]) + + coors_strand_A[:, i_start_strand_A:i_start_strand_A + num_atom_base1 - 1] = coor_new_base1[:, :] + coors_strand_B[:, i_end_strand_B - num_atom_base2 + 
1:i_end_strand_B] = coor_new_base2[:, :] + aname_strand_A[i_start_strand_A:i_start_strand_A + num_atom_base1 - 1] = map_base_atom_names[b_name_1] + aname_strand_B[i_end_strand_B - num_atom_base2 + 1:i_end_strand_B] = map_base_atom_names[b_name_2] + rname_strand_A[i_start_strand_A:i_start_strand_A + num_atom_base1 - 1] .= "" D"" * b_name_1 + rname_strand_B[i_end_strand_B - num_atom_base2 + 1:i_end_strand_B] .= "" D"" * b_name_2 + resid_strand_A[i_start_strand_A:i_start_strand_A + num_atom_base1 - 1] .= ibp + resid_strand_B[i_end_strand_B - num_atom_base2 + 1:i_end_strand_B] .= 2 * num_bp - ibp + 1 + + i_start_strand_A += num_atom_base1 + i_end_strand_B -= num_atom_base2 + end + + # ============= + # output to PDB + # ============= + new_PDB_file = open(pdb_fname, ""w"") + # -------- + # strand A + # -------- + for iatm in 1:num_atom_strand_A + atm_name = aname_strand_A[iatm] + x = coors_strand_A[1, iatm] + y = coors_strand_A[2, iatm] + z = coors_strand_A[3, iatm] + res_name = rpad(rname_strand_A[iatm], 4) + res_id = resid_strand_A[iatm] + @printf(new_PDB_file, ""ATOM %5d %4s %4s%1s%4d %8.3f%8.3f%8.3f%6.2f%6.2f%10s%2s \n"", + iatm, atm_name, res_name, + ""A"", + res_id, + x, y, z, + 1.0, + 1.0, + ""DNA1"", + strip(atm_name)[1]) + end + println(new_PDB_file, ""TER"") + + # -------- + # strand B + # -------- + for iatm in 1:num_atom_strand_B + atm_name = aname_strand_B[iatm] + x = coors_strand_B[1, iatm] + y = coors_strand_B[2, iatm] + z = coors_strand_B[3, iatm] + res_name = rpad(rname_strand_B[iatm], 4) + res_id = resid_strand_B[iatm] + @printf(new_PDB_file, ""ATOM %5d %4s %4s%1s%4d %8.3f%8.3f%8.3f%6.2f%6.2f%10s%2s \n"", + iatm + num_atom_strand_A, atm_name, res_name, + ""B"", + res_id, + x, y, z, + 1.0, + 1.0, + ""DNA2"", + strip(atm_name)[1]) + end + println(new_PDB_file, ""END"") + + # ============== + # ending work... 
+ # ============== + close(new_PDB_file) + + if do_debug + close(of_bp_frames) + close(of_base_frames) + end + +end + +if abspath(PROGRAM_FILE) == @__FILE__ + + struct_args = ArgParseSettings() + + @add_arg_table struct_args begin + ""--param"", ""-p"" + help = ""User-defined parameter file of base-pair and base-step local structures. (will OVERWRITE the templated params)"" + arg_type = String + default = """" + + ""--template"", ""-t"" + help = ""Template of base-pair and base-step parameters: 0) ideally regular B-DNA; 1) average B-DNA "" + arg_type = Int + default = 1 + + ""--sequence"", ""-s"" + help = ""File name for DNA sequence (.fasta)"" + arg_type = String + default = """" + + ""--output"", ""-o"" + help = ""PDB file name (output) "" + arg_type = String + default = ""_DNA_constructed_"" + + ""--cg"", ""-C"" + help = ""Generate CG topology and coordinates (3SPN.2C)"" + action = :store_true + + ""--5P"" + help = ""CG: Starting from Phosphate at the 5-end."" + action = :store_true + + ""--infinite"" + help = ""CG: Prepare an infinite DNA model."" + action = :store_true + + ""--debug"" + help = ""Debug mode"" + action = :store_true + end + + main_args = parse_args(struct_args) + + # ---------------------------- + # Generate atomistic structure + # ---------------------------- + generate_NA_structure(main_args) + + # ----------------------- + # Generate CG top and crd + # ----------------------- + do_cg = get(main_args, ""cg"", false) + if do_cg + aa_mol_name = main_args[""output""] + aa_dna_mol = read_PDB(aa_mol_name*"".pdb"") + + cg_mol_name = aa_mol_name * ""_cg"" + cg_args = main_args + cg_args[""3spn-param""] = 2 + cg_args[""3spn-use-5-phos""] = get(main_args, ""5P"", false) + cg_args[""3spn-circular""] = get(main_args, ""infinite"", false) + force_field = ForceFieldCG(1, 1, 1, 0, 0, 0) + cg_top, cg_conf = coarse_graining(aa_dna_mol, force_field, cg_args) + write_grotop(cg_top, cg_mol_name, cg_args) + write_grocrd(cg_top, cg_conf, cg_mol_name, cg_args) + 
write_mmCIF(cg_top, cg_conf, cg_mol_name, cg_args) + end +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/modeling/DNA_general/analysis_dna.jl",".jl","14594","348","#!/usr/bin/env julia + +using Printf +using ArgParse + +include(""../../../src/lib/gcj.jl"") + +function read_DNA_standard_base(fname, base_type) + if base_type == ""purine"" + # purine: 9 atoms + list_key_atoms = ["" N9 "", "" C8 "", "" N7 "", "" C5 "", "" C6 "", "" N1 "", "" C2 "", "" N3 "", "" C4 ""] + elseif base_type == ""pyrimidine"" + # pyrimidine: 6 atoms + list_key_atoms = ["" N1 "", "" C2 "", "" N3 "", "" C4 "", "" C5 "", "" C6 ""] + end + pdb_atom_names = [] + pdb_coors = [] + for line in eachline(fname) + if startswith(line, ""ATOM"") + atom_name = line[13:16] + if !(atom_name in list_key_atoms) + continue + end + push!(pdb_atom_names, strip(atom_name)) + x = parse(Float64, line[31:38]) + y = parse(Float64, line[39:46]) + z = parse(Float64, line[47:54]) + push!(pdb_coors, [x, y, z]) + end + end + coor_array = zeros(Float64, (3, length(pdb_coors))) + for j in 1:length(pdb_coors) + coor_array[:, j] = pdb_coors[j][:] + end + return [pdb_atom_names, coor_array] +end + +function analyze_NA_structure(args) + + # ---- + # args + # ---- + pdb_name = get(args, ""PDB"", """") + out_pref = get(args, ""output"", ""_DNA_analysis_"") + is_mmCIF = get(args, ""mmCIF"", false) + do_debug = get(args, ""debug"", false) + + # ====================== + # read in standard bases + # ====================== + lib_path = @__DIR__ + std_base_A = read_DNA_standard_base(lib_path * ""/lib/aa_A.pdb"", ""purine"") + std_base_C = read_DNA_standard_base(lib_path * ""/lib/aa_C.pdb"", ""pyrimidine"") + std_base_G = read_DNA_standard_base(lib_path * ""/lib/aa_G.pdb"", ""purine"") + std_base_T = read_DNA_standard_base(lib_path * ""/lib/aa_T.pdb"", ""pyrimidine"") + map_base_atom_coors = Dict(""DA"" => std_base_A[2], + ""DC"" => std_base_C[2], + ""DG"" => std_base_G[2], + ""DT"" => std_base_T[2]) + map_base_atom_names = 
Dict(""DA"" => std_base_A[1], + ""DC"" => std_base_C[1], + ""DG"" => std_base_G[1], + ""DT"" => std_base_T[1]) + + # ==================== + # prepare output files + # ==================== + of_base_fname = out_pref * ""_base_reference_frames.dat"" + of_bp_fname = out_pref * ""_basepair_reference_frames.dat"" + of_parm_fname = out_pref * ""_structure_parameters.dat"" + if do_debug + of_base_frames = open(of_base_fname, ""w"") + of_bp_frames = open(of_bp_fname, ""w"") + end + of_parameters = open(of_parm_fname, ""w"") + + + ########################################################################### + # Analyze PDB # + ########################################################################### + if is_mmCIF + cif_data = read_mmCIF(pdb_name) + aa_molecule = mmCIF_to_AAMolecule(cif_data) + else + aa_molecule = read_PDB(pdb_name) + end + + # get number of bp + num_residues_chain_A = length(aa_molecule.chains[1].residues) + num_residues_chain_B = length(aa_molecule.chains[2].residues) + if num_residues_chain_A != num_residues_chain_B + println(""Inconsistent number of residues in chains 1 and 2."") + exit() + else + num_bp = num_residues_chain_A + end + + # --------------- + # data structures + # --------------- + bp_frame_orign = zeros(Float64, (3, num_bp)) + bp_frame_basis = zeros(Float64, (3, 3, num_bp)) + bp_names = ["""" for i in 1:num_bp] + + bp_params = zeros(Float64, (6, num_bp)) # base-pair quantities + bs_params = zeros(Float64, (6, num_bp)) # base-step quantities + + # ============================= + # get base and base-pair frames + # ============================= + for ibp in 1:num_bp + i_base1 = aa_molecule.chains[1].residues[ibp] + i_base2 = aa_molecule.chains[2].residues[num_bp - ibp + 1] + resname_base1 = aa_molecule.residues[i_base1].name + resname_base2 = aa_molecule.residues[i_base2].name + bp_names[ibp] = resname_base1[end] * ""-"" * resname_base2[end] + # ------------------- + # get coors of base 1 + # ------------------- + tmp_dict = 
map_base_atom_names[resname_base1] + num_atom_base1 = length(tmp_dict) + pdb_coor_base1 = zeros(Float64, (3, num_atom_base1)) + for iatm in aa_molecule.residues[i_base1].atoms + atom_name = aa_molecule.atom_names[iatm] + if atom_name in tmp_dict + ii = findfirst(x->x==atom_name, tmp_dict) + pdb_coor_base1[:, ii] = aa_molecule.atom_coors[:, iatm] + end + end + # ------------------- + # get coors of base 2 + # ------------------- + tmp_dict = map_base_atom_names[resname_base2] + num_atom_base2 = length(tmp_dict) + pdb_coor_base2 = zeros(Float64, (3, num_atom_base2)) + for iatm in aa_molecule.residues[i_base2].atoms + atom_name = aa_molecule.atom_names[iatm] + if atom_name in tmp_dict + ii = findfirst(x->x==atom_name, tmp_dict) + pdb_coor_base2[:, ii] = aa_molecule.atom_coors[:, iatm] + end + end + + # --------------- + # superpose bases + # --------------- + base1_fit = compute_superimposition_transformation(map_base_atom_coors[resname_base1], pdb_coor_base1) + base2_fit = compute_superimposition_transformation(map_base_atom_coors[resname_base2], pdb_coor_base2) + + # ------------------------- + # get base reference frames + # ------------------------- + base1_orign = base1_fit.translation + base1_basis = base1_fit.rotation + base2_orign = base2_fit.translation + base2_basis = base2_fit.rotation + if base1_basis[:, 3]' * base2_basis[:, 3] < 0 + base2_basis *= Diagonal([1, -1, -1]); + end + + if do_debug + @printf(of_base_frames, ""=============================================\n"") + @printf(of_base_frames, ""... 
%5d %s ...\n"", ibp, bp_names[ibp]) + @printf(of_base_frames, ""---------------------------------------------\n"") + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", base1_orign[1], base1_orign[2], base1_orign[3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", base1_basis[1,1], base1_basis[1,2], base1_basis[1,3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", base1_basis[2,1], base1_basis[2,2], base1_basis[2,3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", base1_basis[3,1], base1_basis[3,2], base1_basis[3,3]) + @printf(of_base_frames, ""---------------------------------------------\n"") + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", base2_orign[1], base2_orign[2], base2_orign[3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", base2_basis[1,1], base2_basis[1,2], base2_basis[1,3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", base2_basis[2,1], base2_basis[2,2], base2_basis[2,3]) + @printf(of_base_frames, ""%10.4f%10.4f%10.4f\n"", base2_basis[3,1], base2_basis[3,2], base2_basis[3,3]) + end + + # =================================== + # calculate base-pair reference frame + # =================================== + # base-pair frame origin!!! + bp_frame_orign[:, ibp] = ( base1_orign .+ base2_orign ) .* 0.5 + + # base-pair frame basis!!! 
+ if true # give the correct result as latest 3DNA + base1_z = base1_basis[:, 3] + base2_z = base2_basis[:, 3] + bp_hinge = normalize(cross(base2_z, base1_z)) + bp_BucTwist_angle = acosd(base2_z' * base1_z) + base1_basis_new = rotation_matrix_around_axis(bp_hinge, -bp_BucTwist_angle/2) * base1_basis + base2_basis_new = rotation_matrix_around_axis(bp_hinge, +bp_BucTwist_angle/2) * base2_basis + else # OLD STATEGY: align y-axis + # base1_y = base1_basis[:, 2] + # base2_y = base2_basis[:, 2] + # bp_hinge = normalize(cross(base2_y, base1_y)) + # bp_BucOpen_angle = acosd(base2_y' * base1_y) + # base1_basis_new = rotation_matrix_around_axis(bp_hinge, -bp_BucOpen_angle/2) * base1_basis + # base2_basis_new = rotation_matrix_around_axis(bp_hinge, +bp_BucOpen_angle/2) * base2_basis + end + + bp_x = normalize(base1_basis_new[:, 1] + base2_basis_new[:, 1]) + bp_y = normalize(base1_basis_new[:, 2] + base2_basis_new[:, 2]) + bp_z = normalize(base1_basis_new[:, 3] + base2_basis_new[:, 3]) + + bp_frame_basis[:, :, ibp] = hcat(bp_x, bp_y, bp_z) + + # ---------------------------- + # analyze base pair quantities + # ---------------------------- + if true # gives correct results as 3DNA... + # translation + (tmp_shear, tmp_stretch, tmp_stagger) = (base1_orign - base2_orign)' * bp_frame_basis[:, :, ibp] + # opening + opening_sign = cross(base2_basis_new[:, 2], base1_basis_new[:, 2])' * bp_z > 0 ? 1 : -1 + tmp_opening = acosd(base2_basis_new[:, 1]' * base1_basis_new[:, 1]) * opening_sign + # phase + phase_sign = cross(bp_hinge, bp_y)' * bp_z > 0 ? 1 : -1 + tmp_phase = acosd(bp_y' * bp_hinge) * phase_sign + # twist + tmp_twist = bp_BucTwist_angle * cosd(tmp_phase) + # buckle + tmp_buckle = bp_BucTwist_angle * sind(tmp_phase) + else # OLD STRATEGY... + # twist_sign = cross(base2_basis_new[:, 1], base1_basis_new[:, 1])' + # * bp_y > 0 ? 1 : -1 + # tmp_twist = acosd(base2_basis_new[:, 1]' * base1_basis_new[:, 1]) + # * twist_sign + # phase_sign = cross(bp_hinge, bp_x)' * bp_y > 0 ? 
1 : -1 + # tmp_phase = acosd(bp_x' * bp_hinge) * phase_sign + # tmp_buckle = bp_BucOpen_angle * cosd(tmp_phase) + # tmp_opening = bp_BucOpen_angle * sind(tmp_phase) + end + # fill to array of params + bp_params[:, ibp] = [tmp_shear, tmp_stretch, tmp_stagger, tmp_buckle, tmp_twist, tmp_opening] + end + + # ==================== + # get base-step params + # ==================== + for ibp in 2:num_bp + bp1_orign = bp_frame_orign[:, ibp - 1] + bp1_basis = bp_frame_basis[:, :, ibp - 1] + bp2_orign = bp_frame_orign[:, ibp] + bp2_basis = bp_frame_basis[:, :, ibp] + + # bs (base-step) origin is the average of bp1 and bp2 + bs_orign = ( bp1_orign + bp2_orign ) .* 0.5 + + # bp orientation is a bit complicated... + bp1_z = bp1_basis[:, 3] + bp2_z = bp2_basis[:, 3] + bs_hinge = normalize(cross(bp1_z, bp2_z)) + bs_RollTilt_angle = acosd(bp1_z' * bp2_z) + bp1_basis_new = rotation_matrix_around_axis(bs_hinge, +bs_RollTilt_angle/2) * bp1_basis + bp2_basis_new = rotation_matrix_around_axis(bs_hinge, -bs_RollTilt_angle/2) * bp2_basis + bs_x = normalize(bp1_basis_new[:, 1] + bp2_basis_new[:, 1]) + bs_y = normalize(bp1_basis_new[:, 2] + bp2_basis_new[:, 2]) + bs_z = normalize(bp1_basis_new[:, 3] + bp2_basis_new[:, 3]) + + bs_frame_basis = hcat(bs_x, bs_y, bs_z) + + # ---------------------------- + # analyze base-step quantities + # ---------------------------- + # translation + (tmp_shift, tmp_slide, tmp_rise) = (bp2_orign - bp1_orign)' * bs_frame_basis + # twist + twist_sign = cross(bp1_basis_new[:, 2], bp2_basis_new[:, 2])' * bs_z > 0 ? 1 : -1 + tmp_twist = acosd(bp1_basis_new[:, 1]' * bp2_basis_new[:, 1]) * twist_sign + # phase + phase_sign = cross(bs_hinge, bs_y)' * bs_z > 0 ? 
1 : -1 + tmp_phase = acosd(bs_y' * bs_hinge) * phase_sign + # roll + tmp_roll = bs_RollTilt_angle * cosd(tmp_phase) + # tilt + tmp_tilt = bs_RollTilt_angle * sind(tmp_phase) + # fill to array of params + bs_params[:, ibp] = [tmp_shift, tmp_slide, tmp_rise, tmp_tilt, tmp_roll, tmp_twist] + end + + + # --------------------------- + # output the reference frames + # --------------------------- + if do_debug + for ibp in 1:num_bp + @printf(of_bp_frames, ""... %5d %s ...\n"", ibp, bp_names[ibp]) + @printf(of_bp_frames, ""%10.4f%10.4f%10.4f\n"", bp_frame_orign[1,ibp], bp_frame_orign[2,ibp], bp_frame_orign[3,ibp]) + @printf(of_bp_frames, ""%10.4f%10.4f%10.4f\n"", bp_frame_basis[1,1,ibp], bp_frame_basis[1,2,ibp], bp_frame_basis[1,3,ibp]) + @printf(of_bp_frames, ""%10.4f%10.4f%10.4f\n"", bp_frame_basis[2,1,ibp], bp_frame_basis[2,2,ibp], bp_frame_basis[2,3,ibp]) + @printf(of_bp_frames, ""%10.4f%10.4f%10.4f\n"", bp_frame_basis[3,1,ibp], bp_frame_basis[3,2,ibp], bp_frame_basis[3,3,ibp]) + end + end + + + # ------------------------------- + # output the structure quantities + # ------------------------------- + @printf(of_parameters, ""%3s %10s%10s%10s%10s%10s%10s%10s%10s%10s%10s%10s%10s\n"", + ""# "", + ""Shear"", ""Stretch"", ""Stagger"", ""Buckle"", ""Prop-Tw"", ""Opening"", + ""Shift"", ""Slide"", ""Rise"", ""Tilt"", ""Roll"", ""Twist"") + for ibp in 1:num_bp + bp_shear, bp_stretch, bp_stagger, bp_buckle, bp_twist, bp_opening = bp_params[:, ibp] + bs_shift, bs_slide, bs_rise, bs_tilt, bs_roll, bs_twist = bs_params[:, ibp] + @printf(of_parameters, ""%3s %10.3f%10.3f%10.3f%10.3f%10.3f%10.3f%10.3f%10.3f%10.3f%10.3f%10.3f%10.3f\n"", + bp_names[ibp], + bp_shear, bp_stretch, bp_stagger, bp_buckle, bp_twist, bp_opening, + bs_shift, bs_slide, bs_rise, bs_tilt, bs_roll, bs_twist) + end + + + # ============== + # ending work... 
+ # ============== + if do_debug + close(of_base_frames) + close(of_bp_frames) + end + close(of_parameters) + +end + +if abspath(PROGRAM_FILE) == @__FILE__ + args = ArgParseSettings() + + @add_arg_table args begin + + ""PDB"" + help = ""PDB file of DNA structures."" + required = true + arg_type = String + + ""--mmCIF"" + help = ""PDB file of DNA structures."" + action = :store_true + + ""--output"", ""-o"" + help = ""Output name prefix."" + arg_type = String + default = ""_DNA_analysis_"" + + ""--debug"" + help = ""DEBUG."" + action = :store_true + end + + analyze_NA_structure(parse_args(args)) +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/fileformat_conversion/pdb_2_gro.jl",".jl","2605","106","#!/usr/bin/env julia + +using Printf +using ArgParse + +include(""../../src/lib/constants.jl"") +include(""../../src/lib/topology.jl"") +include(""../../src/lib/molecule.jl"") +include(""../../src/lib/conformation.jl"") +include(""../../src/lib/parser_top.jl"") +include(""../../src/lib/parser_crd.jl"") +include(""../../src/lib/parser_pdb.jl"") + +function main(args) + + verbose = get(args, ""verbose"", false) + + top_filename = get(args, ""top"", """") + pdb_filename = get(args, ""pdb"", """") + crd_filename = get(args, ""output"", """") + pdb_no_ter = get(args, ""pdb-noTER"", false) + + # ============= + # Read topology + # ============= + mytop = read_grotop(top_filename) + + # ========================= + # Read coordinates from PDB + # ========================= + if pdb_no_ter + aa_coor = zeros(Float64, (3, mytop.num_atom)) + i_atom = 0 + for line in eachline(pdb_filename) + if startswith(line, ""ATOM"") + i_atom += 1 + new_atom_data = parse_PDB_line(rpad(line, 80)) + aa_coor[1, i_atom] = new_atom_data.coor_x + aa_coor[2, i_atom] = new_atom_data.coor_y + aa_coor[3, i_atom] = new_atom_data.coor_z + end + end + new_conf = Conformation(mytop.num_atom, aa_coor) + else + new_molecule = read_PDB(pdb_filename) + num_atoms = length(new_molecule.atom_coors[1, :]) + 
new_conf = Conformation(num_atoms, new_molecule.atom_coors) + end + + if length(crd_filename) == 0 + system_name = top_filename[1:end-4] + else + system_name = crd_filename[1:end-4] + end + + write_grocrd(mytop, new_conf, system_name, args) + + if verbose + println(""> converting from *.gro to *.pdb : DONE!"") + end +end + +# ============================= +# Parsing Commandline Arguments +# ============================= +function parse_commandline() + s = ArgParseSettings() + + @add_arg_table s begin + + ""--top"", ""-t"" + help = ""Topology file name (gromacs style)."" + required = true + arg_type = String + + ""--pdb"", ""-p"" + help = ""PDB file name."" + required = true + arg_type = String + + ""--pdb-noTER"" + help = ""PDB file does not have TER lines."" + action = :store_true + + ""--output"", ""-o"" + help = ""Output file name."" + arg_type = String + default = """" + + ""--debug"" + help = ""DEBUG."" + action = :store_true + end + + return parse_args(s) +end + + +if abspath(PROGRAM_FILE) == @__FILE__ + + args = parse_commandline() + + main(args) + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","tools/fileformat_conversion/gro_2_pdb.jl",".jl","2267","103","#!/usr/bin/env julia + +using Printf +using ArgParse + +include(""../../src/lib/constants.jl"") +include(""../../src/lib/topology.jl"") +include(""../../src/lib/conformation.jl"") +include(""../../src/lib/parser_top.jl"") +include(""../../src/lib/parser_crd.jl"") +include(""../../src/lib/parser_pdb.jl"") +include(""../../src/lib/parser_cif.jl"") + +function main(args) + + verbose = get(args, ""verbose"", false) + + top_filename = get(args, ""top"", """") + crd_filename = get(args, ""crd"", """") + pdb_filename = get(args, ""output"", """") + out_mmcif = get(args, ""mmCIF"", false) + + # ================================ + # Read in topology and coordinates + # ================================ + + mytop = read_grotop(top_filename) + mycrd = read_grocrd(crd_filename) + + # ================= + # Write to 
PDB file + # ================= + + if length(pdb_filename) == 0 + system_name = crd_filename[1:end-4] * ""_new"" + else + system_name = pdb_filename[1:end-4] + end + + if out_mmcif + write_mmCIF(mytop, mycrd, system_name, args) + else + write_pdb(mytop, mycrd, system_name, args) + end + + if verbose + println(""> converting from *.gro to *.pdb : DONE!"") + end +end + +# ============================= +# Parsing Commandline Arguments +# ============================= +function parse_commandline() + s = ArgParseSettings() + + @add_arg_table s begin + + ""--top"", ""-t"" + help = ""Topology file name (gromacs style)."" + required = true + arg_type = String + + ""--crd"", ""-c"" + help = ""Coordinate file name (gromacs style)."" + required = true + arg_type = String + + ""--output"", ""-o"" + help = ""Output PDB file name."" + arg_type = String + default = """" + + ""--cgconnect"" + help = ""Output CONECTs in CG PDB."" + action = :store_true + + ""--mmCIF"", ""-M"" + help = ""Output as mmCIF/PDBx."" + action = :store_true + + ""--verbose"" + help = ""Output more information."" + action = :store_true + + ""--debug"" + help = ""DEBUG."" + action = :store_true + end + + return parse_args(s) +end + + + +if abspath(PROGRAM_FILE) == @__FILE__ + + args = parse_commandline() + + main(args) + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/aa_2_cg.jl",".jl","10111","356","#!/usr/bin/env julia + +############################################################################### +# README +# +# This program read PDB structures and prepare toppology and coordinate files +# for CG MD simulations in Genesis. +# +# PDB format: +# 1. Atoms startswith ""ATOM "" +# 2. 
Chains should end with ""TER"" and have different IDs +# +# Unit in the script: kcal/mol, Å +# Unit for output: kJ/mol, nm +############################################################################### + +using Printf +using ArgParse + +include(""./lib/biomath.jl"") +include(""./lib/constants.jl"") +include(""./lib/molecule.jl"") +include(""./lib/topology.jl"") +include(""./lib/selection.jl"") +include(""./lib/conformation.jl"") +include(""./lib/coarse_graining_subroutines.jl"") +include(""./lib/parsers.jl"") +include(""./lib/coarse_graining.jl"") + + +function aa_2_cg(args) + + # ----------------- + # Parsing arguments + # ----------------- + verbose = get(args, ""verbose"", false) + pdb_name = get(args, ""pdb"", """") + is_mmCIF = get(args, ""mmCIF"", false) + gen_pwmcos_itp = get(args, ""pwmcos"", false) + gen_pwmcos_ns_itp = get(args, ""pwmcos-ns"", false) + do_output_psf = get(args, ""psf"", false) + do_output_cgpdb = get(args, ""cgpdb"", false) + do_output_sequence = get(args, ""show-sequence"", false) + ff_protein_name = get(args, ""force-field-protein"", ""AICG2+"") + ff_DNA_name = get(args, ""force-field-DNA"", ""3SPN.2C"") + ff_RNA_name = get(args, ""force-field-RNA"", ""HT"") + + # --------------- + # Set force field + # --------------- + if haskey(FF_PRO_DICT, ff_protein_name) + ff_pro = FF_PRO_DICT[ff_protein_name] + else + error(""Wrong force field for protein."") + end + if haskey(FF_DNA_DICT, ff_DNA_name) + ff_dna = FF_DNA_DICT[ff_DNA_name] + else + error(""Wrong force field for DNA."") + end + if haskey(FF_RNA_DICT, ff_RNA_name) + ff_rna = FF_RNA_DICT[ff_RNA_name] + else + error(""Wrong force field for RNA."") + end + + if gen_pwmcos_itp + ff_pro_dna = FF_PWMcos + elseif gen_pwmcos_ns_itp + ff_pro_dna = FF_PWMcos_ns + elseif get(args, ""protein-DNA-Go"", false) + ff_pro_dna = FF_pro_DNA_Go + else + ff_pro_dna = FF_UNKNOWN + end + ff_pro_rna = FF_pro_RNA_Go + ff_dna_rna = FF_UNKNOWN + + force_field = ForceFieldCG(ff_pro, ff_dna, ff_rna, 
ff_pro_dna, ff_pro_rna, ff_dna_rna) + + # ----------------------------------- + # FF modeling options from .toml file + # ----------------------------------- + toml_filename = get(args, ""config"", """") + if length(toml_filename) > 0 + new_toml_config = read_TOML(toml_filename) + args[""modeling-options""] = new_toml_config + end + + + ########################################################################### + # Core Functions # + ########################################################################### + + mol_name = get(args, ""output-name"", """") + if length(mol_name) == 0 + mol_name = pdb_name[1:end-4] * ""_cg"" + end + + # -------- + # Read PDB + # -------- + if verbose + println(""============================================================"") + println(""> Open PDB file:"") + end + + if is_mmCIF + cif_data = read_mmCIF(pdb_name) + aa_molecule = mmCIF_to_AAMolecule(cif_data) + else + aa_molecule = read_PDB(pdb_name) + end + + aa_num_atom = length(aa_molecule.atom_names) + aa_num_residue = length(aa_molecule.residues) + aa_num_chain = length(aa_molecule.chains) + + if verbose + println("" > Number of atoms : $(aa_num_atom)"") + println("" > Number of residues : $(aa_num_residue)"") + println("" > Number of chains : $(aa_num_chain)"") + end + + if do_output_sequence + write_sequence(aa_molecule, mol_name) + + if verbose + println(""------------------------------------------------------------"") + println("" DONE!  
"") + println(""============================================================"") + end + + return 0 + end + + # ----------------------------------- + # Make a CG topology from AA molecule + # ----------------------------------- + cg_top, cg_conf = coarse_graining(aa_molecule, force_field, args) + + rg_all = radius_of_gyration(cg_conf) + rc_all = radius_of_circumshpere(cg_conf) + + ########################################################################### + # Output CG Topology # + ########################################################################### + + if gen_pwmcos_itp || gen_pwmcos_ns_itp + do_output_top = false + do_output_gro = false + do_output_pwmcos = true + else + do_output_top = true + do_output_gro = true + do_output_pwmcos = false + end + + if verbose + println(""============================================================"") + println(""> Output CG .itp and .gro files."") + end + + if do_output_top + write_grotop(cg_top, mol_name, args) + end + if do_output_pwmcos + write_grotop_pwmcos(cg_top, mol_name, args) + end + + if do_output_gro + write_grocrd(cg_top, cg_conf, mol_name, args) + end + + if do_output_psf + write_psf(cg_top, mol_name, args) + end + + if do_output_cgpdb + write_pdb(cg_top, cg_conf, mol_name, args) + end + + if verbose + println(""------------------------------------------------------------"") + println(""------------------------------------------------------------"") + println(""> FINISH!  "") + println("" Please check the .itp and .gro files."") + println(""============================================================"") + end + +end + +# ============================= +# Parsing Commandline Arguments +# ============================= +function parse_commandline() + s = ArgParseSettings() + + @add_arg_table s begin + + ""pdb"" + help = ""PDB file name."" + required = true + arg_type = String + + ""--force-field-protein"" + help = ""Force field for protein. 
(AICG2+, Clementi)"" + arg_type = String + default = ""AICG2+"" + + ""--force-field-DNA"" + help = ""Force field for DNA. (3SPN.2C)"" + arg_type = String + default = ""3SPN.2C"" + + ""--force-field-RNA"" + help = ""Force field for RNA. (HT)"" + arg_type = String + default = ""HT"" + + ""--config"", ""-f"" + help = ""Force field configuration details."" + arg_type = String + default = """" + + ""--CCGO-contact-scale"" + help = ""Scaling native contact interaction coefficient."" + arg_type = Float64 + default = 1.0 + + ""--respac"", ""-c"" + help = ""RESPAC protein charge distribution data file."" + arg_type = String + default = """" + + ""--aicg-scale"" + help = ""Scale AICG2+ local interactions: 0) average; 1) general (default)."" + arg_type = Int + default = 1 + + ""--use-safe-dihedral"" + help = ""Safe dih potential: 0) do nothing; 1) cos^2(kθ) type (default); 2) remove dih w/ large angles; 3) sin^3(kθ) type."" + arg_type = Int + default = 1 + + ""--3spn-param"" + help = ""Generate 3SPN.2C parameters from atomic PDB structure or standard library? 
0) no params; 1) PDB structure based params; 2) standard DNA params."" + arg_type = Int + default = 0 + + ""--3spn-use-5-phos"" + help = ""Generate 3SPN.2C parameters with 5-phosphate."" + action = :store_true + + ""--protein-DNA-Go"" + help = ""Generate parameters for protein-DNA Go-like contact interactions."" + action = :store_true + + ""--pwmcos"" + help = ""Generate parameters for protein-DNA sequence-specific interactions."" + action = :store_true + + ""--pwmcos-scale"" + help = ""Energy scaling factor for PWMcos."" + arg_type = Float64 + default = 1.0 + + ""--pwmcos-shift"" + help = ""Energy shifting factor for PWMcos."" + arg_type = Float64 + default = 0.0 + + ""--pwmcos-ns"" + help = ""Generate parameters for protein-DNA sequence-NON-specific interactions (hydrogen-bond)."" + action = :store_true + + ""--pwmcos-ns-ene"" + help = ""Interaction strength for PWMcos-ns (hydrogen-bond)."" + arg_type = Float64 + default = -1.0 + + ""--psf"" + help = ""Prepare PSF file."" + action = :store_true + + ""--cgpdb"" + help = ""Prepare CG PDB file."" + action = :store_true + + ""--cgconnect"" + help = ""Prepare CG PDB file with CONECTed bonds."" + action = :store_true + + ""--cgRNA-phosphate-Go"" + help = ""Include phosphate in Go-type contact interactions."" + action = :store_true + + ""--pfm"", ""-p"" + help = ""Position frequency matrix file for protein-DNA sequence-specific interactions."" + arg_type = String + default = """" + + ""--test-local-only"" + help = ""TEST: only generate local interaction parameters."" + action = :store_true + + ""--patch"" + help = ""Append (apply patch) to .itp file."" + arg_type = String + default = """" + + ""--show-sequence"" + help = ""Show sequence of molecules in PDB."" + action = :store_true + + ""--mmCIF"" + help = ""Use mmCIF format PDB file as input."" + action = :store_true + + ""--output-name"" + help = ""Specify the system name for output."" + arg_type = String + default = """" + + ""--verbose"", ""-v"" + help = ""Output 
more information."" + action = :store_true + + ""--log"" + help = ""Output information to log file."" + action = :store_true + + ""--debug"" + help = ""DEBUG."" + action = :store_true + end + + return parse_args(s) +end + +# ==== +# Main +# ==== + +function main() + + args = parse_commandline() + + aa_2_cg(args) + +end + +if abspath(PROGRAM_FILE) == @__FILE__ + main() +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/coarse_graining_subroutines.jl",".jl","24524","576","############################################################################### +# Functions related to CG models # +############################################################################### + + +############################################################################### +# ____ _ _ # +# | _ \ _ __ ___ | |_ ___(_)_ __ # +# | |_) | '__/ _ \| __/ _ \ | '_ \ # +# | __/| | | (_) | || __/ | | | | # +# |_| |_| \___/ \__\___|_|_| |_| # +# # +############################################################################### + +# ------------------------------ +# General Protein Native Contact +# ------------------------------ + +function is_protein_native_contact(resid1_atoms::Vector{Int}, resid2_atoms::Vector{Int}, atom_names::Vector{String}, atom_coors::Array{<:Real, 2}) + for i in resid1_atoms + atom_name_1 = atom_names[i] + if atom_name_1[1] == 'H' + continue + end + coor_1 = atom_coors[:, i] + for j in resid2_atoms + atom_name_2 = atom_names[j] + if atom_name_2[1] == 'H' + continue + end + coor_2 = atom_coors[:, j] + dist_12 = compute_distance(coor_1, coor_2) + if dist_12 < AICG_GO_ATOMIC_CUTOFF + return true + end + end + end + return false +end + + +# -------------------- +# AICG2+ Protein Model +# -------------------- + +function is_protein_backbone(atom_name::String) + if in(atom_name, (""N"", ""C"", ""O"", ""OXT"", ""CA"")) + return true + end + return false +end + +function is_protein_hb_donor(atom_name::String, res_name::String) + if atom_name[1] == 'N' + return true + elseif 
atom_name[1] == 'S' && res_name == ""CYS"" + return true + elseif atom_name[1] == 'O' + if ( res_name == ""SER"" && atom_name == ""OG"" ) || + ( res_name == ""THR"" && atom_name == ""OG1"" ) || + ( res_name == ""TYR"" && atom_name == ""OH"" ) + return true + end + end + return false +end + +function is_protein_hb_acceptor(atom_name::String) + if atom_name[1] == 'O' || atom_name[1] == 'S' + return true + end + return false +end + +function is_protein_cation(atom_name::String, res_name::String) + if atom_name[1] == 'N' + if ( res_name == ""ARG"" && atom_name == ""NH1"" ) || + ( res_name == ""ARG"" && atom_name == ""NH2"" ) || + ( res_name == ""LYS"" && atom_name == ""NZ"" ) + return true + end + end + return false +end + +function is_protein_anion(atom_name::String, res_name::String) + if atom_name[1] == 'O' + if ( res_name == ""GLU"" && atom_name == ""OE1"" ) || + ( res_name == ""GLU"" && atom_name == ""OE2"" ) || + ( res_name == ""ASP"" && atom_name == ""OD1"" ) || + ( res_name == ""ASP"" && atom_name == ""OD2"" ) + return true + end + end + return false +end + +function is_protein_hb_pair(atom_name_1::String, res_name_1::String, atom_name_2::String, res_name_2::String) + if is_protein_hb_acceptor(atom_name_1) && + is_protein_hb_donor(atom_name_2, res_name_2) + return true + elseif is_protein_hb_acceptor(atom_name_2) && + is_protein_hb_donor(atom_name_1, res_name_1) + return true + end + return false +end + +function is_protein_sb_pair(atom_name_1::String, res_name_1::String, atom_name_2::String, res_name_2::String) + if is_protein_cation(atom_name_1, res_name_1) && + is_protein_anion(atom_name_2, res_name_2) + return true + elseif is_protein_cation(atom_name_2, res_name_2) && + is_protein_anion(atom_name_1, res_name_1) + return true + end + return false +end + +function is_protein_nonsb_charge_pair(atom_name_1::String, res_name_1::String, atom_name_2::String, res_name_2::String) + if is_protein_cation(atom_name_1, res_name_1) || + is_protein_anion(atom_name_1, 
res_name_1) || + is_protein_cation(atom_name_2, res_name_2) || + is_protein_anion(atom_name_2, res_name_2) + return true + end + return false +end + +function count_aicg_atomic_contact(resid1_atoms::Vector{Int}, resid2_atoms::Vector{Int}, res_name_1::String, res_name_2::String, atom_names::Vector{String}, atom_coors::Array{<:Real, 2}) + contact_count = zeros(Int, 17) + contact_count[AICG_ITYPE_OFFST] = 1 + num_short_range_contact = 0 + for i in resid1_atoms + atom_name_1 = atom_names[i] + if atom_name_1[1] == 'H' + continue + end + coor_1 = atom_coors[:, i] + for j in resid2_atoms + atom_name_2 = atom_names[j] + if atom_name_2[1] == 'H' + continue + end + coor_2 = atom_coors[:, j] + dist_12 = compute_distance(coor_1, coor_2) + + is_hb = is_protein_hb_pair(atom_name_1, res_name_1, atom_name_2, res_name_2) + is_sb = is_protein_sb_pair(atom_name_1, res_name_1, atom_name_2, res_name_2) + is_nonsb_charge = is_protein_nonsb_charge_pair(atom_name_1, res_name_1, atom_name_2, res_name_2) + is_1_backbone = is_protein_backbone(atom_name_1) + is_2_backbone = is_protein_backbone(atom_name_2) + if dist_12 < AICG_GO_ATOMIC_CUTOFF + contact_count[AICG_ITYPE_LR_CT] += 1 + end + if dist_12 < AICG_ATOMIC_CUTOFF + num_short_range_contact += 1 + if is_1_backbone && is_2_backbone + if is_hb + if dist_12 < AICG_HYDROGEN_BOND_CUTOFF + contact_count[AICG_ITYPE_BB_HB] += 1 + else + contact_count[AICG_ITYPE_BB_DA] += 1 + end + elseif atom_name_1[1] == 'C' || atom_name_2[1] == 'C' + contact_count[AICG_ITYPE_BB_CX] += 1 + else + contact_count[AICG_ITYPE_BB_XX] += 1 + end + elseif ( !is_1_backbone ) && ( !is_2_backbone ) + if is_hb + if is_sb + if dist_12 < AICG_SALT_BRIDGE_CUTOFF + contact_count[AICG_ITYPE_SS_SB] += 1 + else + contact_count[AICG_ITYPE_SS_QX] += 1 + end + elseif dist_12 < AICG_HYDROGEN_BOND_CUTOFF + contact_count[AICG_ITYPE_SS_HB] += 1 + elseif is_nonsb_charge + contact_count[AICG_ITYPE_SS_QX] += 1 + else + contact_count[AICG_ITYPE_SS_DA] += 1 + end + elseif is_nonsb_charge + 
contact_count[AICG_ITYPE_SS_QX] += 1 + elseif atom_name_1[1] == 'C' || atom_name_2[1] == 'C' + contact_count[AICG_ITYPE_SS_CX] += 1 + else + contact_count[AICG_ITYPE_SS_XX] += 1 + end + elseif ( is_1_backbone && ( !is_2_backbone ) ) || + ( is_2_backbone && ( !is_1_backbone ) ) + if is_hb + if dist_12 < AICG_HYDROGEN_BOND_CUTOFF + contact_count[AICG_ITYPE_SB_HB] += 1 + elseif is_nonsb_charge + contact_count[AICG_ITYPE_SB_QX] += 1 + else + contact_count[AICG_ITYPE_SB_DA] += 1 + end + elseif is_nonsb_charge + contact_count[AICG_ITYPE_SB_QX] += 1 + elseif atom_name_1[1] == 'C' || atom_name_2[1] == 'C' + contact_count[AICG_ITYPE_SB_CX] += 1 + else + contact_count[AICG_ITYPE_SB_XX] += 1 + end + end + end + end + end + + # control the number of long-range contacts + if AICG_GO_ATOMIC_CUTOFF > AICG_ATOMIC_CUTOFF + contact_count[AICG_ITYPE_LR_CT] -= num_short_range_contact + else + contact_count[AICG_ITYPE_LR_CT] = 0 + end + + # control the number of salty bridge + if contact_count[AICG_ITYPE_SS_SB] >= 2 + contact_count[AICG_ITYPE_SS_QX] += contact_count[AICG_ITYPE_SS_SB] - 1 + contact_count[AICG_ITYPE_SS_SB] = 1 + end + + return contact_count +end + + +############################################################################### +# ____ _ _ _ # +# | _ \ | \ | | / \ # +# | | | || \| | / _ \ # +# | |_| || |\ | / ___ \ # +# |____/ |_| \_|/_/ \_\ # +# # +############################################################################### + +# ----------------- +# 3SPN.2C DNA model +# ----------------- + +function get_DNA3SPN_bond_length(bond_type::String, base_step::String) + # Sugar-Base + SB_length = Dict(""A "" => 4.864, ""C "" => 4.300, ""G "" => 4.973, ""T "" => 4.379) + # Sugar-Phosphate + SP_length = Dict( + ""AA"" => 3.688, ""AC"" => 3.018, ""AG"" => 3.836, ""AT"" => 3.287, + ""CA"" => 4.386, ""CC"" => 3.538, ""CG"" => 4.676, ""CT"" => 3.999, + ""GA"" => 3.736, ""GC"" => 3.256, ""GG"" => 3.633, ""GT"" => 3.285, + ""TA"" => 4.191, ""TC"" => 3.707, ""TG"" => 4.391, ""TT"" 
=> 3.868 + ) + # Phosphate-Sugar + PS_length = Dict( + ""XA"" => 3.747, ""XC"" => 3.725, ""XG"" => 3.723, ""XT"" => 3.758, + ""AA"" => 3.745, ""AC"" => 3.704, ""AG"" => 3.725, ""AT"" => 3.729, + ""CA"" => 3.753, ""CC"" => 3.786, ""CG"" => 3.686, ""CT"" => 3.784, + ""GA"" => 3.740, ""GC"" => 3.700, ""GG"" => 3.766, ""GT"" => 3.760, + ""TA"" => 3.751, ""TC"" => 3.710, ""TG"" => 3.716, ""TT"" => 3.759 + ) + bond_length_data = Dict(""SB""=>SB_length, ""SP""=>SP_length, ""PS""=>PS_length) + + return bond_length_data[bond_type][base_step] +end + +function get_DNA3SPN_angle_equilibrium(angle_type::String, base_step::String) + # Base-Sugar-Phosphate + BSP_params = Dict( + ""AA"" => 113.855, ""AC"" => 114.226, ""AG"" => 112.201, ""AT"" => 111.931, + ""CA"" => 113.822, ""CC"" => 112.056, ""CG"" => 116.081, ""CT"" => 111.008, + ""GA"" => 114.665, ""GC"" => 118.269, ""GG"" => 110.102, ""GT"" => 111.146, + ""TA"" => 113.984, ""TC"" => 115.457, ""TG"" => 113.397, ""TT"" => 113.606 + ) + # Phosphate-Sugar-Base + PSB_params = Dict( + ""XA"" => 108.200, ""XC"" => 103.850, ""XG"" => 111.750, ""XT"" => 98.523, + ""AA"" => 108.826, ""AC"" => 105.066, ""AG"" => 112.796, ""AT"" => 99.442, + ""CA"" => 107.531, ""CC"" => 103.509, ""CG"" => 110.594, ""CT"" => 97.807, + ""GA"" => 108.064, ""GC"" => 103.135, ""GG"" => 112.654, ""GT"" => 98.577, + ""TA"" => 108.414, ""TC"" => 103.853, ""TG"" => 111.732, ""TT"" => 98.271 + ) + # Phosphate-Sugar-Phosphate + PSP_params = Dict( + # TODO: currently using ""X"" as ""A"", should be changed to average??? 
+ ""XAA"" => 120.685, ""XAC"" => 112.882, ""XAG"" => 113.827, ""XAT"" => 117.435, + ""XCA"" => 119.061, ""XCC"" => 120.353, ""XCG"" => 113.240, ""XCT"" => 121.103, + ""XGA"" => 122.182, ""XGC"" => 118.658, ""XGG"" => 120.489, ""XGT"" => 122.928, + ""XTA"" => 117.235, ""XTC"" => 112.084, ""XTG"" => 111.714, ""XTT"" => 119.324, + ""AAA"" => 120.685, ""AAC"" => 112.882, ""AAG"" => 113.827, ""AAT"" => 117.435, + ""ACA"" => 119.061, ""ACC"" => 120.353, ""ACG"" => 113.240, ""ACT"" => 121.103, + ""AGA"" => 122.182, ""AGC"" => 118.658, ""AGG"" => 120.489, ""AGT"" => 122.928, + ""ATA"" => 117.235, ""ATC"" => 112.084, ""ATG"" => 111.714, ""ATT"" => 119.324, + ""CAA"" => 122.866, ""CAC"" => 115.083, ""CAG"" => 116.036, ""CAT"" => 119.640, + ""CCA"" => 120.442, ""CCC"" => 121.712, ""CCG"" => 114.602, ""CCT"" => 122.446, + ""CGA"" => 124.721, ""CGC"" => 121.204, ""CGG"" => 122.937, ""CGT"" => 125.429, + ""CTA"" => 119.317, ""CTC"" => 114.156, ""CTG"" => 113.756, ""CTT"" => 121.413, + ""GAA"" => 120.809, ""GAC"" => 112.897, ""GAG"" => 113.816, ""GAT"" => 117.461, + ""GCA"" => 119.550, ""GCC"" => 120.788, ""GCG"" => 113.687, ""GCT"" => 121.506, + ""GGA"" => 121.512, ""GGC"" => 118.019, ""GGG"" => 119.634, ""GGT"" => 122.157, + ""GTA"" => 117.087, ""GTC"" => 111.922, ""GTG"" => 111.501, ""GTT"" => 119.185, + ""TAA"" => 122.361, ""TAC"" => 114.671, ""TAG"" => 115.653, ""TAT"" => 119.219, + ""TCA"" => 121.235, ""TCC"" => 122.532, ""TCG"" => 115.417, ""TCT"" => 123.284, + ""TGA"" => 123.936, ""TGC"" => 120.395, ""TGG"" => 122.319, ""TGT"" => 124.730, + ""TTA"" => 119.004, ""TTC"" => 113.847, ""TTG"" => 113.465, ""TTT"" => 121.093 + ) + # Sugar-Phosphate-Sugar + SPS_params = Dict( + ""AA"" => 94.805, ""AC"" => 94.462, ""AG"" => 95.308, ""AT"" => 95.232, + ""CA"" => 95.110, ""CC"" => 98.906, ""CG"" => 92.244, ""CT"" => 97.476, + ""GA"" => 94.973, ""GC"" => 92.666, ""GG"" => 97.929, ""GT"" => 97.640, + ""TA"" => 94.886, ""TC"" => 93.066, ""TG"" => 93.999, ""TT"" => 95.122 + ) + 
angle_data = Dict( + ""BSP"" => BSP_params, + ""PSB"" => PSB_params, + ""PSP"" => PSP_params, + ""SPS"" => SPS_params + ) + + return angle_data[angle_type][base_step] +end + +function get_DNA3SPN_angle_param(angle_type::String, base_step::String) + # Base-Sugar-Phosphate + BSP_params = Dict( + ""AA"" => 460, ""AT"" => 370, ""AC"" => 442, ""AG"" => 358, + ""TA"" => 120, ""TT"" => 460, ""TC"" => 383, ""TG"" => 206, + ""CA"" => 206, ""CT"" => 358, ""CC"" => 278, ""CG"" => 278, + ""GA"" => 383, ""GT"" => 442, ""GC"" => 336, ""GG"" => 278 + ) + # Phosphate-Sugar-Base + PSB_params = Dict( + ""XA"" => 292, ""XT"" => 407, ""XC"" => 359, ""XG"" => 280, + ""AA"" => 460, ""TA"" => 120, ""CA"" => 206, ""GA"" => 383, + ""AT"" => 370, ""TT"" => 460, ""CT"" => 358, ""GT"" => 442, + ""AC"" => 442, ""TC"" => 383, ""CC"" => 278, ""GC"" => 336, + ""AG"" => 358, ""TG"" => 206, ""CG"" => 278, ""GG"" => 278 + ) + # Phosphate-Sugar-Phosphate + PSP_params = Dict( + ""all"" => 300 + ) + # Sugar-Phosphate-Sugar + SPS_params = Dict( + ""AA"" => 355, ""AT"" => 147, ""AC"" => 464, ""AG"" => 368, + ""TA"" => 230, ""TT"" => 355, ""TC"" => 442, ""TG"" => 273, + ""CA"" => 273, ""CT"" => 368, ""CC"" => 165, ""CG"" => 478, + ""GA"" => 442, ""GT"" => 464, ""GC"" => 228, ""GG"" => 165 + ) + angle_params = Dict( + ""BSP"" => BSP_params, + ""PSB"" => PSB_params, + ""PSP"" => PSP_params, + ""SPS"" => SPS_params + ) + + return angle_params[angle_type][base_step] * JOU2CAL +end + +function get_DNA3SPN_dihedral_equilibrium(angle_type::String, base_step::String) + # Base-Sugar-Phosphate-Sugar + BSPS_params = Dict( + ""AA"" => -23.347, ""AC"" => -27.858, ""AG"" => -27.117, ""AT"" => -29.246, + ""CA"" => -31.608, ""CC"" => -31.364, ""CG"" => -34.383, ""CT"" => -33.819, + ""GA"" => -16.641, ""GC"" => -17.077, ""GG"" => -20.529, ""GT"" => -21.472, + ""TA"" => -36.960, ""TC"" => -39.034, ""TG"" => -39.283, ""TT"" => -38.799 + ) + # Sugar-Phosphate-Sugar-Base + SPSB_params = Dict( + ""AA"" => 45.425, ""AC"" => 
54.789, ""AG"" => 46.984, ""AT"" => 57.208, + ""CA"" => 45.195, ""CC"" => 49.771, ""CG"" => 44.547, ""CT"" => 53.367, + ""GA"" => 41.089, ""GC"" => 45.515, ""GG"" => 43.923, ""GT"" => 51.560, + ""TA"" => 47.078, ""TC"" => 52.838, ""TG"" => 46.053, ""TT"" => 54.408 + ) + # Sugar-Phosphate-Sugar-Phosphate + SPSP_params = Dict( + ""AAA"" => 179.785, ""AAC"" => 173.331, ""AAG"" => 171.377, ""AAT"" => 173.860, + ""ACA"" => -176.300, ""ACC"" => -177.745, ""ACG"" => -177.543, ""ACT"" => -178.626, + ""AGA"" => -169.949, ""AGC"" => -168.414, ""AGG"" => 179.834, ""AGT"" => -175.422, + ""ATA"" => -179.491, ""ATC"" => 179.733, ""ATG"" => 177.177, ""ATT"" => -178.801, + ""CAA"" => -179.648, ""CAC"" => 173.730, ""CAG"" => 171.730, ""CAT"" => 174.273, + ""CCA"" => 178.306, ""CCC"" => 176.814, ""CCG"" => 177.164, ""CCT"" => 175.901, + ""CGA"" => -172.058, ""CGC"" => -170.458, ""CGG"" => 177.459, ""CGT"" => -177.699, + ""CTA"" => 176.566, ""CTC"" => 175.846, ""CTG"" => 173.254, ""CTT"" => 177.252, + ""GAA"" => 174.706, ""GAC"" => 168.411, ""GAG"" => 166.426, ""GAT"" => 168.841, + ""GCA"" => 173.035, ""GCC"" => 171.523, ""GCG"" => 172.053, ""GCT"" => 170.601, + ""GGA"" => -174.234, ""GGC"" => -172.619, ""GGG"" => 175.674, ""GGT"" => -179.682, + ""GTA"" => 174.167, ""GTC"" => 173.514, ""GTG"" => 170.969, ""GTT"" => 174.788, + ""TAA"" => -177.232, ""TAC"" => 176.044, ""TAG"" => 174.067, ""TAT"" => 176.665, + ""TCA"" => -177.663, ""TCC"" => -179.135, ""TCG"" => -178.953, ""TCT"" => 179.965, + ""TGA"" => -169.881, ""TGC"" => -168.369, ""TGG"" => 179.680, ""TGT"" => -175.458, + ""TTA"" => 177.790, ""TTC"" => 177.037, ""TTG"" => 174.444, ""TTT"" => 178.479 + ) + # Phosphate-Sugar-Phosphate-Sugar + PSPS_params = Dict( + # TODO: currently using ""X"" as ""A"", should be changed to average??? 
+ ""XAA"" => -155.622, ""XAC"" => -152.885, ""XAG"" => -151.259, ""XAT"" => -156.185, + ""XCA"" => -156.388, ""XCC"" => -155.577, ""XCG"" => -156.063, ""XCT"" => -157.660, + ""XGA"" => -159.083, ""XGC"" => -159.751, ""XGG"" => -154.497, ""XGT"" => -159.668, + ""XTA"" => -152.487, ""XTC"" => -151.938, ""XTG"" => -150.672, ""XTT"" => -155.597, + ""AAA"" => -155.622, ""AAC"" => -152.885, ""AAG"" => -151.259, ""AAT"" => -156.185, + ""ACA"" => -156.388, ""ACC"" => -155.577, ""ACG"" => -156.063, ""ACT"" => -157.660, + ""AGA"" => -159.083, ""AGC"" => -159.751, ""AGG"" => -154.497, ""AGT"" => -159.668, + ""ATA"" => -152.487, ""ATC"" => -151.938, ""ATG"" => -150.672, ""ATT"" => -155.597, + ""CAA"" => -156.021, ""CAC"" => -152.981, ""CAG"" => -151.273, ""CAT"" => -156.309, + ""CCA"" => -155.364, ""CCC"" => -154.499, ""CCG"" => -155.058, ""CCT"" => -156.547, + ""CGA"" => -158.746, ""CGC"" => -159.509, ""CGG"" => -153.638, ""CGT"" => -159.033, + ""CTA"" => -151.817, ""CTC"" => -151.269, ""CTG"" => -149.902, ""CTT"" => -154.955, + ""GAA"" => -154.534, ""GAC"" => -151.854, ""GAG"" => -150.223, ""GAT"" => -155.116, + ""GCA"" => -154.009, ""GCC"" => -153.155, ""GCG"" => -153.791, ""GCT"" => -155.211, + ""GGA"" => -157.783, ""GGC"" => -158.478, ""GGG"" => -153.379, ""GGT"" => -158.439, + ""GTA"" => -151.220, ""GTC"" => -150.726, ""GTG"" => -149.471, ""GTT"" => -154.288, + ""TAA"" => -156.903, ""TAC"" => -153.864, ""TAG"" => -152.178, ""TAT"" => -157.225, + ""TCA"" => -156.627, ""TCC"" => -155.754, ""TCG"" => -156.236, ""TCT"" => -157.799, + ""TGA"" => -159.780, ""TGC"" => -160.478, ""TGG"" => -154.803, ""TGT"" => -160.164, + ""TTA"" => -152.217, ""TTC"" => -151.655, ""TTG"" => -150.303, ""TTT"" => -155.342 + ) + angle_data = Dict( + ""BSPS"" => BSPS_params, + ""SPSB"" => SPSB_params, + ""SPSP"" => SPSP_params, + ""PSPS"" => PSPS_params + ) + + return angle_data[angle_type][base_step] + +end + + +############################################################################### +# ____ 
_ _ _ # +# | _ \ | \ | | / \ # +# | |_) || \| | / _ \ # +# | _ < | |\ | / ___ \ # +# |_| \_\|_| \_|/_/ \_\ # +# # +############################################################################### + +# ------------------------- +# RNA structure-based model +# ------------------------- +function is_RNA_hydrogen_bond(atom_name_1::Char, atom_name_2::Char) + special_atom_list = ['F', 'O', 'N'] + if atom_name_1 in special_atom_list && atom_name_2 in special_atom_list + return true + end + return false +end + +function compute_RNA_native_contact(resid1_atoms::Vector{Int}, resid2_atoms::Vector{Int}, atom_names::Vector{String}, atom_coors::Array{<:Real, 2}) + hb_count = 0 + min_dist = 1.0e50 + for i in resid1_atoms + atom_name_1 = atom_names[i] + if atom_name_1[1] == 'H' + continue + end + coor_1 = atom_coors[:, i] + for j in resid2_atoms + atom_name_2 = atom_names[j] + if atom_name_2[1] == 'H' + continue + end + coor_2 = atom_coors[:, j] + dist_12 = compute_distance(coor_1, coor_2) + if dist_12 < RNA_GO_ATOMIC_CUTOFF && is_RNA_hydrogen_bond(atom_name_1[1], atom_name_2[1]) + hb_count += 1 + end + if dist_12 < min_dist + min_dist = dist_12 + end + end + end + return (min_dist, hb_count) +end + + +############################################################################### +# _ _ ____ _ _ _ # +# _ __ _ __ ___ | |_ ___ (_) _ __ | _ \ | \ | | / \ # +# | '_ \ | '__|/ _ \ | __|/ _ \| || '_ \ _____ | | | || \| | / _ \ # +# | |_) || | | (_) || |_| __/| || | | ||_____|| |_| || |\ | / ___ \ # +# | .__/ |_| \___/ \__|\___||_||_| |_| |____/ |_| \_|/_/ \_\ # +# |_| # +############################################################################### + +# ============== +# PWMcos contact +# ============== + +function is_PWMcos_contact(resid1_atoms::Vector{Int}, resid2_atoms::Vector{Int}, atom_names::Vector{String}, atom_coors::Array{<:Real, 2}) + for i in resid1_atoms + atom_name_1 = atom_names[i] + if atom_name_1[1] == 'H' + continue + end + coor_1 = atom_coors[:, i] + for j in 
resid2_atoms + atom_name_2 = atom_names[j] + if atom_name_2[1] == 'H' + continue + end + coor_2 = atom_coors[:, j] + dist_12 = compute_distance(coor_1, coor_2) + if dist_12 < PWMCOS_ATOMIC_CUTOFF + return true + end + end + end + return false +end + +# ============== +# PWMcos contact +# ============== + +function is_protein_DNA_Go_contact(resid1_atoms::Vector{Int}, resid2_atoms::Vector{Int}, atom_names::Vector{String}, atom_coors::Array{<:Real, 2}) + for i in resid1_atoms + atom_name_1 = atom_names[i] + if atom_name_1[1] == 'H' + continue + end + coor_1 = atom_coors[:, i] + for j in resid2_atoms + atom_name_2 = atom_names[j] + if atom_name_2[1] == 'H' + continue + end + coor_2 = atom_coors[:, j] + dist_12 = compute_distance(coor_1, coor_2) + if dist_12 < pro_DNA_GO_ATOMIC_CUTOFF + return true + end + end + end + return false +end + + + +############################################################################### +# _ _ ____ _ _ _ # +# _ __ _ __ ___ | |_ ___ (_) _ __ | _ \ | \ | | / \ # +# | '_ \ | '__|/ _ \ | __|/ _ \| || '_ \ _____ | |_) || \| | / _ \ # +# | |_) || | | (_) || |_| __/| || | | ||_____|| _ < | |\ | / ___ \ # +# | .__/ |_| \___/ \__|\___||_||_| |_| |_| \_\|_| \_|/_/ \_\ # +# |_| # +############################################################################### + +function is_protein_RNA_native_contact(resid1_atoms::Vector{Int}, resid2_atoms::Vector{Int}, atom_names::Vector{String}, atom_coors::Array{<:Real, 2}) + for i in resid1_atoms + atom_name_1 = atom_names[i] + if atom_name_1[1] == 'H' + continue + end + coor_1 = atom_coors[:, i] + for j in resid2_atoms + atom_name_2 = atom_names[j] + if atom_name_2[1] == 'H' + continue + end + coor_2 = atom_coors[:, j] + dist_12 = compute_distance(coor_1, coor_2) + if dist_12 < AICG_GO_ATOMIC_CUTOFF + return true + end + end + end + return false +end + + + + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/gcj.jl",".jl","294","13","include(""./biomath.jl"") +include(""./constants.jl"") 
+include(""./molecule.jl"") +include(""./topology.jl"") +include(""./selection.jl"") +include(""./conformation.jl"") +include(""./parsers.jl"") +include(""./coarse_graining_subroutines.jl"") +include(""./coarse_graining.jl"") +include(""./collective_variables.jl"") + + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/parser_toml.jl",".jl","3400","98","############################################################################### +# _____ ___ __ __ _ # +# |_ _/ _ \| \/ | | # +# | || | | | |\/| | | # +# | || |_| | | | | |___ # +# |_| \___/|_| |_|_____| # +# # +############################################################################### + +function read_TOML_normal_line(toml_line::AbstractString) + # remove comments + if occursin('#', toml_line) + sep = findfirst(""#"", toml_line) + toml_line = toml_line[1 : sep[1] - 1] + end + + # split by first ""="" + sep = findfirst(""="", toml_line) + if isnothing( sep ) + error(""ERROR: wrong format in toml: $toml_line"") + end + + t_key_str = strip( toml_line[1 : sep[1] - 1]) + t_val_str = strip( toml_line[sep[end] + 1 : end] ) + + t_key = strip( t_key_str, ['\'', '\""']) + + # t_val type: String, Int, Float, Bool + if t_val_str == ""true"" + t_val = true + elseif t_val_str == ""false"" + t_val = false + elseif t_val_str[1] == '\""' || t_val_str[1] == '\'' + t_val = strip(t_val_str, ['\'', '\""']) + elseif occursin(r""^[+-]?[1-9][0-9_]*$"", t_val_str) + t_int_str = replace(t_val_str, ""_"" => """") + t_val = parse(Int, t_int_str) + elseif occursin(r""^[+-]?(([0-9]*\.[0-9]+)|([0-9]+\.))([eE][+-]?[0-9]+)?$"", t_val_str) + t_val = parse(Float64, t_val_str) + elseif t_val_str[1] == '[' + try + t_expression = Meta.parse(t_val_str) + t_val = eval(t_expression) + catch + error(""BUG: cannot understand complex array format in TOML: $toml_line"") + end + else + error(""ERROR: data type not supported by TOML: $toml_line"") + end + + return (t_key, t_val) +end + +""A rather simple subroutine to parse TOML data."" +function 
read_TOML(toml_filename::String) + + # TODO: date + # TODO: dotted key + # TODO: multi-line string, array, table... + # TODO: array + + root_dict = Dict() + + tmp_dict = root_dict + for line in eachline(toml_filename) + toml_line = strip(line) + + if length(toml_line) == 0 + continue + end + + if toml_line[1] == '[' + if toml_line[2] == '[' + sep = findfirst(""]]"", toml_line) + dict_name = strip(toml_line[3 : sep[1] - 1]) + if haskey(root_dict, dict_name) + push!(root_dict[dict_name], Dict()) + tmp_dict = root_dict[dict_name][end] + else + root_dict[dict_name] = [] + push!(root_dict[dict_name], Dict()) + tmp_dict = root_dict[dict_name][end] + end + else + sep = findfirst(""]"", toml_line) + dict_name = strip(toml_line[2 : sep[1] - 1]) + root_dict[dict_name] = Dict() + tmp_dict = root_dict[dict_name] + end + elseif occursin('=', toml_line) + t_key, t_val = read_TOML_normal_line(toml_line) + tmp_dict[t_key] = t_val + end + end + + return root_dict +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/constants.jl",".jl","14646","561","############################################################################### +# Parameters # +############################################################################### + +# ================== +# Physical Constants +# ================== +const CAL2JOU = 4.184 +const JOU2CAL = 1.0 / CAL2JOU + +# ============ +# Force fields +# ============ + +struct ForceFieldCG + ff_protein::Int + ff_DNA::Int + ff_RNA::Int + ff_protein_DNA::Int + ff_protein_RNA::Int + ff_DNA_RNA::Int +end + +# protein +const FF_pro_AICG2p = 1 +const FF_pro_Clementi_Go = 2 +const FF_pro_KB_Go = 3 +# DNA +const FF_DNA_3SPN2C = 1 +# RNA +const FF_RNA_HT = 1 +# protein-DNA +const FF_PWMcos = 1 +const FF_pro_DNA_Go = 2 +const FF_PWMcos_ns = 3 +# protein-RNA +const FF_pro_RNA_Go = 1 +# unknown +const FF_UNKNOWN = 0 + +FF_PRO_DICT = Dict( + ""AICG2+"" => FF_pro_AICG2p, + ""Clementi"" => FF_pro_Clementi_Go, + ""KB-Go"" => FF_pro_KB_Go +) + +FF_DNA_DICT = Dict( + 
""3SPN.2C"" => FF_DNA_3SPN2C +) + +FF_RNA_DICT = Dict( + ""HT"" => FF_RNA_HT +) + +# ===================================== +# General Parameters: Mass, Charge, ... +# ===================================== + +ATOM_MASS_DICT = Dict( + 'C' => 12.011, + 'N' => 14.001, + 'O' => 15.999, + 'P' => 30.974, + 'S' => 32.065, + 'H' => 1.008 +) + +RES_MASS_DICT = Dict( + ""ALA"" => 71.09, + ""ARG"" => 156.19, + ""ASN"" => 114.11, + ""ASP"" => 115.09, + ""CYS"" => 103.15, + ""CYM"" => 103.15, + ""GLN"" => 128.14, + ""GLU"" => 129.12, + ""GLY"" => 57.05, + ""HIS"" => 137.14, + ""HSD"" => 137.14, + ""HSE"" => 137.14, + ""HSP"" => 138.14, + ""HID"" => 137.14, + ""HIE"" => 137.14, + ""HIP"" => 138.14, + ""ILE"" => 113.16, + ""LEU"" => 113.16, + ""LYS"" => 128.17, + ""MET"" => 131.19, + ""PHE"" => 147.18, + ""PRO"" => 97.12, + ""SER"" => 87.08, + ""THR"" => 101.11, + ""TRP"" => 186.21, + ""TYR"" => 163.18, + ""VAL"" => 99.14, + ""UNK"" => 100.00, + ""DA"" => 134.10, + ""DC"" => 110.10, + ""DG"" => 150.10, + ""DT"" => 125.10, + ""DP"" => 94.97, + ""DS"" => 83.11, + ""RA"" => 134.10, + ""RC"" => 110.10, + ""RG"" => 150.10, + ""RU"" => 111.10, + ""RT"" => 125.10, + ""RP"" => 62.97, + ""RS"" => 131.11 +) + +RES_CHARGE_DICT = Dict( + ""ALA"" => 0.0, + ""ARG"" => 1.0, + ""ASN"" => 0.0, + ""ASP"" => -1.0, + ""CYS"" => 0.0, + ""CYM"" => 0.0, + ""GLN"" => 0.0, + ""GLU"" => -1.0, + ""GLY"" => 0.0, + ""HIS"" => 0.0, + ""HSD"" => 0.0, + ""HSE"" => 0.0, + ""HSP"" => 1.0, + ""HID"" => 0.0, + ""HIE"" => 0.0, + ""HIP"" => 1.0, + ""ILE"" => 0.0, + ""LEU"" => 0.0, + ""LYS"" => 1.0, + ""MET"" => 0.0, + ""PHE"" => 0.0, + ""PRO"" => 0.0, + ""SER"" => 0.0, + ""THR"" => 0.0, + ""TRP"" => 0.0, + ""TYR"" => 0.0, + ""VAL"" => 0.0, + ""UNK"" => 0.0, + ""DA"" => 0.0, + ""DC"" => 0.0, + ""DG"" => 0.0, + ""DT"" => 0.0, + ""DP"" => -0.6, + ""DS"" => 0.0, + ""RA"" => 0.0, + ""RC"" => 0.0, + ""RG"" => 0.0, + ""RU"" => 0.0, + ""RT"" => 0.0, + ""RP"" => -1.0, + ""RS"" => 0.0 +) + +RES_SHORTNAME_DICT = Dict( + ""ALA"" 
=> ""A"", + ""ARG"" => ""R"", + ""ASN"" => ""N"", + ""ASP"" => ""D"", + ""CYS"" => ""C"", + ""CYM"" => ""C"", + ""GLN"" => ""Q"", + ""GLU"" => ""E"", + ""GLY"" => ""G"", + ""HIS"" => ""H"", + ""HSD"" => ""H"", + ""HSE"" => ""H"", + ""HSP"" => ""H"", + ""HID"" => ""H"", + ""HIE"" => ""H"", + ""HIP"" => ""H"", + ""ILE"" => ""I"", + ""LEU"" => ""L"", + ""LYS"" => ""K"", + ""MET"" => ""M"", + ""PHE"" => ""F"", + ""PRO"" => ""P"", + ""SER"" => ""S"", + ""THR"" => ""T"", + ""TRP"" => ""W"", + ""TYR"" => ""Y"", + ""VAL"" => ""V"", + ""UNK"" => ""X"", + ""DA"" => ""A"", + ""DC"" => ""C"", + ""DG"" => ""G"", + ""DT"" => ""T"", + ""RA"" => ""A"", + ""RC"" => ""C"", + ""RG"" => ""G"", + ""RU"" => ""U"", + ""ADE"" => ""A"", + ""CYT"" => ""C"", + ""GUA"" => ""G"", + ""URA"" => ""U"", + ""THY"" => ""T"", + ""A"" => ""A"", + ""C"" => ""C"", + ""G"" => ""G"", + ""U"" => ""U"", + ""T"" => ""T"" +) + +RES_NAME_LIST_PROTEIN = ( + ""ALA"", ""ARG"", ""ASN"", ""ASP"", + ""CYS"", ""GLN"", ""GLU"", ""GLY"", + ""HIS"", ""ILE"", ""LEU"", ""LYS"", + ""MET"", ""PHE"", ""PRO"", ""SER"", + ""THR"", ""TRP"", ""TYR"", ""VAL"", + ""HSD"", ""HSE"", ""HSP"", + ""HID"", ""HIE"", ""HIP"", + ""CYM"", ""UNK"") +RES_NAME_PROTEIN_DICT = Dict( + ""ALA"" => ""ALA"", + ""ARG"" => ""ARG"", + ""ASN"" => ""ASN"", + ""ASP"" => ""ASP"", + ""CYS"" => ""CYS"", + ""CYM"" => ""CYS"", + ""GLN"" => ""GLN"", + ""GLU"" => ""GLU"", + ""GLY"" => ""GLY"", + ""HIS"" => ""HIS"", + ""HSD"" => ""HIS"", + ""HSE"" => ""HIS"", + ""HSP"" => ""HIS"", + ""HID"" => ""HIS"", + ""HIE"" => ""HIS"", + ""HIP"" => ""HIS"", + ""ILE"" => ""ILE"", + ""LEU"" => ""LEU"", + ""LYS"" => ""LYS"", + ""MET"" => ""MET"", + ""PHE"" => ""PHE"", + ""PRO"" => ""PRO"", + ""SER"" => ""SER"", + ""THR"" => ""THR"", + ""TRP"" => ""TRP"", + ""TYR"" => ""TYR"", + ""VAL"" => ""VAL"", + ""UNK"" => ""UNK"" +) + +RES_NAME_LIST_DNA = (""DA"", ""DC"", ""DG"", ""DT"") +RES_NAME_DNA_DICT = Dict( + ""DA"" => ""DA"", + ""DC"" => ""DC"", + ""DG"" => ""DG"", + ""DT"" => 
""DT"", + ""A"" => ""DA"", + ""C"" => ""DC"", + ""G"" => ""DG"", + ""T"" => ""DT"", + ""ADE"" => ""DA"", + ""CYT"" => ""DC"", + ""GUA"" => ""DG"", + ""THY"" => ""DT"" +) + +RES_NAME_LIST_RNA = (""RA"", ""RC"", ""RG"", ""RU"") +RES_NAME_RNA_DICT = Dict( + ""RA"" => ""RA"", + ""RC"" => ""RC"", + ""RG"" => ""RG"", + ""RT"" => ""RT"", + ""RU"" => ""RU"", + ""A"" => ""RA"", + ""C"" => ""RC"", + ""G"" => ""RG"", + ""T"" => ""RT"", + ""U"" => ""RU"", + ""ADE"" => ""RA"", + ""CYT"" => ""RC"", + ""GUA"" => ""RG"", + ""URA"" => ""RU"", + ""THY"" => ""RT"" +) + + +# DNA CG residue atom names +ATOM_NAME_LIST_DP = (""P"", ""OP1"", ""OP2"", ""O5'"", ""O1P"", ""O2P"") +ATOM_NAME_LIST_DS = (""C5'"", ""C4'"", ""C3'"", ""C2'"", ""C1'"", ""O4'"") + +# RNA CG residue atom names +ATOM_NAME_LIST_RP = (""P"", ""OP1"", ""OP2"", ""O1P"", ""O2P"") +ATOM_NAME_LIST_RS = (""C5'"", ""C4'"", ""C3'"", ""C2'"", ""C1'"", ""O5'"", ""O4'"", ""O3'"", ""O2'"") + +RES_FASTA_LONGNAME_DICT_PRO = Dict( + 'A' => ""ALA"", + 'R' => ""ARG"", + 'N' => ""ASN"", + 'D' => ""ASP"", + 'C' => ""CYS"", + 'Q' => ""GLN"", + 'E' => ""GLU"", + 'G' => ""GLY"", + 'H' => ""HIS"", + 'I' => ""ILE"", + 'L' => ""LEU"", + 'K' => ""LYS"", + 'M' => ""MET"", + 'F' => ""PHE"", + 'P' => ""PRO"", + 'S' => ""SER"", + 'T' => ""THR"", + 'W' => ""TRP"", + 'Y' => ""TYR"", + 'V' => ""VAL"" +) + + +# ============== +# Molecule Types +# ============== + +const MOL_DNA = 1 +const MOL_RNA = 2 +const MOL_PROTEIN = 3 +const MOL_OTHER = 4 +MOL_TYPE_LIST = (""DNA"", ""RNA"", ""protein"", ""other"", ""unknown"") + +# =========================== +# General thresholds, cutoffs +# =========================== + +const CG_MOL_CONTACT_CUTOFF = 20.0 + +const DIHEDRAL_SAFE_CUTOFF = 150.0 +const DIHEDRAL_GAUS_MOD_TYPE = Dict( + 0 => 21, # use-dafe-dihedral = 0 + 1 => 41, # use-safe-dihedral = 1; cos^2(kθ) type + 3 => 43 # use-safe-dihedral = 3; sin^3(kθ) type +) +const DIHEDRAL_PERI_MOD_TYPE = Dict( + 0 => 1, # use-dafe-dihedral = 0 + 1 => 32, # 
use-safe-dihedral = 1; cos^2(kθ) type + 2 => 31, # use-safe-dihedral = 2; remove dangerous dih + 3 => 33 # use-safe-dihedral = 3; sin^3(θ) type +) +const DIHEDRAL_TABU_MOD_TYPE = Dict( + 0 => 22, # use-dafe-dihedral = 0 + 1 => 52 # use-safe-dihedral = 1; cos^2(kθ) type +) + +############################################################################### +# Molecule specific parameters # +############################################################################### + +# ==================================== +# Protein Clementi Go Model Parameters +# ==================================== + +# Clementi Go energy unit: epsilon +const CCGO_EPSILON = 1.0 +# Clementi Go bond force constant +const CCGO_BOND_K = 100.00 * CCGO_EPSILON +# Clementi Go angle force constant +const CCGO_ANGL_K = 20.00 * CCGO_EPSILON +# Clementi Go dihedral force constant +const CCGO_DIHE_K_1 = CCGO_EPSILON +const CCGO_DIHE_K_3 = CCGO_EPSILON * 0.5 +# Clementi Go native contact eps +const CCGO_NATIVE_EPSILON = CCGO_EPSILON + +# =============================== +# Protein AICG2+ Model Parameters +# =============================== + +# AICG2+ bond force constant +const AICG_BOND_K = 110.40 +# AICG2+ sigma for Gaussian angle +const AICG_13_SIGMA = 0.15 # A +# AICG2+ sigma for Gaussian dihedral +const AICG_14_SIGMA = 0.15 # Rad ?? 
+# AICG2+ atomistic contact cutoff +const AICG_GO_ATOMIC_CUTOFF = 6.5 +# AICG2+ pairwise interaction cutoff +const AICG_ATOMIC_CUTOFF = 5.0 +# AICG2+ hydrogen bond cutoff +const AICG_HYDROGEN_BOND_CUTOFF = 3.2 +# AICG2+ salt bridge cutoff +const AICG_SALT_BRIDGE_CUTOFF = 3.5 +# AICG2+ energy cutoffs +const AICG_ENE_UPPER_LIM = -0.5 +const AICG_ENE_LOWER_LIM = -5.0 +# average and general AICG2+ energy values +const AICG_13_AVE = 1.72 +const AICG_14_AVE = 1.23 +const AICG_CONTACT_AVE = 0.55 +const AICG_13_GEN = 1.11 +const AICG_14_GEN = 0.87 +const AICG_CONTACT_GEN = 0.32 + +# AICG2+ pairwise interaction pairs +const AICG_ITYPE_BB_HB = 1 # B-B hydrogen bonds +const AICG_ITYPE_BB_DA = 2 # B-B donor-accetor contacts +const AICG_ITYPE_BB_CX = 3 # B-B carbon-X contacts +const AICG_ITYPE_BB_XX = 4 # B-B other +const AICG_ITYPE_SS_HB = 5 # S-S hydrogen bonds +const AICG_ITYPE_SS_SB = 6 # S-S salty bridge +const AICG_ITYPE_SS_DA = 7 # S-S donor-accetor contacts +const AICG_ITYPE_SS_CX = 8 # S-S carbon-X contacts +const AICG_ITYPE_SS_QX = 9 # S-S charge-X contacts +const AICG_ITYPE_SS_XX = 10 # S-S other +const AICG_ITYPE_SB_HB = 11 # S-B hydrogen bonds +const AICG_ITYPE_SB_DA = 12 # S-B donor-accetor contacts +const AICG_ITYPE_SB_CX = 13 # S-B carbon-X contacts +const AICG_ITYPE_SB_QX = 14 # S-B charge-X contacts +const AICG_ITYPE_SB_XX = 15 # S-B other +const AICG_ITYPE_LR_CT = 16 # long range contacts +const AICG_ITYPE_OFFST = 17 # offset + +AICG_PAIRWISE_ENERGY = zeros(Float64, 17) +AICG_PAIRWISE_ENERGY[AICG_ITYPE_BB_HB] = - 1.4247 # B-B hydrogen bonds +AICG_PAIRWISE_ENERGY[AICG_ITYPE_BB_DA] = - 0.4921 # B-B donor-accetor contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_BB_CX] = - 0.2404 # B-B carbon-X contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_BB_XX] = - 0.1035 # B-B other +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SS_HB] = - 5.7267 # S-S hydrogen bonds +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SS_SB] = -12.4878 # S-S salty bridge +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SS_DA] = - 0.0308 # S-S 
donor-accetor contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SS_CX] = - 0.1113 # S-S carbon-X contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SS_QX] = - 0.2168 # S-S charge-X contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SS_XX] = 0.2306 # S-S other +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SB_HB] = - 3.4819 # S-B hydrogen bonds +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SB_DA] = - 0.1809 # S-B donor-accetor contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SB_CX] = - 0.1209 # S-B carbon-X contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SB_QX] = - 0.2984 # S-B charge-X contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_SB_XX] = - 0.0487 # S-B other +AICG_PAIRWISE_ENERGY[AICG_ITYPE_LR_CT] = - 0.0395 # long range contacts +AICG_PAIRWISE_ENERGY[AICG_ITYPE_OFFST] = - 0.1051 # offset + +# ============================ +# DNA 3SPN.2C Model Parameters +# ============================ + +# 3SPN.2C bond force constant +const DNA3SPN_BOND_K_2 = 0.6 * JOU2CAL +# 3SPN.2C force constant for Gaussian dihedral +const DNA3SPN_DIH_G_K = 7.0 * JOU2CAL +# 3SPN.2C sigma for Gaussian dihedral +const DNA3SPN_DIH_G_SIGMA = 0.3 +# 3SPN.2C force constant for Gaussian dihedral +const DNA3SPN_DIH_P_K = 2.0 * JOU2CAL + +# ==================================== +# RNA Structure-based Model Parameters +# ==================================== + +# RNA atomistic contact cutoff +const RNA_GO_ATOMIC_CUTOFF = 5.5 +# RNA stacking interaction dihedral cutoff +const RNA_STACK_DIH_CUTOFF = 40.0 +# RNA stacking interaction distance cutoff +const RNA_STACK_DIST_CUTOFF = 6.0 +# RNA stacking interaction epsilon +const RNA_STACK_EPSILON = 2.06 +# RNA base pairing epsilon +const RNA_BPAIR_EPSILON_2HB = 2.94 +const RNA_BPAIR_EPSILON_3HB = 5.37 + +RNA_BOND_K_LIST = Dict( + ""PS"" => 26.5, + ""SR"" => 40.3, + ""SY"" => 62.9, + ""SP"" => 84.1 +) +RNA_ANGLE_K_LIST = Dict( + ""PSR"" => 18.0, + ""PSY"" => 22.8, + ""PSP"" => 22.1, + ""SPS"" => 47.8 +) +RNA_DIHEDRAL_K_LIST = Dict( + ""PSPS"" => 1.64, + ""SPSR"" => 1.88, + ""SPSY"" => 2.82, + ""SPSP"" => 2.98 +) 
+RNA_PAIR_EPSILON_OTHER = Dict( + ""SS"" => 1.48, + ""BS"" => 0.98, + ""SB"" => 0.98, + ""BB"" => 0.93, + # undetermined... + ""PP"" => 1.00, + ""PS"" => 1.00, + ""SP"" => 1.00, + ""PB"" => 1.00, + ""BP"" => 1.00 +) + +# ================= +# PWMcos parameters +# ================= +# PWMcos atomistic contact cutoff +const PWMCOS_ATOMIC_CUTOFF = 4.0 +const pro_DNA_GO_ATOMIC_CUTOFF = 6.5 + +# ====================== +# Protein-RNA parameters +# ====================== +# protein-RNA Go-term coefficient +const PRO_RNA_GO_EPSILON_B = 0.62 +const PRO_RNA_GO_EPSILON_S = 0.74 +const PRO_RNA_GO_EPSILON_P = 0.50 + + +# ==================== +# GRO TOP File Options +# ==================== + +# ""NREXCL"" in ""[moleculetype]"" +const MOL_NR_EXCL = 3 +# ""CGNR"" in ""[atoms]"" +const AICG_ATOM_FUNC_NR = 1 +const DNA3SPN_ATOM_FUNC_NR = 1 +const RNA_ATOM_FUNC_NR = 1 +# ""f"" in ""[bonds]"" +const AICG_BOND_FUNC_TYPE = 1 +const CCGO_BOND_FUNC_TYPE = 1 +const DNA3SPN_BOND_FUNC2_TYPE = 1 +const DNA3SPN_BOND_FUNC4_TYPE = 21 +const RNA_BOND_FUNC_TYPE = 1 +# ""f"" in AICG-type ""[angles]"" +const AICG_ANG_G_FUNC_TYPE = 21 +# ""f"" in CCGO-type ""[angles]"" +const CCGO_ANG_FUNC_TYPE = 1 +# ""f"" in Flexible-type ""[angles]"" +const AICG_ANG_F_FUNC_TYPE = 22 +# ""f"" in DNA ""[angles]"" +const DNA3SPN_ANG_FUNC_TYPE = 1 +# ""f"" in RNA ""[angles]"" +const RNA_ANG_FUNC_TYPE = 1 +# ""f"" in AICG-type ""[dihedral]"" +const AICG_DIH_G_FUNC_TYPE = 21 +# ""f"" in CCGO-type ""[dihedral]"" +const CCGO_DIH_P_FUNC_TYPE = 1 +# ""f"" in Flexible-type ""[dihedral]"" +const AICG_DIH_F_FUNC_TYPE = 22 +# ""f"" in DNA Gaussian ""[dihedral]"" +const DNA3SPN_DIH_G_FUNC_TYPE = 21 +# ""f"" in DNA Periodic ""[dihedral]"" +const DNA3SPN_DIH_P_FUNC_TYPE = 1 +const DNA3SPN_DIH_P_FUNC_PERI = 1 +# ""f"" in RNA Periodic ""[dihedral]"" +const RNA_DIH_FUNC_TYPE = 1 +# ""f"" in Go-contacts ""[pairs]"" +const AICG_CONTACT_FUNC_TYPE = 2 +# ""f"" in Go-contacts ""[pairs]"" +const CCGO_CONTACT_FUNC_TYPE = 2 +# ""f"" in RNA 
Go-contacts ""[pairs]"" +const RNA_CONTACT_FUNC_TYPE = 2 +# ""f"" in pro-RNA Go-contacts ""[pairs]"" +const RNP_CONTACT_FUNC_TYPE = 2 +# ""f"" in protein-DNA PWMcos ""[pwmcos]"" +const PWMCOS_FUNC_TYPE = 1 +const PWMCOS_NS_FUNC_TYPE = 2 + + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/parser_top.jl",".jl","44080","1184","############################################################################### +# _ # +# __ _ _ __ ___ | |_ ___ _ __ # +# / _` || '__|/ _ \ | __|/ _ \ | '_ \ # +# | (_| || | | (_) || |_| (_) || |_) | # +# \__, ||_| \___/ \__|\___/ | .__/ # +# |___/ |_| # +# # +############################################################################### + + +############################################################################### +# function lists +# write_grotop(top::GenTopology, system_name::AbstractString, args::Dict{String, Any}) +# write_grotop_pwmcos(top::GenTopology, system_name::AbstractString, args::Dict{String, Any}) +# read_groitp(itp_filename::AbstractString) +# read_grotop(top_filename::AbstractString) +# write_psf(top::GenTopology, sys_name::AbstractString, args::Dict{String, Any}) +# read_psf(psf_filename::AbstractString) +############################################################################### + +using Printf + +function write_grotop(top::GenTopology, system_name::AbstractString, args::Dict{String, <:Any}=Dict{String, Any}()) + + verbose = get(args, ""verbose"", false) + + top_name = system_name * "".top"" + itp_name = system_name * "".itp"" + + # ======== + # top file + # ======== + top_file = open(top_name, ""w"") + + print(top_file, ""; common interaction parameters for CG models\n"") + print(top_file, ""#include \""./param/atom_types.itp\"" \n"") + print(top_file, ""; AICG2+ flexible local angle parameters \n"") + print(top_file, ""#include \""./param/flexible_local_angle.itp\"" \n"") + print(top_file, ""; AICG2+ flexible local dihedral parameters \n"") + print(top_file, ""#include 
\""./param/flexible_local_dihedral.itp\"" \n"") + print(top_file, ""; residue-residue potential parameters (Miyazawa-Jernigan-1996-JMB) \n"") + print(top_file, ""#include \""./param/pair_energy_MJ_96.itp\"" \n"") + print(top_file, ""\n"") + + print(top_file, ""; Molecule topology \n"") + print(top_file, ""#include \"""", itp_name, ""\"" \n\n"") + + print(top_file, ""[ system ] \n"") + print(top_file, system_name, "" \n\n"") + + print(top_file, ""[ molecules ] \n"") + print(top_file, system_name, "" 1 \n\n"") + + print(top_file, ""; [ cg_ele_chain_pairs ] \n"") + print(top_file, ""; ON 1 - 2 : 3 - 4 \n"") + print(top_file, ""; OFF 1 - 1 : 3 - 3 \n"") + print(top_file, ""; OFF 1 - 1 \n\n"") + + print(top_file, ""; [ pwmcos_chain_pairs ] \n"") + print(top_file, ""; ON 1 - 2 : 3 - 4 \n"") + print(top_file, ""; OFF 1 - 1 : 3 - 3 \n"") + print(top_file, ""; OFF 2 - 3 \n\n"") + + print(top_file, ""; [ pwmcosns_chain_pairs ] \n"") + print(top_file, ""; ON 1 - 2 : 3 - 4 \n"") + print(top_file, ""; OFF 1 - 1 : 3 - 3 \n"") + print(top_file, ""; OFF 2 - 3 \n\n"") + + print(top_file, ""; [ cg_KH_chain_pairs ] \n"") + print(top_file, ""; A 1 - 2 : 3 - 4 \n"") + print(top_file, ""; OFF 1 - 1 : 3 - 3 \n"") + print(top_file, ""; OFF 2 - 3 \n\n"") + + close(top_file) + + # ======== + # itp file + # ======== + itp_file = open(itp_name, ""w"") + + + + ########################################################################### + # define output functions # + ########################################################################### + + # ---------------- + # [ moleculetype ] + # ---------------- + wr_itp_mol_head(io::IO) = print(io, ""[ moleculetype ]\n"") + wr_itp_mol_comm(io::IO) = @printf(io, "";%15s %6s\n"", rpad(""name"", 15), ""nrexcl"") + wr_itp_mol_line(io::IO, name::AbstractString, n::Int) = @printf(io, ""%16s %6d\n"", rpad(name, 16), n) + + # --------- + # [ atoms ] + # --------- + wr_itp_atm_head(io::IO) = print(io, ""[ atoms ]\n"") + wr_itp_atm_comm(io::IO) = @printf(io, 
"";%9s%5s%10s%5s%5s%5s %8s %8s\n"", ""nr"", ""type"", ""resnr"", ""res"", ""atom"", ""cg"", ""charge"", ""mass"") + function wr_itp_atm_chain_info(io::IO, a::GenTopAtom) + @printf(io, ""; +INFO+ CHAIN: %6d SEGNAME: %5s\n"", a.chain_id, a.seg_name) + end + function wr_itp_atm_line(io::IO, a::GenTopAtom) + @printf(io, ""%10d%5s%10d%5s%5s%5d %8.3f %8.3f\n"", + a.atom_index, a.atom_type, a.residue_index, a.residue_name, a.atom_name, + a.function_type, a.charge, a.mass) + end + + # --------- + # [ bonds ] + # --------- + wr_itp_bnd_head(io::IO) = print(io, ""[ bonds ]\n"") + wr_itp_bnd_comm(io::IO) = @printf(io, "";%9s%10s%5s%18s%18s\n"", ""i"", ""j"", ""f"", ""eq"", ""coef"") + function wr_itp_bnd_line(io::IO, b::GenTopBond) + @printf(io, ""%10d%10d%5d%18.4E%18.4E\n"", b.i, b.j, b.function_type, b.r0 * 0.1, b.coef * CAL2JOU * 100.0 * 2.0) + end + + # ---------- + # [ angles ] + # ---------- + wr_itp_ang_head(io::IO) = print(io, ""[ angles ]\n"") + wr_itp_ang_comm(io::IO) = @printf(io, "";%9s%10s%10s%5s%15s%15s%15s\n"", ""i"", ""j"", ""k"", ""f"", ""eq"", ""coef"", ""w"") + function wr_itp_ang_line(io::IO, a::GenTopAngle) + f = a.function_type + if f == 1 + @printf(io, ""%10d%10d%10d%5d%15.4E%15.4E\n"", a.i, a.j, a.k, f, a.a0, a.coef * 2.0 * CAL2JOU) + elseif f == 21 + @printf(io, ""%10d%10d%10d%5d%15.4E%15.4E%15.4E\n"", a.i, a.j, a.k, f, a.a0 * 0.1, a.coef * CAL2JOU, a.w * 0.1) + elseif f == 22 + @printf(io, ""%10d%10d%10d%5d\n"", a.i, a.j, a.k, f) + end + end + + # ------------- + # [ dihedrals ] + # ------------- + wr_itp_dih_head(io::IO) = print(io, ""[ dihedrals ]\n"") + wr_itp_dih_comm(io::IO) = @printf(io, "";%9s%10s%10s%10s%5s%15s%15s%15s\n"", ""i"", ""j"", ""k"", ""l"", ""f"", ""eq"", ""coef"", ""w/n"") + function wr_itp_dih_line(io::IO, d::GenTopDihedral) + f = d.function_type + if f == 1 || f == 32 || f == 33 + @printf(io, ""%10d%10d%10d%10d%5d%15.4E%15.4E%15d\n"", d.i, d.j, d.k, d.l, f, d.d0, d.coef * CAL2JOU, d.n) + elseif f == 21 || f == 41 || f == 43 + 
@printf(io, ""%10d%10d%10d%10d%5d%15.4E%15.4E%15.4E\n"", d.i, d.j, d.k, d.l, f, d.d0, d.coef * CAL2JOU, d.w) + elseif f == 22 || f == 52 + @printf(io, ""%10d%10d%10d%10d%5d\n"", d.i, d.j, d.k, d.l, f) + elseif f == 31 + @printf(io, ""%10d%10d%10d%10d%5d%15.4E%15.4E%15d\n"", d.i, d.j, d.k, d.l, f, d.d0, 0.0, d.n) + end + end + + # --------- + # [ pairs ] + # --------- + wr_itp_pair_head(io::IO) = @printf(io, ""[ pairs ]\n"") + wr_itp_pair_comm(io::IO) = @printf(io, "";%9s%10s%10s%15s%15s\n"", ""i"", ""j"", ""f"", ""eq"", ""coef"") + function wr_itp_pair_line(io::IO, c::GenTopPair) + @printf(io, ""%10d%10d%10d%15.4E%15.4E\n"", c.i, c.j, c.function_type, c.r0 * 0.1, c.coef * CAL2JOU) + end + + # ------------- + # [ exclusions] + # ------------- + wr_itp_exc_head(io::IO) = print(io, ""[ exclusions ] ; Genesis exclusion list\n"") + wr_itp_exc_comm(io::IO) = @printf(io, "";%9s%10s\n"", ""i"", ""j"") + wr_itp_exc_line(io::IO, e::GenTopExclusion) = @printf(io, ""%10d%10d\n"", e.i, e.j) + + # ---------- + # [ pwmcos ] + # ---------- + wr_itp_pwmcos_head(io::IO) = print(io, ""[ pwmcos ] ; PWMcos parameter list\n"") + wr_itp_pwmcos_comm(io::IO) = @printf(io, "";%5s%4s%9s%9s%9s%9s%12s%12s%12s%12s%8s%8s\n"", + ""i"", ""f"", ""r0"", ""theta1"", ""theta2"", ""theta3"", + ""ene_A"", ""ene_C"", ""ene_G"", ""ene_T"", + ""gamma"", ""eps'"") + wr_itp_pwmcos_line(io::IO, e::GenTopPWMcos) = + @printf(io, ""%6d %3d %8.5f %8.3f %8.3f %8.3f%12.6f%12.6f%12.6f%12.6f%8.3f%8.3f \n"", + e.i, e.function_type, e.r0, e.theta1, e.theta2, e.theta3, + e.ene_A, e.ene_C, e.ene_G, e.ene_T, e.gamma, e.eps) + + # ---------- + # [ pwmcosns ] + # ---------- + wr_itp_pwmcosns_head(io::IO) = print(io, ""[ pwmcosns ] ; PWMcos-ns parameter list\n"") + wr_itp_pwmcosns_comm(io::IO) = @printf(io, "";%5s%4s%9s%9s%9s%8s\n"", + ""i"", ""f"", ""r0"", ""theta1"", ""theta3"", ""eps"") + wr_itp_pwmcosns_line(io::IO, e::GenTopPWMcos) = + @printf(io, ""%6d %3d %8.5f %8.3f %8.3f %8.3f \n"", + e.i, e.function_type, e.r0 * 
0.1, e.theta1, e.theta3, e.eps) + + # --------------------- + # [ cg_IDR_HPS_region ] + # --------------------- + wr_itp_idr_hps_head(io::IO) = print(io, ""[ cg_IDR_HPS_region ] ; IDR HPS model \n"") + wr_itp_idr_hps_comm(io::IO) = @printf(io, "";%9s to %10s\n"", ""i"", ""j"") + wr_itp_idr_hps_line(io::IO, e::GenTopRegion) = @printf(io, ""%10d %10d\n"", e.istart, e.iend) + + # -------------------- + # [ cg_IDR_KH_region ] + # -------------------- + wr_itp_idr_kh_head(io::IO) = print(io, ""[ cg_IDR_KH_region ] ; IDR KH model \n"") + wr_itp_idr_kh_comm(io::IO) = @printf(io, "";%9s to %10s\n"", ""i"", ""j"") + wr_itp_idr_kh_line(io::IO, e::GenTopRegion) = @printf(io, ""%10d %10d\n"", e.istart, e.iend) + + + ########################################################################### + # Begin writing to file... # + ########################################################################### + + # ---------------- + # [ moleculetype ] + # ---------------- + wr_itp_mol_head(itp_file) + wr_itp_mol_comm(itp_file) + wr_itp_mol_line(itp_file, system_name, MOL_NR_EXCL) + print(itp_file,""\n"") + + # --------- + # [ atoms ] + # --------- + cg_num_particles = top.num_atom + wr_itp_atm_head(itp_file) + wr_itp_atm_comm(itp_file) + tmp_chain_id = 0 + tmp_seg_name = """" + for atom in top.top_atoms + i_chain = atom.chain_id + i_segnm = atom.seg_name + if i_chain != tmp_chain_id || i_segnm != tmp_seg_name + wr_itp_atm_chain_info(itp_file, atom) + tmp_chain_id = i_chain + tmp_seg_name = i_segnm + end + wr_itp_atm_line(itp_file, atom) + end + print(itp_file,""\n"") + + # --------- + # [ bonds ] + # --------- + if length(top.top_bonds) > 0 + wr_itp_bnd_head(itp_file) + wr_itp_bnd_comm(itp_file) + for bond in top.top_bonds + wr_itp_bnd_line(itp_file, bond) + end + print(itp_file, ""\n"") + end + + # ---------- + # [ angles ] + # ---------- + if length(top.top_angles) > 0 + wr_itp_ang_head(itp_file) + wr_itp_ang_comm(itp_file) + for angle in top.top_angles + wr_itp_ang_line(itp_file, 
angle) + end + print(itp_file, ""\n"") + end + + # ------------- + # [ dihedrals ] + # ------------- + if length(top.top_dihedrals) > 0 + wr_itp_dih_head(itp_file) + wr_itp_dih_comm(itp_file) + for dih in top.top_dihedrals + wr_itp_dih_line(itp_file, dih) + end + print(itp_file, ""\n"") + end + + # --------- + # [ pairs ] + # --------- + if length(top.top_pairs) > 0 + wr_itp_pair_head(itp_file) + wr_itp_pair_comm(itp_file) + for pair in top.top_pairs + wr_itp_pair_line(itp_file, pair) + end + print(itp_file, ""\n"") + end + + # -------------- + # [ exclusions ] + # -------------- + if length(top.top_exclusions) > 0 + wr_itp_exc_head(itp_file) + wr_itp_exc_comm(itp_file) + for exclusion in top.top_exclusions + wr_itp_exc_line(itp_file, exclusion) + end + print(itp_file, ""\n"") + end + + # ---------- + # [ pwmcos ] + # ---------- + if length(top.top_pwmcos) > 0 + wr_itp_pwmcos_head(itp_file) + wr_itp_pwmcos_comm(itp_file) + for pwmcos in top.top_pwmcos + wr_itp_pwmcos_line(itp_file, pwmcos) + end + print(itp_file, ""\n"") + end + + # ------------ + # [ pwmcosns ] + # ------------ + if length(top.top_pwmcosns) > 0 + wr_itp_pwmcosns_head(itp_file) + wr_itp_pwmcosns_comm(itp_file) + for pwmcosns in top.top_pwmcosns + wr_itp_pwmcosns_line(itp_file, pwmcosns) + end + print(itp_file, ""\n"") + end + + # --------------------- + # [ cg_IDR_HPS_region ] + # --------------------- + if length(top.top_idr_hps) > 0 + wr_itp_idr_hps_head(itp_file) + wr_itp_idr_hps_comm(itp_file) + for idr in top.top_idr_hps + wr_itp_idr_hps_line(itp_file, idr) + end + print(itp_file, ""\n"") + end + + # -------------------- + # [ cg_IDR_KH_region ] + # -------------------- + if length(top.top_idr_kh) > 0 + wr_itp_idr_kh_head(itp_file) + wr_itp_idr_kh_comm(itp_file) + for idr in top.top_idr_kh + wr_itp_idr_kh_line(itp_file, idr) + end + print(itp_file, ""\n"") + end + + close(itp_file) + + if verbose + println(""> ... 
.top: DONE!"") + end +end + + +function write_grotop_pwmcos(top::GenTopology, system_name::AbstractString, args::Dict{String, <:Any}=Dict{String, Any}()) + + verbose = get(args, ""verbose"", false) + appendto_filename = get(args, ""patch"", """") + + do_output_pwmcos = get(args, ""pwmcos"", true) + do_output_pwmcosns = get(args, ""pwmcos-ns"", true) + + if length( appendto_filename ) == 0 + itp_pwmcos_name = system_name * ""_pwmcos.itp_patch"" + itp_pwmcos_file = open(itp_pwmcos_name, ""w"") + else + itp_pwmcos_name = appendto_filename + itp_pwmcos_file = open(itp_pwmcos_name, ""a"") + end + + + if do_output_pwmcos + itp_pwmcos_head = ""[ pwmcos ]\n"" + itp_pwmcos_comm = @sprintf("";%5s%4s%9s%9s%9s%9s%12s%12s%12s%12s%8s%8s\n"", + ""i"", ""f"", ""r0"", ""theta1"", ""theta2"", ""theta3"", + ""ene_A"", ""ene_C"", ""ene_G"", ""ene_T"", + ""gamma"", ""eps'"") + elseif do_output_pwmcosns + itp_pwmcos_head = ""[ pwmcosns ] ; PWMcos-ns parameter list\n"" + itp_pwmcos_comm = @sprintf("";%5s%4s%9s%9s%9s%8s\n"", + ""i"", ""f"", ""r0"", ""theta1"", ""theta3"", ""eps"") + end + + print(itp_pwmcos_file, itp_pwmcos_head) + print(itp_pwmcos_file, itp_pwmcos_comm) + for p in top.top_pwmcos + if p.function_type == PWMCOS_FUNC_TYPE + @printf(itp_pwmcos_file, + ""%6d %3d %8.5f %8.3f %8.3f %8.3f%12.6f%12.6f%12.6f%12.6f%8.3f%8.3f \n"", + p.i, p.function_type, p.r0 * 0.1, p.theta1, p.theta2, p.theta3, + p.ene_A, p.ene_C, p.ene_G, p.ene_T, p.gamma, p.eps) + end + end + + for p in top.top_pwmcosns + if p.function_type == PWMCOS_NS_FUNC_TYPE + @printf(itp_pwmcos_file, + ""%6d %3d %8.5f %8.3f %8.3f %8.3f \n"", + p.i, p.function_type, p.r0 * 0.1, p.theta1, p.theta3, + p.eps) + end + end + print(itp_pwmcos_file, ""\n"") + + close(itp_pwmcos_file) + if verbose + println(""> ... 
"", itp_pwmcos_name, "" pwmcos.itp: DONE!"") + end +end + +# ================================== +# General Topology in Gromacs format +# ================================== + +function read_groitp(itp_filename::AbstractString) + + top_mols = Vector{GenTopMolecule}(undef, 0) + + mol_name = """" + nonlocal_interval = 0 + num_atom = 0 + + top_atoms = Vector{GenTopAtom}(undef, 0) + top_bonds = Vector{GenTopBond}(undef, 0) + top_angles = Vector{GenTopAngle}(undef, 0) + top_dihedrals = Vector{GenTopDihedral}(undef, 0) + top_pairs = Vector{GenTopPair}(undef, 0) + top_exclusions = Vector{GenTopExclusion}(undef, 0) + top_pwmcos = Vector{GenTopPWMcos}(undef, 0) + top_pwmcosns = Vector{GenTopPWMcos}(undef, 0) + top_idr_hps = Vector{GenTopRegion}(undef, 0) + top_idr_kh = Vector{GenTopRegion}(undef, 0) + + function read_top_atoms(line::AbstractString, c_id::Int, s_name::AbstractString) + words = split(line) + a_indx = parse(Int, words[1]) + a_type = words[2] + r_indx = parse(Int, words[3]) + r_name = words[4] + a_name = words[5] + f_type = parse(Int, words[6]) + charge = parse(Float64, words[7]) + if length(words) >=8 + mass = parse(Float64, words[8]) + else + mass = 1.0 + end + new_atom = GenTopAtom(a_indx, a_type, r_indx, r_name, + a_name, f_type, charge, mass, c_id, s_name) + push!(top_atoms, new_atom) + end + + function read_top_bonds(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + j = parse(Int, words[2]) + f_type = parse(Int, words[3]) + if length(words) == 5 + r0 = parse(Float64, words[4]) * 10.0 + coef = parse(Float64, words[5]) * 0.005 * JOU2CAL + else + # TODO: fix this part! 
+ r0 = 0.0 + coef = 0.0 + end + new_bond = GenTopBond(i, j, f_type, r0, coef) + push!(top_bonds, new_bond) + end + + function read_top_angles(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + j = parse(Int, words[2]) + k = parse(Int, words[3]) + f_type = parse(Int, words[4]) + eq = 0.0 + coef = 0.0 + w = 0.0 + if f_type == 1 + if length(words) == 6 + eq = parse(Float64, words[5]) + coef = parse(Float64, words[6]) * 0.5 * JOU2CAL + w = 0.0 + end + elseif f_type == 21 + eq = parse(Float64, words[5]) * 10.0 + coef = parse(Float64, words[6]) * JOU2CAL + w = parse(Float64, words[7]) * 10.0 + elseif f_type == 22 + eq = 0.0 + coef = 0.0 + w = 0.0 + end + new_angle = GenTopAngle(i, j, k, f_type, eq, coef, w) + push!(top_angles, new_angle) + end + + function read_top_dihedrals(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + j = parse(Int, words[2]) + k = parse(Int, words[3]) + l = parse(Int, words[4]) + f_type = parse(Int, words[5]) + eq = 0.0 + coef = 0.0 + w = 0.0 + n = 0 + if f_type == 1 + eq = parse(Float64, words[6]) + coef = parse(Float64, words[7]) * JOU2CAL + w = 0.0 + n = parse(Int, words[8]) + elseif f_type == 21 || f_type == 41 + eq = parse(Float64, words[6]) + coef = parse(Float64, words[7]) * JOU2CAL + w = parse(Float64, words[8]) + n = 0 + elseif f_type == 22 || f_type == 52 + eq = 0.0 + coef = 0.0 + w = 0.0 + n = 0 + elseif f_type == 31 || f_type == 32 || f_type == 33 + eq = parse(Float64, words[6]) + coef = parse(Float64, words[7]) * JOU2CAL + w = 0.0 + n = parse(Int, words[8]) + end + new_dihedral = GenTopDihedral(i, j, k, l, f_type, eq, coef, w, n) + push!(top_dihedrals, new_dihedral) + end + + function read_top_pairs(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + j = parse(Int, words[2]) + f_type = parse(Int, words[3]) + if length(words) == 5 + r0 = parse(Float64, words[4]) * 10.0 + coef = parse(Float64, words[5]) * JOU2CAL + else + # TODO: fix this part! 
+ r0 = 0.0 + coef = 0.0 + end + new_pair = GenTopPair(i, j, f_type, r0, coef) + push!(top_pairs, new_pair) + end + + function read_top_exclusions(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + j = parse(Int, words[2]) + new_ex = GenTopExclusion(i, j) + push!(top_exclusions, new_ex) + end + + function read_top_pwmcos(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + f_type = parse(Int, words[2]) + r0 = parse(Float64, words[3]) * 10.0 + t1 = parse(Float64, words[4]) + t2 = parse(Float64, words[5]) + t3 = parse(Float64, words[6]) + eA = parse(Float64, words[7]) + eC = parse(Float64, words[8]) + eG = parse(Float64, words[9]) + eT = parse(Float64, words[10]) + gm = parse(Float64, words[11]) + ep = parse(Float64, words[12]) + new_pwmcos = GenTopPWMcos(i, f_type, r0, t1, t2, t3, + eA, eC, eG, eT, gm, ep) + push!(top_pwmcos, new_pwmcos) + end + + function read_top_pwmcosns(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + f_type = parse(Int, words[2]) + r0 = parse(Float64, words[3]) * 10.0 + t1 = parse(Float64, words[4]) + t3 = parse(Float64, words[5]) + ep = parse(Float64, words[6]) + new_pwmcosns = GenTopPWMcos(i, f_type, r0, t1, 0.0, t3, + 0.0, 0.0, 0.0, 0.0, 0.0, ep) + push!(top_pwmcosns, new_pwmcosns) + end + + function read_top_idr_hps(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + j = parse(Int, words[2]) + new_idr = GenTopRegion(i, j) + push!(top_idr_hps, new_idr) + end + + function read_top_idr_kh(line::AbstractString) + words = split(line) + i = parse(Int, words[1]) + j = parse(Int, words[2]) + new_idr = GenTopRegion(i, j) + push!(top_idr_kh, new_idr) + end + + # --------- + # main part + # --------- + section_name = """" + c_id_tmp = 0 + s_name_tmp = """" + for line in eachline(itp_filename) + if section_name == ""atoms"" && startswith(line, ""; +INFO+"") + words = split( line[10:end] ) + if words[1] == ""CHAIN:"" + c_id_tmp = parse(Int, words[2]) + end + if 
words[3] == ""SEGNAME:"" && length(words) > 3 + s_name_tmp = words[4] + end + end + + sep = findfirst("";"", line) + if sep != nothing + line = strip(line[1 : sep[1] - 1]) + else + line = strip(line) + end + if length(line) == 0 + continue + end + + if line[1] == '[' + sep = findfirst(""]"", line) + section_name = strip(line[2 : sep[1] - 1]) + continue + end + + # -------------------------------------------- + # TODO: conditional reading not implemented... + # -------------------------------------------- + if line[1] == '#' + continue + end + + if section_name == ""moleculetype"" + num_atom = length(top_atoms) + if num_atom > 0 + new_top_mol = GenTopMolecule(mol_name, nonlocal_interval, num_atom, + top_atoms, + top_bonds, + top_angles, + top_dihedrals, + top_pairs, + top_exclusions, + top_pwmcos, + top_pwmcosns, + top_idr_hps, + top_idr_kh) + + push!(top_mols, new_top_mol) + + top_atoms = Vector{GenTopAtom}(undef, 0) + top_bonds = Vector{GenTopBond}(undef, 0) + top_angles = Vector{GenTopAngle}(undef, 0) + top_dihedrals = Vector{GenTopDihedral}(undef, 0) + top_pairs = Vector{GenTopPair}(undef, 0) + top_exclusions = Vector{GenTopExclusion}(undef, 0) + top_pwmcos = Vector{GenTopPWMcos}(undef, 0) + top_pwmcosns = Vector{GenTopPWMcos}(undef, 0) + top_idr_hps = Vector{GenTopRegion}(undef, 0) + top_idr_kh = Vector{GenTopRegion}(undef, 0) + c_id_tmp = 0 + s_name_tmp = """" + end + + words = split(line) + mol_name = words[1] + nonlocal_interval = parse(Int, words[2]) + else + # read_function_name = ""read_top_"" * section_name * ""(line)"" + # read_expression = Meta.parse(read_function_name) + # eval(read_expression) + if section_name == ""atoms"" + read_top_atoms(line, c_id_tmp, s_name_tmp) + elseif section_name == ""bonds"" + read_top_bonds(line) + elseif section_name == ""angles"" + read_top_angles(line) + elseif section_name == ""dihedrals"" + read_top_dihedrals(line) + elseif section_name == ""pairs"" + read_top_pairs(line) + elseif section_name == ""exclusions"" + 
read_top_exclusions(line) + elseif section_name == ""pwmcos"" + read_top_pwmcos(line) + elseif section_name == ""pwmcosns"" + read_top_pwmcosns(line) + elseif section_name == ""cg_IDR_HPS_region"" + read_top_idr_hps(line) + elseif section_name == ""cg_IDR_KH_region"" + read_top_idr_kh(line) + end + end + end + + num_atom = length(top_atoms) + if num_atom > 0 + new_top_mol = GenTopMolecule(mol_name, nonlocal_interval, num_atom, + top_atoms, + top_bonds, + top_angles, + top_dihedrals, + top_pairs, + top_exclusions, + top_pwmcos, + top_pwmcosns, + top_idr_hps, + top_idr_kh) + push!(top_mols, new_top_mol) + end + + return top_mols +end + +function read_groitp_defaults(itp_filename::AbstractString) + + top_default_atomtype = Vector{GenTopAtomType}(undef, 0) + top_default_CGIDR_HPS_atomtype = Vector{GenTopCGIDRHPSAtomType}(undef, 0) + + function read_top_atomtype(line::AbstractString) + words = split(line) + a_name = words[1] + n = parse(Int, words[2]) + mass = parse(Float64, words[3]) + charge = parse(Float64, words[4]) + p_type = words[5] + rmin = parse(Float64, words[6]) + eps = parse(Float64, words[7]) + + new_atom_type = GenTopAtomType(a_name, mass, charge, rmin, eps, n, p_type) + push!(top_default_atomtype, new_atom_type) + end + + function read_top_CGIDRHPS_atomtype(line::AbstractString) + words = split(line) + a_name = words[1] + mass = parse(Float64, words[2]) + charge = parse(Float64, words[3]) + sigma = parse(Float64, words[4]) + lambda = parse(Float64, words[5]) + + new_atom_type = GenTopCGIDRHPSAtomType(a_name, mass, charge, sigma, lambda) + push!(top_default_CGIDR_HPS_atomtype, new_atom_type) + end + + # --------- + # main part + # --------- + section_name = """" + for line in eachline(itp_filename) + + sep = findfirst("";"", line) + if sep != nothing + line = strip(line[1 : sep[1] - 1]) + else + line = strip(line) + end + if length(line) == 0 + continue + end + + if line[1] == '[' + sep = findfirst(""]"", line) + section_name = strip(line[2 : sep[1] - 1]) 
+ continue + end + + # read_function_name = ""read_top_"" * section_name * ""(line)"" + # read_expression = Meta.parse(read_function_name) + # eval(read_expression) + if section_name == ""atomtypes"" + read_top_atomtype(line) + elseif section_name == ""cg_IDR_HPS_atomtypes"" + read_top_CGIDRHPS_atomtype(line) + end + end + + return (top_default_atomtype, top_default_CGIDR_HPS_atomtype) +end + +function read_grotop(top_filename::AbstractString) + + sys_name = """" + num_atom = 0 + mol_id = 0 + + top_default_params = GenTopDefault(0, 0, false, 0.0, 0.0) + top_default_atomtype = Vector{GenTopAtomType}(undef, 0) + top_default_CGDNA_bp = Vector{GenTopCGDNABasepairType}(undef, 0) + top_default_CGDNA_bs = Vector{GenTopCGDNABasestackType}(undef, 0) + top_default_CGDNA_cs = Vector{GenTopCGDNABasecrossType}(undef, 0) + top_default_CGDNA_exv = Vector{GenTopCGDNAExvType}(undef, 0) + top_default_CGPro_flx_angle = Vector{GenTopCGProAICGFlexAngleType}(undef, 0) + top_default_CGPro_flx_dihedral = Vector{GenTopCGProAICGFlexDihedralType}(undef, 0) + top_default_CGIDR_HPS_atomtype = Vector{GenTopCGIDRHPSAtomType}(undef, 0) + top_default_CGIDR_KH_atomtype = Vector{GenTopCGIDRKHAtomType}(undef, 0) + + global_index_2_local_index = Vector{Int}(undef, 0) + global_index_2_local_molid = Vector{Int}(undef, 0) + top_atoms = Vector{GenTopAtom}(undef, 0) + top_bonds = Vector{GenTopBond}(undef, 0) + top_angles = Vector{GenTopAngle}(undef, 0) + top_dihedrals = Vector{GenTopDihedral}(undef, 0) + top_pairs = Vector{GenTopPair}(undef, 0) + top_exclusions = Vector{GenTopExclusion}(undef, 0) + top_pwmcos = Vector{GenTopPWMcos}(undef, 0) + top_pwmcosns = Vector{GenTopPWMcos}(undef, 0) + top_idr_hps = Vector{GenTopRegion}(undef, 0) + top_idr_kh = Vector{GenTopRegion}(undef, 0) + top_mol_list = Vector{GenTopMolList}(undef, 0) + + section_name = """" + mol_topologies = Dict() + + if dirname(top_filename) == """" + top_dirname = ""./"" + else + top_dirname = dirname(top_filename) * ""/"" + end + + # 
------------------------ + # read the top file itself + # ------------------------ + # in some cases, there is information in the topology file... + new_mols = read_groitp(top_filename) + for new_mol in new_mols + new_mol_name = new_mol.mol_name + mol_topologies[new_mol_name] = new_mol + end + + + for line in eachline(top_filename) + sep = findfirst("";"", line) + if sep != nothing + line = strip(line[1 : sep[1] - 1]) + else + line = strip(line) + end + if length(line) == 0 + continue + end + + + if startswith(line, ""#include"") + mol_file_name = strip(line[9:end], ['\""', '\'', ' ']) + mol_file_basename = basename(mol_file_name) + if !isabspath(mol_file_name) + mol_file_name = normpath( joinpath( top_dirname, mol_file_name ) ) + end + if in(mol_file_basename, [""atom_types.itp"", + ""pair_energy_MJ_96.itp"", + ""pair_energy_mpipi_23.itp"", + ""flexible_local_angle.itp"", + ""flexible_local_dihedral.itp""]) + # continue + tmp_defaults = read_groitp_defaults(mol_file_name) + tmp_atomtype = tmp_defaults[1] + tmp_CGIDR_HPS_atomtype = tmp_defaults[2] + for a in tmp_atomtype + push!(top_default_atomtype, a) + end + for a in tmp_CGIDR_HPS_atomtype + push!(top_default_CGIDR_HPS_atomtype, a) + end + end + new_mols = read_groitp(mol_file_name) + for new_mol in new_mols + new_mol_name = new_mol.mol_name + mol_topologies[new_mol_name] = new_mol + end + end + + if line[1] == '[' + sep = findfirst(""]"", line) + section_name = strip(line[2 : sep[1] - 1]) + continue + end + + if section_name == ""system"" + words = split(line) + sys_name = words[1] + elseif section_name == ""molecules"" + words = split(line) + mol_name = words[1] + mol_count = parse(Int, words[2]) + + tmp_mol_list = GenTopMolList(mol_name, mol_count) + push!(top_mol_list, tmp_mol_list) + + for i = 1 : mol_count + tmp_mol = mol_topologies[mol_name] + mol_id += 1 + + # ----------------------- + # add molecules to system + # ----------------------- + for t in tmp_mol.top_atoms + new_index = t.atom_index + num_atom 
+ s = GenTopAtom(new_index, + t.atom_type, + t.residue_index, + t.residue_name, + t.atom_name, + t.function_type, + t.charge, + t.mass, + t.chain_id, + t.seg_name * ""M$i"") + push!(top_atoms, s) + push!(global_index_2_local_index, t.atom_index) + push!(global_index_2_local_molid, mol_id) + end + for t in tmp_mol.top_bonds + s = GenTopBond(t.i + num_atom, + t.j + num_atom, + t.function_type, + t.r0, + t.coef + ) + push!(top_bonds, s) + end + for t in tmp_mol.top_angles + s = GenTopAngle(t.i + num_atom, + t.j + num_atom, + t.k + num_atom, + t.function_type, + t.a0, t.coef, t.w) + push!(top_angles, s) + end + for t in tmp_mol.top_dihedrals + s = GenTopDihedral(t.i + num_atom, + t.j + num_atom, + t.k + num_atom, + t.l + num_atom, + t.function_type, + t.d0, t.coef, t.w, t.n) + push!(top_dihedrals, s) + end + for t in tmp_mol.top_pairs + s = GenTopPair(t.i + num_atom, + t.j + num_atom, + t.function_type, + t.r0, t.coef) + push!(top_pairs, s) + end + for t in tmp_mol.top_exclusions + s = GenTopExclusion(t.i + num_atom, + t.j + num_atom) + push!(top_exclusions, s) + end + for t in tmp_mol.top_pwmcos + s = GenTopPWMcos(t.i + num_atom, + t.function_type, + t.r0, + t.theta1, + t.theta2, + t.theta3, + t.ene_A, t.ene_C, t.ene_G, t.ene_T, + t.gamma, t.eps) + push!(top_pwmcos, s) + end + for t in tmp_mol.top_pwmcosns + s = GenTopPWMcos(t.i + num_atom, + t.function_type, + t.r0, + t.theta1, + t.theta2, + t.theta3, + t.ene_A, t.ene_C, t.ene_G, t.ene_T, + t.gamma, t.eps) + push!(top_pwmcosns, s) + end + for t in tmp_mol.top_idr_hps + s = GenTopRegion(t.istart + num_atom, + t.iend + num_atom) + push!(top_idr_hps, s) + end + for t in tmp_mol.top_idr_kh + s = GenTopRegion(t.istart + num_atom, + t.iend + num_atom) + push!(top_idr_kh, s) + end + + num_atom += tmp_mol.num_atom + end + end + end + + new_top = GenTopology(sys_name, num_atom, + top_default_params, + top_default_atomtype, + top_default_CGDNA_bp, + top_default_CGDNA_bs, + top_default_CGDNA_cs, + top_default_CGDNA_exv, + 
top_default_CGPro_flx_angle, + top_default_CGPro_flx_dihedral, + top_default_CGIDR_HPS_atomtype, + top_default_CGIDR_KH_atomtype, + global_index_2_local_index, + global_index_2_local_molid, + top_atoms, + top_bonds, + top_angles, + top_dihedrals, + top_pairs, + top_exclusions, + top_pwmcos, + top_pwmcosns, + top_idr_hps, + top_idr_kh, + top_mol_list) + + return new_top + +end + +############################################################################### +# __ # +# _ __ ___ / _| # +# | '_ \ / __|| |_ # +# | |_) |\__ \| _| # +# | .__/ |___/|_| # +# |_| # +# # +############################################################################### + +function write_psf(top::GenTopology, sys_name::AbstractString="""", args::Dict{String, <:Any}=Dict{String, Any}()) + + verbose = get(args, ""verbose"", false) + + if length(sys_name) > 0 + system_name = sys_name + else + system_name = top.system_name + end + psf_name = system_name * "".psf"" + psf_file = open(psf_name, ""w"") + + cg_num_particles = top.num_atom + + @printf(psf_file, ""PSF CMAP \n\n"") + @printf(psf_file, "" 3 !NTITLE \n"") + @printf(psf_file, ""REMARKS PSF file created with Julia. 
\n"") + @printf(psf_file, ""REMARKS System: %s \n"", system_name) + @printf(psf_file, ""REMARKS ======================================== \n"") + @printf(psf_file, "" \n"") + + psf_atom_line = "" %6d %3s %5d %3s %3s %5s %10.6f %10.6f 0 \n"" + chain_id_set = ""_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890"" + + @printf(psf_file, "" %6d !NATOM \n"", cg_num_particles) + for i_bead in 1 : cg_num_particles + @printf(psf_file, "" %6d %3s %5d %3s %3s %5s %10.6f %10.6f 0 \n"", + i_bead, + chain_id_set[mod(top.global_index_2_local_molid[i_bead], 63) + 1], + top.top_atoms[i_bead].residue_index, + top.top_atoms[i_bead].residue_name, + top.top_atoms[i_bead].atom_name, + top.top_atoms[i_bead].atom_type, + top.top_atoms[i_bead].charge, + top.top_atoms[i_bead].mass) + end + print(psf_file,""\n"") + + close(psf_file) + + if verbose + println(""> ... .psf: DONE!"") + end +end + +function read_psf(psf_filename::AbstractString) + + sys_name = """" + num_atom = 0 + mol_id = 0 + + top_default_params = GenTopDefault(0, 0, false, 0.0, 0.0) + top_default_atomtype = Vector{GenTopAtomType}(undef, 0) + top_default_CGDNA_bp = Vector{GenTopCGDNABasepairType}(undef, 0) + top_default_CGDNA_bs = Vector{GenTopCGDNABasestackType}(undef, 0) + top_default_CGDNA_cs = Vector{GenTopCGDNABasecrossType}(undef, 0) + top_default_CGDNA_exv = Vector{GenTopCGDNAExvType}(undef, 0) + top_default_CGPro_flx_angle = Vector{GenTopCGProAICGFlexAngleType}(undef, 0) + top_default_CGPro_flx_dihedral = Vector{GenTopCGProAICGFlexDihedralType}(undef, 0) + + global_index_2_local_index = Vector{Int}(undef, 0) + global_index_2_local_molid = Vector{Int}(undef, 0) + top_atoms = Vector{GenTopAtom}(undef, 0) + top_bonds = Vector{GenTopBond}(undef, 0) + top_angles = Vector{GenTopAngle}(undef, 0) + top_dihedrals = Vector{GenTopDihedral}(undef, 0) + top_pairs = Vector{GenTopPair}(undef, 0) + top_exclusions = Vector{GenTopExclusion}(undef, 0) + top_pwmcos = Vector{GenTopPWMcos}(undef, 0) + top_pwmcosns = 
Vector{GenTopPWMcos}(undef, 0) + top_idr_hps = Vector{GenTopRegion}(undef, 0) + top_idr_kh = Vector{GenTopRegion}(undef, 0) + top_mol_list = Vector{GenTopMolList}(undef, 0) + + function read_top_atoms(line::AbstractString, c_id::Int, s_name::AbstractString) + words = split(line) + a_indx = parse(Int, words[1]) + seg_id = words[2] + r_indx = parse(Int, words[3]) + r_name = words[4] + a_name = words[5] + a_type = words[6] + charge = parse(Float64, words[7]) + mass = parse(Float64, words[8]) + f_type = parse(Int, words[9]) + new_atom = GenTopAtom(a_indx, a_type, r_indx, r_name, + a_name, f_type, charge, mass, c_id, seg_id) + push!(top_atoms, new_atom) + end + + section_name = """" + for line in eachline(psf_filename) + words = split(line) + + if length(words) == 0 + continue + end + + if words[1] == ""REMARKS"" || words[1] == ""PSF"" + continue + end + + sep = findfirst(""!"", line) + if sep != nothing + num_tmp = parse(Int, words[1]) + section_name = strip(line[sep[1] + 1:end]) + if section_name == ""NATOM"" + num_atom = num_tmp + end + continue + end + + if section_name == ""NATOM"" + read_top_atoms(line, 0, """") + end + end + + new_top = GenTopology(sys_name, num_atom, + top_default_params, + top_default_atomtype, + top_default_CGDNA_bp, + top_default_CGDNA_bs, + top_default_CGDNA_cs, + top_default_CGDNA_exv, + top_default_CGPro_flx_angle, + top_default_CGPro_flx_dihedral, + global_index_2_local_index, + global_index_2_local_molid, + top_atoms, + top_bonds, + top_angles, + top_dihedrals, + top_pairs, + top_exclusions, + top_pwmcos, + top_pwmcosns, + top_idr_hps, + top_idr_kh, + top_mol_list) + + return new_top + +end + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/biomath.jl",".jl","7524","285","############################################################################### +# Functions compting geometric quantities # +############################################################################### + +using LinearAlgebra + +# ====================== +# 
Mathematical Structure +# ====================== + +struct Quaternion + w::Float64 + x::Float64 + y::Float64 + z::Float64 +end + +struct GeoTransformation + rotation::Array{<:Real, 2} + translation::Array{<:Real, 2} +end + + +# =================== +# Geometric Functions +# =================== + +# -------- +# Distance +# -------- + +function compute_distance(coor1::Vector{<:Real}, coor2::Vector{<:Real}) + d = coor1 - coor2 + return norm(d) +end + +# ----- +# Angle +# ----- + +function compute_angle(coor1::Vector{<:Real}, coor2::Vector{<:Real}, coor3::Vector{<:Real}) + v1 = coor1 - coor2 + v2 = coor3 - coor2 + n1 = norm(v1) + n2 = norm(v2) + return acosd(clamp( dot(v1, v2) / n1 / n2, -1.0, 1.0 )) +end + +function compute_vec_angle(vec1::Vector{<:Real}, vec2::Vector{<:Real}) + n1 = norm(vec1) + n2 = norm(vec2) + return acosd(clamp( dot(vec1, vec2) / n1 / n2, -1.0, 1.0 )) +end + +# -------- +# Dihedral +# -------- + +function compute_dihedral(coor1::Vector{<:Real}, coor2::Vector{<:Real}, coor3::Vector{<:Real}, coor4::Vector{<:Real}) + v12 = coor2 - coor1 + v23 = coor3 - coor2 + v34 = coor4 - coor3 + c123 = cross(v12, v23) + c234 = cross(v23, v34) + nc123 = norm(c123) + nc234 = norm(c234) + dih = acosd(clamp( dot(c123, c234) / nc123 / nc234, -1.0, 1.0 )) + c1234 = cross(c123, c234) + judge = dot(c1234, v23) + dih = judge < 0 ? 
- dih : dih + return dih +end + +# -------- +# Centroid +# -------- + +function centroid(coors::Array{<:Real, 2}) + num_coor = size(coors, 2) + coor_centroid = zeros(Float64, 3) + for i_bead in 1 : num_coor + coor_centroid .+= coors[:, i_bead] + end + coor_centroid ./= num_coor + return coor_centroid +end + +# ------------------ +# Radius of gyration +# ------------------ + +function radius_of_gyration(coors::Array{<:Real, 2}) + num_coor = size(coors, 2) + coor_centroid = zeros(Float64, 3) + for i_bead in 1 : num_coor + coor_centroid .+= coors[:, i_bead] + end + coor_centroid ./= num_coor + + dist_sq_sum = 0 + for i_bead in 1 : num_coor + v = coors[:, i_bead] - coor_centroid + dist_sq_sum += v' * v + end + rg = sqrt(dist_sq_sum / num_coor) +end + +# --------------- +# Superimposition +# --------------- + +"""""" + compute_superimposition_transformation(coors_group_1, coors_group_2) + +Find out the transformation (rotation + translation) to superimpose Group 1 onto Group 2. + +# Arguments +- `coors_group_1`: Group of particles to be moved; +- `coors_group_2`: Group of particles used as target. 
+"""""" +function compute_superimposition_transformation(coors_group_1::Array{<:Real, 2}, coors_group_2::Array{<:Real, 2}) + coor_size = size(coors_group_1)[2] + + if coor_size != size(coors_group_2)[2] + error(""Can not perform superimposition for conformations with different size."") + end + + # Step 1: scaling group 2 to group 3 to math group 1 + measure_group_1 = 0 + measure_group_2 = 0 + for i in 1:coor_size - 1 + measure_group_1 += norm(coors_group_1[:, i] - coors_group_1[:, i + 1]) + measure_group_2 += norm(coors_group_2[:, i] - coors_group_2[:, i + 1]) + end + measure_scale = measure_group_1 / measure_group_2 + coors_group_3 = coors_group_2 .* measure_scale + + # Step 2: compute centroids + # + coor_centroid_1 = sum(coors_group_1, dims=2) .* (1 / coor_size) + coor_centroid_3 = sum(coors_group_3, dims=2) .* (1 / coor_size) + + # Step 3: shift coordinates to centroid + # + coors_shift_1 = coors_group_1 .- coor_centroid_1 + coors_shift_3 = coors_group_3 .- coor_centroid_3 + + # SVD + # + s = svd(coors_shift_1 * coors_shift_3') + + # rotation + # + d = det(s.V * s.U') < 0.0 ? 
-1.0 : 1.0 + m = diagm([1, 1, d]) + rotation_matrix = s.V * m * s.U' + + # translation + # + translation_matrix = ( coor_centroid_3 ./ measure_scale ) - ( rotation_matrix * coor_centroid_1 ) + + # final RMSD fit + # + fit = GeoTransformation(rotation_matrix, translation_matrix) + + return fit +end + +function apply_transformation(t::GeoTransformation, coors_group_old::Array{<:Real, 2}) + coors_group_new = t.rotation * coors_group_old .+ t.translation + + return coors_group_new +end + +# ------------ +# Compute RMSD +# ------------ + +function compute_rmsd(coors_group_1::Array{<:Real, 2}, coors_group_2::Array{<:Real, 2}) + coor_size = size(coors_group_1)[2] + + if coor_size != size(coors_group_2)[2] + error(""Can not perform superimposition for conformations with different size."") + end + + # ----------------------- + # perform superimposition + # ----------------------- + fit = compute_superimposition_transformation(coors_group_1, coors_group_2) + coors_group_3 = apply_transformation(fit, coors_group_1) + + d = sum((coors_group_2 - coors_group_3).^2) + rmsd = sqrt(d / coor_size) + + return rmsd +end + +# -------------------------- +# Generate a random rotation +# -------------------------- +function generate_random_rotation() + # step 1: random axis + rand_vec = rand(Float64, 3) .- 0.5 + rand_vec_norm = sqrt(rand_vec' * rand_vec) + axis = rand_vec ./ rand_vec_norm + (ax, ay, az) = axis + + outer_product_matrix = axis * axis' + cross_product_matrix = [0 -az ay; az 0 -ax; -ay ax 0] + + # step 2: random θ + theta = rand() * 2 * pi + cos_theta = cos(theta) + sin_theta = sin(theta) + R = cos_theta * I + (1 - cos_theta) * outer_product_matrix + sin_theta * cross_product_matrix + + # return the rotation matrix + return R +end + +# ------------------------------ +# Rotate by theta around an axis +# ------------------------------ +function rotation_matrix_around_axis(axis::Vector{<:Real}, theta) + cost = cosd(theta) + sint = sind(theta) + _1_m_cost = 1 - cost + ux = 
axis[1] + uy = axis[2] + uz = axis[3] + rotmat = reshape([cost + ux*ux*_1_m_cost, + uy*ux*_1_m_cost + uz*sint, + uz*ux*_1_m_cost - uy*sint, + ux*uy*_1_m_cost - uz*sint, + cost + uy*uy*_1_m_cost, + uz*uy*_1_m_cost + ux*sint, + ux*uz*_1_m_cost + uy*sint, + uy*uz*_1_m_cost - ux*sint, + cost + uz*uz*_1_m_cost], (3, 3)) + return rotmat +end + +function rotation_matrix_around_x(theta) + cost = cosd(theta) + sint = sind(theta) + rotmat = [1 0 0; 0 cost -sint; 0 sint cost] + return rotmat +end + +function rotation_matrix_around_y(theta) + cost = cosd(theta) + sint = sind(theta) + rotmat = [cost 0 sint; 0 1 0; -sint 0 cost] + return rotmat +end + +function rotation_matrix_around_z(theta) + cost = cosd(theta) + sint = sind(theta) + rotmat = [cost -sint 0; sint cost 0; 0 0 1] + return rotmat +end + +# =================== +# Physical properties +# =================== + +# -------------- +# Center of mass +# -------------- + +function compute_center_of_mass(atom_indices::Vector{Int}, atom_names::Vector{String}, atom_coors::Array{<:Real, 2}) + total_mass = 0 + tmp_coor = zeros(Float64, 3) + for i in atom_indices + a_mass = ATOM_MASS_DICT[atom_names[i][1]] + a_coor = atom_coors[:, i] + total_mass += a_mass + tmp_coor += a_coor * a_mass + end + com = tmp_coor / total_mass + + return com +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/parser_dcd.jl",".jl","10840","377","############################################################################### +# _ _ # +# __| | ___ __| | # +# / _` | / __|/ _` | # +# | (_| || (__| (_| | # +# \__,_| \___|\__,_| # +# # +############################################################################### + +using Printf + +struct DCD_trajectory + traj_type::String + traj_frames::Int + traj_first_step::Int + traj_output_interval::Int + traj_steps::Int + boundary_type::Int + traj_format::Int + md_doc::Vector{String} + num_atoms::Int + boundary_box_size::Array{Float64, 2} + conformations::Vector{Conformation} +end + + +function 
read_dcd(dcd_filename::String, args::Dict{String, <:Any}=Dict{String, Any}()) + + verbose = get(args, ""verbose"", false) + i_frame_begin = get(args, ""begin"", 1) + i_frame_end = get(args, ""end"", -1) + i_frame_step = get(args, ""step"", 1) + + dcd_file = open(dcd_filename, ""r"") + + if verbose + println(""============================================================"") + println(""> Open DCD file: "", dcd_filename) + end + + # ======================== + # Read in head information + # ======================== + + # DCD first block + block_size_0 = read(dcd_file, Int32) + if block_size_0 != 84 + error(""ERROR: wrong DCD format!"") + end + + # ""CORD"" or ""VELD"" + tmp_char_array = Array{Char}(undef, 0) + for i = 1:4 + c = read(dcd_file, Char) + push!(tmp_char_array, c) + end + file_type = String(tmp_char_array) + + # simulation info + tmp_int_array = Array{Int32}(undef, 0) + for i = 1:20 + m = read(dcd_file, Int32) + push!(tmp_int_array, m) + end + n_frames = tmp_int_array[1] + n_ts_first = tmp_int_array[2] + n_ts_interval = tmp_int_array[3] + n_ts_all = tmp_int_array[4] + bc_flag = tmp_int_array[11] + traj_format = tmp_int_array[20] + + bc_type = bc_flag == 0 ? 
""No boundary"" : ""Periodic boundary"" + + block_size_1 = read(dcd_file, Int32) + if block_size_1 != 84 + error(""ERROR: wrong DCD format!"") + end + + # ====================== + # Read in MD system info + # ====================== + + block_size_0 = read(dcd_file, Int32) + + dcd_doc_lines = Vector{String}(undef, 0) + n_doc_line = read(dcd_file, Int32) + for i in 1:n_doc_line + tmp_char_array = Array{Char}(undef, 0) + for j = 1:80 + c = read(dcd_file, Char) + push!(tmp_char_array, c) + end + doc_line = String(tmp_char_array) + push!(dcd_doc_lines, doc_line) + end + + block_size_1 = read(dcd_file, Int32) + + # ======================= + # Read in particle number + # ======================= + + block_size_0 = read(dcd_file, Int32) + + n_particles = read(dcd_file, Int32) + + block_size_1 = read(dcd_file, Int32) + + # ======================== + # Brief output of DCD info + # ======================== + + if verbose + println(""> DCD information:"") + println("" > File type : $(file_type)"") + println("" --------------------------------------------------"") + println("" > Number of particles : $(n_particles)"") + println("" > Boundary codition : $(bc_type)"") + println("" --------------------------------------------------"") + println("" > Number of frames : $(n_frames)"") + println("" > First MD step : $(n_ts_first)"") + println("" > Total MD steps : $(n_ts_all)"") + println("" > Output interval : $(n_ts_interval)"") + println("" > Trajectory format : $(traj_format)"") + + println(""============================================================"") + println(""> DCD information:"") + end + + if verbose + for line in dcd_doc_lines + println("" > |"", line, ""|"") + end + end + + # ====================== + # Read in Coordinates!!! 
+ # ====================== + data_block_size = n_particles * 4 # Single precision (4-byte), Float32 + + data_frame_size = (data_block_size + 8) * 3 + if bc_flag == 1 + data_frame_size += 8 * 6 + 8 + end + + if i_frame_end < 1 + i_frame_end = n_frames + end + i_frame_read_indices = [i_frame_begin:i_frame_step:i_frame_end...] + + boundary_conditions = zeros(Float64, 6, length(i_frame_read_indices)) + conformations = Vector{Conformation}(undef, 0) + is_broken_trajectory = false + i_read_frame = 0 + for t in 1 : n_frames + if !(t in i_frame_read_indices) + skip(dcd_file, data_frame_size) + continue + end + if verbose + println("" ~~~~> Reading Frame: "", t) + end + i_read_frame += 1 + # ---------------- + # Read in box info + # ---------------- + if bc_flag == 1 + block_size_0 = read(dcd_file, Int32) + bc_size = zeros(Float64, 6) + for i in 1 : 6 + l = read(dcd_file, Float64) + bc_size[i] = l + end + block_size_1 = read(dcd_file, Int32) + # store box information + boundary_conditions[:, i_read_frame] = bc_size + end + + # ------------------ + # Read in coors/vels + # ------------------ + coors = zeros(Float64, (3, n_particles)) + for dim = 1 : 3 + block_size_0 = read(dcd_file, Int32) + for i in 1 : n_particles + x = read(dcd_file, Float32) + coors[dim, i] = x + end + block_size_1 = read(dcd_file, Int32) + if block_size_0 != data_block_size || block_size_1 != data_block_size + println("" WARNING: wrong block size in coordinate reading!"") + println("" Incomplete trajectory?"") + is_broken_trajectory = true + break + end + end + if is_broken_trajectory + break + end + push!(conformations, Conformation(n_particles, coors)) + if eof(dcd_file) + break + end + end + + if verbose + println(""------------------------------------------------------------"") + println(""> FINISH reading the DCD file. 
Have fun!"") + println(""============================================================"") + println("" "") + end + + close(dcd_file) + + new_trajectory = DCD_trajectory(file_type, + i_read_frame, + n_ts_first, + n_ts_interval, + n_ts_all, + bc_flag, + traj_format, + dcd_doc_lines, + n_particles, + boundary_conditions, + conformations) + + return new_trajectory + +end + + +function write_dcd(dcd_trajectory::DCD_trajectory, dcd_filename::AbstractString) + + dcd_file = open(dcd_filename, ""w"") + + n_atom = dcd_trajectory.num_atoms + + # ====================== + # Write head information + # ====================== + # + # block size + write(dcd_file, Int32(84)) + + # ""CORD"" or ""VELD"" + for i = 1:4 + write(dcd_file, dcd_trajectory.traj_type[i]) + end + + # simulation info + tmp_int_array = zeros(Int32, 20) + tmp_int_array[1] = dcd_trajectory.traj_frames + tmp_int_array[2] = dcd_trajectory.traj_first_step + tmp_int_array[3] = dcd_trajectory.traj_output_interval + tmp_int_array[4] = dcd_trajectory.traj_steps + tmp_int_array[11] = dcd_trajectory.boundary_type + tmp_int_array[20] = dcd_trajectory.traj_format + for i = 1:20 + write(dcd_file, tmp_int_array[i]) + end + + # block size + write(dcd_file, Int32(84)) + + # =========================== + # Write MD information string + # =========================== + # + # block size calculation + n_doc_line = length(dcd_trajectory.md_doc) + block_size = 4 + 80 * n_doc_line + + # block size + write(dcd_file, Int32(block_size)) + + # write dcd documentation + write(dcd_file, Int32(n_doc_line)) + for i in 1:n_doc_line - 1 + for j = 1:80 + write(dcd_file, dcd_trajectory.md_doc[i][j]) + end + end + new_doc_line = rpad(""REMARKS ** MODIFIED BY JULIA CG TOOL **"", 80) + for j = 1:80 + write(dcd_file, new_doc_line[j]) + end + + # block size + write(dcd_file, Int32(block_size)) + + # ===================== + # Write number of atoms + # ===================== + # + # block size + write(dcd_file, Int32(4)) + + # write num_atoms + 
write(dcd_file, Int32(n_atom)) + + # block size + write(dcd_file, Int32(4)) + + # =================== + # Write trajectory!!! + # =================== + # + coor_block_size = 4 * n_atom + + # loop over frames + for i_frame in 1:length( dcd_trajectory.conformations ) + # ------------------------- + # Write boundary conditions + # ------------------------- + if dcd_trajectory.boundary_type == 1 + # block size + write(dcd_file, Int32(48)) + + for j_dim in 1:6 + write(dcd_file, Float64(dcd_trajectory.boundary_box_size[j_dim, i_frame])) + end + + # block size + write(dcd_file, Int32(48)) + end + + # ------------ + # coordinate x + # ------------ + # + # block size + write(dcd_file, Int32(coor_block_size)) + + # write X + for j in 1:n_atom + write(dcd_file, Float32(dcd_trajectory.conformations[i_frame].coors[1, j])) + end + + # block size + write(dcd_file, Int32(coor_block_size)) + + # ------------ + # coordinate y + # ------------ + # + # block size + write(dcd_file, Int32(coor_block_size)) + + # write Y + for j in 1:n_atom + write(dcd_file, Float32(dcd_trajectory.conformations[i_frame].coors[2, j])) + end + + # block size + write(dcd_file, Int32(coor_block_size)) + + # ------------ + # coordinate z + # ------------ + # + # block size + write(dcd_file, Int32(coor_block_size)) + + # write Z + for j in 1:n_atom + write(dcd_file, Float32(dcd_trajectory.conformations[i_frame].coors[3, j])) + end + + # block size + write(dcd_file, Int32(coor_block_size)) + + end + + close(dcd_file) + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/collective_variables.jl",".jl","3046","105","############################################################################### +# Collective Variables # +############################################################################### +# compute_nativeness(top, conformation, args) +# compute_center_of_mass(atom_indices, top, conformation, args) +############################################################################### + +# 
============= +# CG nativeness +# ============= + +"""""" + compute_nativeness(t, c, args) + +# Arguments +- `t`: GenTopology +- `c`: Conformation +- `args`: other arguments + +## Contact type +- 1: cannonical contact type: when r < r0 * cutoff contact is formed +- 2: (1/N) * sum (1 / (1 + exp(beta * (r - r0 * cutoff)))) + (Robert B. Best et al. PNAS 2013) +"""""" +function compute_nativeness(t::GenTopology, c::Conformation, args::Dict{String, <:Any}=Dict{String, Any}()) + verbose = get(args, ""verbose"", false) + q_type = get(args, ""type"", 1) + cutoff = get(args, ""cutoff"", 1.2) + beta = get(args, ""beta"", 5.0) + + region = get(args, ""region"", []) + + # count number of native contacts + num_native_contacts = 0 + for p in t.top_pairs + if p.function_type == 1 || p.function_type == 2 + if length(region) > 0 && (!in(p.i, region) || !in(p.j, region)) + continue + end + num_native_contacts += 1 + end + end + + # loop over all the native contacts + num_correct_contact = 0 + num_contact_type_2 = 0.0 + for p in t.top_pairs + if p.function_type == 1 || p.function_type == 2 + r0 = p.r0 + i = p.i + j = p.j + if length(region) > 0 && (!in(i, region) || !in(j, region)) + continue + end + r = compute_distance(c.coors[:, i], c.coors[:, j]) + if q_type == 1 # simple type + if r <= r0 * cutoff + num_correct_contact += 1 + end + elseif q_type == 2 # complex type + num_contact_type_2 += 1 / (1 + exp( beta * (r - cutoff * r0))) + end + end + end + + if q_type == 1 # simple type + return num_correct_contact / num_native_contacts + elseif q_type == 2 # complex type + return num_contact_type_2 / num_native_contacts + end +end + + +# ============== +# Center of mass +# ============== + +"""""" + compute_center_of_mass(idx, t, c, args) + +# Arguments +- `idx`: indices of particles +- `t`: GenTopology +- `c`: Conformation +- `args`: other arguments + +"""""" +function compute_center_of_mass(atom_indices::Vector{Int}, t::GenTopology, c::Conformation, args::Dict{String, 
<:Any}=Dict{String, Any}()) + + verbose = get(args, ""verbose"", false) + + total_mass = 0 + tmp_coor = zeros(Float64, 3) + + for i in atom_indices + a_mass = RES_MASS_DICT[t.top_atoms[i].residue_name] + a_coor = c.coors[:, i] + total_mass += a_mass + tmp_coor += a_coor * a_mass + end + com = tmp_coor / total_mass + + return com +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/parsers.jl",".jl","418","13","############################################################################### +# File IO # +############################################################################### + +include(""parser_toml.jl"") +include(""parser_pdb.jl"") +include(""parser_cif.jl"") +include(""parser_seq.jl"") +include(""parser_top.jl"") +include(""parser_crd.jl"") +include(""parser_dcd.jl"") + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/molecule.jl",".jl","1189","65","############################################################################### +# Molecule Structures # +############################################################################### + +# ======== +# All-atom +# ======== + +struct AAResidue + name::String + atoms::Vector{Int64} +end + +struct AAChain + id::String + segname::String + moltype::Int + residues::Vector{Int64} +end + +struct AAMolecule + atom_names::Vector{String} + atom_coors::Array{Float64, 2} + residues::Vector{AAResidue} + chains::Vector{AAChain} +end + +struct AACGResidue + res_idx::Int + res_name::String + atm_name::String + atoms::Vector{Int64} +end + +struct AACGChain + first::Int + last::Int + moltype::Int + segname::String +end + +# ============== +# Coarse-grained +# ============== + +# struct CGResidue +# name::String +# particles::Vector{Int64} +# end + +# struct CGChain +# id::Char +# segname::String +# moltype::Int +# residues::Vector{Int64} +# end + +# struct CGMolecule +# particle_names::Vector{String} +# particle_coors::Array{Float64, 2} +# residues::Vector{CGResidue} +# chains::Vector{CGChain} +# end + +","Julia" 
+"Genesis","noinil/genesis_cg_tool","src/lib/conformation.jl",".jl","1397","50","############################################################################### +# Conformation # +############################################################################### + +struct Conformation + num_particle::Int + coors::Array{Float64, 2} +end + +function centroid(c::Conformation) + coor_centroid = zeros(Float64, 3) + for i_bead in 1 : c.num_particle + coor_centroid .+= c.coors[:, i_bead] + end + coor_centroid ./= c.num_particle + return coor_centroid +end + +function radius_of_gyration(c::Conformation) + coor_centroid = zeros(Float64, 3) + for i_bead in 1 : c.num_particle + coor_centroid .+= c.coors[:, i_bead] + end + coor_centroid ./= c.num_particle + + dist_sq_sum = 0 + for i_bead in 1 : c.num_particle + v = c.coors[:, i_bead] - coor_centroid + dist_sq_sum += v' * v + end + rg = sqrt(dist_sq_sum / c.num_particle) +end + + +function radius_of_circumshpere(c::Conformation) + coor_centroid = zeros(Float64, 3) + for i_bead in 1 : c.num_particle + coor_centroid .+= c.coors[:, i_bead] + end + coor_centroid ./= c.num_particle + + tmp_dist = 0 + for i_bead in 1 : c.num_particle + v = c.coors[:, i_bead] - coor_centroid + v_norm_sqr = sqrt( v' * v ) + tmp_dist = v_norm_sqr > tmp_dist ? 
v_norm_sqr : tmp_dist + end + rc = tmp_dist +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/selection.jl",".jl","749","23","############################################################################### +# selections # +############################################################################### + +function parse_selection(s::AbstractString) + v = Vector{Int}(undef, 0) + sel_pieces = split(s, r""\s*,\s*"", keepempty=false) + for sel in sel_pieces + if occursin(""to"", sel) + bounds = split(sel, r""\s*to\s*"", limit=2) + first = parse(Int, bounds[1]) + last = parse(Int, bounds[2]) + for i in first : last + push!(v, i) + end + else + i = parse(Int, sel) + push!(v, i) + end + end + return v +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/parser_pdb.jl",".jl","10159","307","############################################################################### +# ____ ____ ____ # +# | _ \ | _ \ | __ ) # +# | |_) || | | || _ \ # +# | __/ | |_| || |_) | # +# |_| |____/ |____/ # +# # +############################################################################### + +# ============= +# Function list +# +# function parse_PDB_line(pdb_line::AbstractString) +# function write_PDB_line(io::IO, aline::PDBLine) +# function read_PDB(pdb_name::AbstractString) +# function write_pdb(top::GenTopology, conf::Conformation, system_name::AbstractString, args::Dict{String, <:Any}=Dict{String, Any}()) +# ============= + +using Printf + +struct PDBLine + atom_serial::Int # line[7:11] + atom_name::String # line[13:16] + residue_name::String # line[18:21] + chain_id::String # line[22] + residue_serial::Int # line[23:26] + coor_x::Float64 # line[31:38] + coor_y::Float64 # line[39:46] + coor_z::Float64 # line[47:54] + occupancy::Float64 # line[55:60] + tempfactor::Float64 # line[61:66] + segment_id::String # line[67:76] + element_name::String # line[77:78] + charge::Float64 # line[79:80] +end + +function parse_PDB_line(pdb_line::AbstractString) + atom_serial = 0 + try + 
atom_serial = parse(Int, pdb_line[7:11]) + catch + atom_serial = 0 + # println(""WARNING! Error in reading atom serial in PDB!"") + end + atom_name = strip(pdb_line[13:16]) + residue_name = strip(pdb_line[18:21]) + chain_id = pdb_line[22:22] + residue_serial = parse(Int, pdb_line[23:26]) + coor_x = parse(Float64, pdb_line[31:38]) + coor_y = parse(Float64, pdb_line[39:46]) + coor_z = parse(Float64, pdb_line[47:54]) + occupancy = 0.0 + try + occupancy = parse(Float64, pdb_line[55:60]) + catch + occupancy = 0.0 + end + tempfactor = 0.0 + try + tempfactor = parse(Float64, pdb_line[61:66]) + catch + tempfactor = 0.0 + end + segment_id = strip(pdb_line[67:76]) + element_name = strip(pdb_line[77:78]) + charge = 0.0 + try + charge = parse(Float64, pdb_line[79:80]) + catch + charge = 0.0 + end + new_pdb_data = PDBLine(atom_serial, atom_name, + residue_name, chain_id, residue_serial, + coor_x, coor_y, coor_z, + occupancy, tempfactor, segment_id, + element_name, charge) + return new_pdb_data +end + +function write_PDB_line(io::IO, aline::PDBLine) + @printf(io, ""ATOM %5d %4s %4s%1s%4d %8.3f%8.3f%8.3f%6.2f%6.2f%10s%2s%2d \n"", + aline.atom_serial, + aline.atom_name, + rpad(aline.residue_name, 4), + aline.chain_id, + aline.residue_serial, + aline.coor_x, + aline.coor_y, + aline.coor_z, + aline.occupancy, + aline.tempfactor, + aline.segment_id, + aline.element_name, + Int(round(aline.charge))) +end + + + +# =========== +# Read PDB!!! 
+# =========== + +function read_PDB(pdb_name::AbstractString) + aa_pdb_lines = [] + + # ================================= + # Step 1: Determine number of atoms + # ================================= + + aa_num_atom = 0 + for line in eachline(pdb_name) + if startswith(line, ""ATOM"") + push!(aa_pdb_lines, rpad(line, 80)) + aa_num_atom += 1 + elseif startswith(line, ""TER"") || startswith(line, ""END"") + push!(aa_pdb_lines, line) + end + end + + # ========================== + # Data structures for output + # ========================== + + aa_atom_name = fill("" "", aa_num_atom) + aa_coor = zeros(Float64, (3, aa_num_atom)) + aa_residues = [] + aa_chains = [] + + # --------------- + # Local variables + # --------------- + + i_atom = 0 + i_resid = 0 + curr_resid = NaN + curr_chain = NaN + curr_rname = "" "" + residue_name = "" "" + chain_id = ""?"" + tmp_res_atoms = [] + tmp_chain_res = [] + segment_id = "" "" + + # ======================================== + # Step 2: Add atoms to residues and chains + # ======================================== + + for line in aa_pdb_lines + if startswith(line, ""TER"") || startswith(line, ""END"") + if length(tmp_res_atoms) > 0 + push!(aa_residues, AAResidue(residue_name, tmp_res_atoms)) + tmp_res_atoms = [] + end + if length(tmp_chain_res) > 0 + # ----------------------------- + # Determine chain molecule type + # ----------------------------- + mol_type = -1 + for i_res in tmp_chain_res + res_name = aa_residues[i_res].name + tmp_mol_type = MOL_OTHER + if in(res_name, RES_NAME_LIST_PROTEIN) + tmp_mol_type = MOL_PROTEIN + elseif in(res_name, RES_NAME_LIST_DNA) + tmp_mol_type = MOL_DNA + elseif in(res_name, RES_NAME_LIST_RNA) + tmp_mol_type = MOL_RNA + elseif haskey(RES_NAME_RNA_DICT, res_name) || haskey(RES_NAME_DNA_DICT, res_name) + tmp_mol_type = MOL_DNA + for i_atom in aa_residues[i_res].atoms + atom_name = aa_atom_name[i_atom] + if atom_name == ""O2'"" + tmp_mol_type = MOL_RNA + break + end + end + end + if mol_type == -1 + 
mol_type = tmp_mol_type + elseif tmp_mol_type != mol_type + errmsg = @sprintf(""BUG: Inconsistent residue types in chain ID - %s residue - %d : %s "", + chain_id, + i_res, + res_name) + error(errmsg) + end + end + # -------------------------------------- + # chain mol type determination ends here + # -------------------------------------- + + push!(aa_chains, AAChain(chain_id, segment_id, mol_type, tmp_chain_res)) + tmp_chain_res = [] + end + continue + end + + new_pdb_data = parse_PDB_line(line) + + i_atom += 1 + atom_name = new_pdb_data.atom_name + residue_name = new_pdb_data.residue_name + chain_id = new_pdb_data.chain_id + residue_serial = new_pdb_data.residue_serial + coor_x = new_pdb_data.coor_x + coor_y = new_pdb_data.coor_y + coor_z = new_pdb_data.coor_z + segment_id = new_pdb_data.segment_id + + aa_atom_name[i_atom] = atom_name + aa_coor[1, i_atom] = coor_x + aa_coor[2, i_atom] = coor_y + aa_coor[3, i_atom] = coor_z + + if residue_serial != curr_resid + i_resid += 1 + push!(tmp_chain_res, i_resid) + curr_resid = residue_serial + if length(tmp_res_atoms) > 0 + push!(aa_residues, AAResidue(curr_rname, tmp_res_atoms)) + tmp_res_atoms = [] + end + curr_rname = residue_name + end + + push!(tmp_res_atoms, i_atom) + end + + new_molecule = AAMolecule(aa_atom_name, aa_coor, aa_residues, aa_chains) + + return new_molecule + +end + + +# ============= +# Output CG PDB +# ============= +function write_pdb(top::GenTopology, conf::Conformation, system_name::AbstractString, args::Dict{String, <:Any}=Dict{String, Any}()) + + verbose = get(args, ""verbose"", false) + + pdb_name = system_name * "".pdb"" + pdb_file = open(pdb_name, ""w"") + + do_output_cgconnect = get(args, ""cgconnect"", false) + + num_particles = conf.num_particle + is_huge_system = num_particles > 9999 + + chain_id_set = ""_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890"" + tmp_chain_id = 0 + tmp_seg_name = """" + real_chain_id = 1 + for i_bead in 1 : num_particles + i_chain = 
top.top_atoms[i_bead].chain_id + i_sname = top.top_atoms[i_bead].seg_name + if i_chain != tmp_chain_id || i_sname != tmp_seg_name + if tmp_chain_id > 0 + print(pdb_file, ""TER\n"") + real_chain_id += 1 + end + tmp_chain_id = i_chain + tmp_seg_name = i_sname + end + resid_index_tmp = top.top_atoms[i_bead].residue_index + + @printf(pdb_file, + ""ATOM %5d %4s%1s%4s%1s%4d%1s %8.3f%8.3f%8.3f%6.2f%6.2f%10s%2s%2s \n"", + i_bead % 100000, + top.top_atoms[i_bead].atom_name, + ' ', + rpad( top.top_atoms[i_bead].residue_name, 4 ), + chain_id_set[mod(real_chain_id, 63) + 1], + resid_index_tmp % 10000, + ' ', + conf.coors[1 , i_bead], + conf.coors[2 , i_bead], + conf.coors[3 , i_bead], + 0.0, + 0.0, + top.top_atoms[i_bead].seg_name, + """", + """") + end + print(pdb_file,""TER\n"") + + cg_pdb_cnct_line = ""CONECT%5d%5d \n"" + if do_output_cgconnect + for bond in top.top_bonds + @printf(pdb_file, ""CONECT%5d%5d \n"", bond.i, bond.j) + end + end + + print(pdb_file,""END\n"") + print(pdb_file,""\n"") + + close(pdb_file) + + if verbose + println(""> ... 
.pdb (CG) : DONE!"") + end + +end + + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/parser_crd.jl",".jl","2754","82","############################################################################### +# _ # +# __ _ _ __ ___ ___ _ __ __| | # +# / _` || '__|/ _ \ / __|| '__|/ _` | # +# | (_| || | | (_) || (__ | | | (_| | # +# \__, ||_| \___/ \___||_| \__,_| # +# |___/ # +# # +############################################################################### + +using Printf + +function write_grocrd(top::GenTopology, conf::Conformation, sys_name::AbstractString="""", args::Dict{String, <:Any}=Dict{String, Any}()) + + verbose = get(args, ""verbose"", false) + + if length(sys_name) > 0 + system_name = sys_name + else + system_name = top.system_name + end + + gro_name = system_name * "".gro"" + gro_file = open(gro_name, ""w"") + + cg_num_particles = conf.num_particle + + # measure box size + (minx, miny, minz) = minimum(conf.coors, dims=2) + (maxx, maxy, maxz) = maximum(conf.coors, dims=2) + box_size = [maxx, maxy, maxz] - [minx, miny, minz] + + @printf(gro_file, ""CG model %s, t = %16.3f \n"", system_name, 0) + @printf(gro_file, ""%12d \n"", cg_num_particles) + + for i_bead in 1 : cg_num_particles + @printf(gro_file, ""%5d%5s%5s%5d %8.4f %8.4f %8.4f %8.4f %8.4f %8.4f \n"", + top.top_atoms[i_bead].residue_index % 100000, + top.top_atoms[i_bead].residue_name, + top.top_atoms[i_bead].atom_name, + i_bead % 100000, + conf.coors[1 , i_bead] * 0.1, + conf.coors[2 , i_bead] * 0.1, + conf.coors[3 , i_bead] * 0.1, + 0.0, 0.0, 0.0) + end + @printf(gro_file, ""%15.4f%15.4f%15.4f \n\n"", box_size[1] * 0.1, box_size[2] * 0.1, box_size[3] * 0.1) + + close(gro_file) + + if verbose + println(""> ... 
.gro: DONE!"") + end + +end + + +function read_grocrd(grocrd_name::AbstractString) + + grocrd_lines = [] + for line in eachline(grocrd_name) + push!(grocrd_lines, line) + end + + num_particle = parse(Int, grocrd_lines[2]) + coors = zeros(Float64, (3, num_particle)) + for i in 1:num_particle + words = split(grocrd_lines[i + 2][21:end]) + x = parse(Float64, words[1]) * 10.0 + y = parse(Float64, words[2]) * 10.0 + z = parse(Float64, words[3]) * 10.0 + coors[1, i] = x + coors[2, i] = y + coors[3, i] = z + end + conf = Conformation(num_particle, coors) + + return conf + +end + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/parser_seq.jl",".jl","3186","105","############################################################################### +# ____ _ _ __ _ _ # +# | __ )(_) ___ (_)_ __ / _| ___ _ __ _ __ ___ __ _| |_(_) ___ ___ # +# | _ \| |/ _ \| | '_ \| |_ / _ \| '__| '_ ` _ \ / _` | __| |/ __/ __| # +# | |_) | | (_) | | | | | _| (_) | | | | | | | | (_| | |_| | (__\__ \ # +# |____/|_|\___/|_|_| |_|_| \___/|_| |_| |_| |_|\__,_|\__|_|\___|___/ # +# # +############################################################################### + +using Printf + +function read_fasta(fasta_filename::String) + mol_seqence = """" + num_chain = 0 + seq_list = [] + for line in eachline(fasta_filename) + if length(line) == 0 + continue + end + if line[1] == '>' + if num_chain > 0 && length(mol_seqence) > 0 + push!(seq_list, mol_seqence) + end + mol_seqence = """" + num_chain += 1 + continue + end + seq = strip(line) + if length(seq) == 0 + continue + end + mol_seqence *= join(split(seq)) + end + if num_chain > 0 && length(mol_seqence) > 0 + push!(seq_list, mol_seqence) + end + return (num_chain, seq_list[:]) +end + +function read_modified_pfm(pfm_filename::String) + pfm = Dict() + for line in eachline(pfm_filename) + words = split(line) + if length(words) < 1 + continue + end + w1 = words[1] + if occursin(w1, ""ACGT"") + local_list = [] + for p in words[2:end] + push!( local_list, 
parse(Float64, p) ) + end + pfm[w1] = local_list + elseif in(w1, [""CHAIN_A"", ""CHAIN_B""]) + local_list = [] + for dna_id in words[2:end] + push!( local_list, parse(Int, dna_id) ) + end + pfm[w1] = local_list + end + end + + pfmat = [pfm[""A""] pfm[""C""] pfm[""G""] pfm[""T""]] + ppmat = pfmat ./ sum(pfmat, dims=2) + pwmat0 = -log.(ppmat) + pwmat = pwmat0 .- sum(pwmat0, dims=2) ./ 4 + + return (pwmat, pfm[""CHAIN_A""], pfm[""CHAIN_B""]) +end + +function write_sequence(aa_molecule::AAMolecule, system_name::String) + + chain_id_set = ""_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890"" + cg_seq_name = system_name * "".fasta"" + cg_seq_file = open(cg_seq_name, ""w"") + + aa_residues = aa_molecule.residues + aa_chains = aa_molecule.chains + + aa_num_residue = length(aa_residues) + aa_num_chain = length(aa_chains) + + for i_chain in 1:aa_num_chain + chain = aa_chains[i_chain] + mol_type = chain.moltype + @printf(cg_seq_file, + ""> Chain %s : %s \n"", + chain_id_set[mod(i_chain, 63) + 1], + MOL_TYPE_LIST[mol_type]) + + for i_res in chain.residues + res_name = aa_residues[i_res].name + print(cg_seq_file, RES_SHORTNAME_DICT[res_name]) + end + + print(cg_seq_file, ""\n"") + end + + close(cg_seq_file) + println(""> ... 
sequence output : DONE!"") + +end + + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/coarse_graining.jl",".jl","124099","2787","############################################################################### +# Coarse Graining Biomolecules # +############################################################################### + +using Printf + +struct CGTopBond + i::Int + j::Int + r0::Float64 +end + +struct CGTopAngle + i::Int + j::Int + k::Int + a0::Float64 +end + +struct CGTopDihedral + i::Int + j::Int + k::Int + l::Int + t0::Float64 +end + +struct CGTopContact + i::Int + j::Int + r0::Float64 +end + +struct CGTopPWMcos + i::Int + r0::Float64 + t1::Float64 + t2::Float64 + t3::Float64 + eA::Float64 + eC::Float64 + eG::Float64 + eT::Float64 +end + +function coarse_graining(aa_molecule::AAMolecule, force_field::ForceFieldCG, args) + + # ================= + # Parsing arguments + # ================= + pdb_name = get(args, ""pdb"", ""random.pdb"") + protein_charge_filename = get(args, ""respac"", """") + pfm_filename = get(args, ""pfm"", """") + verbose = get(args, ""verbose"", false) + do_debug = get(args, ""debug"", false) + do_output_log = get(args, ""log"", false) + do_test_local_only = get(args, ""test-local-only"", false) + + use_safe_dihedral = get(args, ""use-safe-dihedral"", 1) + + gen_3spn_itp = get(args, ""3spn-param"", 0) + DNA_use_5_phos = get(args, ""3spn-use-5-phos"", false) + DNA_circular = get(args, ""3spn-circular"", false) + + ccgo_contact_scale = get(args, ""CCGO-contact-scale"", 1.0) + aicg_scale_scheme = get(args, ""aicg-scale"", 1) + cgRNA_use_phosphate_go = get(args, ""cgRNA-phosphate-Go"", false) + pwmcos_gamma = get(args, ""pwmcos-scale"", 1.0) + pwmcos_epsil = get(args, ""pwmcos-shift"", 0.0) + pwmcosns_epsil = get(args, ""pwmcos-ns-ene"", -1.0) + + # ---------------------- + # More details from TOML + # ---------------------- + has_toml_mod = false + if haskey(args, ""modeling-options"") + has_toml_mod = true + ff_detail_config = 
args[""modeling-options""] + + if haskey(ff_detail_config, ""3SPN.2C"") + if haskey(ff_detail_config[""3SPN.2C""], ""USE_5_PHOSPHATE"") + val_string = ff_detail_config[""3SPN.2C""][""USE_5_PHOSPHATE""] + if val_string == ""YES"" + DNA_use_5_phos = true + end + end + end + end + + # =============== + # Step 0: numbers + # =============== + i_step = 0 + + ff_pro = force_field.ff_protein + ff_dna = force_field.ff_DNA + ff_rna = force_field.ff_RNA + ff_pro_dna = force_field.ff_protein_DNA + ff_pro_rna = force_field.ff_protein_RNA + ff_dna_rna = force_field.ff_DNA_RNA + + aa_atom_name = aa_molecule.atom_names + aa_coor = aa_molecule.atom_coors + aa_residues = aa_molecule.residues + aa_chains = aa_molecule.chains + + aa_num_atom = length(aa_atom_name) + aa_num_residue = length(aa_residues) + aa_num_chain = length(aa_chains) + + num_chain_pro = 0 + num_chain_DNA = 0 + num_chain_RNA = 0 + + # =============================== + # Step 1: find out molecule types + # =============================== + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): estimate CG particle number for every chain."") + end + + cg_num_particles = 0 + cg_chain_length = zeros(Int, aa_num_chain) + + for i_chain = 1 : aa_num_chain + chain = aa_chains[i_chain] + mol_type = chain.moltype + + # ---------------------- + # determine cg chain len + # ---------------------- + n_res = length(chain.residues) + if mol_type == MOL_DNA + if DNA_use_5_phos + n_particles = 3 * n_res + else + n_particles = 3 * n_res - 1 + end + num_chain_DNA += 1 + elseif mol_type == MOL_RNA + n_particles = 3 * n_res - 1 + num_chain_RNA += 1 + elseif mol_type == MOL_PROTEIN + n_particles = n_res + num_chain_pro += 1 + else + n_particles = 0 + end + cg_chain_length[i_chain] = n_particles + cg_num_particles += n_particles + if verbose + @printf("" > Chain %3d | %7s \n"", i_chain, MOL_TYPE_LIST[ mol_type ]) + end + end + + if verbose + 
println(""------------------------------------------------------------"") + @printf("" In total: %5d protein chains,\n"", num_chain_pro) + @printf("" %5d DNA strands,\n"", num_chain_DNA) + @printf("" %5d RNA strands.\n"", num_chain_RNA) + end + + # =========================== + # Step 2: Assign CG particles + # =========================== + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): assign coarse-grained particles."") + end + + cg_residues = [] + cg_chains = [] + + i_offset_cg_particle = 0 + i_offset_cg_residue = 0 + + for i_chain in 1:aa_num_chain + chain = aa_chains[i_chain] + mol_type = chain.moltype + seg_name = chain.segname + + i_bead = i_offset_cg_particle + i_resi = i_offset_cg_residue + if mol_type == MOL_PROTEIN + for i_res in chain.residues + cg_idx = [] + aa_res_name = aa_residues[i_res].name + res_name = RES_NAME_PROTEIN_DICT[aa_res_name] + for i_atom in aa_residues[i_res].atoms + atom_name = aa_atom_name[i_atom] + if atom_name[1] == 'H' + continue + else + push!(cg_idx, i_atom) + end + end + i_bead += 1 + i_resi += 1 + push!(cg_residues, AACGResidue(i_resi, res_name, ""CA"", cg_idx)) + end + elseif mol_type == MOL_DNA + tmp_atom_index_O3p = 0 + for (i_local_index, i_res) in enumerate( chain.residues ) + aa_res_name = aa_residues[i_res].name + res_name = RES_NAME_DNA_DICT[aa_res_name] + if i_local_index > 1 + cg_DP_idx = [tmp_atom_index_O3p] + else + cg_DP_idx = [] + end + cg_DS_idx = [] + cg_DB_idx = [] + for i_atom in aa_residues[i_res].atoms + atom_name = aa_atom_name[i_atom] + if atom_name[1] == 'H' + continue + elseif in(atom_name, ATOM_NAME_LIST_DP) + push!(cg_DP_idx, i_atom) + elseif in(atom_name, ATOM_NAME_LIST_DS) + push!(cg_DS_idx, i_atom) + elseif atom_name == ""O3'"" + tmp_atom_index_O3p = i_atom + else + push!(cg_DB_idx, i_atom) + end + end + i_resi += 1 + if i_local_index > 1 || DNA_use_5_phos + i_bead += 1 + push!(cg_residues, AACGResidue(i_resi, 
res_name, ""DP"", cg_DP_idx)) + end + i_bead += 1 + push!(cg_residues, AACGResidue(i_resi, res_name, ""DS"", cg_DS_idx)) + i_bead += 1 + push!(cg_residues, AACGResidue(i_resi, res_name, ""DB"", cg_DB_idx)) + end + elseif mol_type == MOL_RNA + for (i_local_index, i_res) in enumerate( chain.residues ) + aa_res_name = aa_residues[i_res].name + res_name = RES_NAME_RNA_DICT[aa_res_name] + cg_RP_idx = [] + cg_RS_idx = [] + cg_RB_idx = [] + for i_atom in aa_residues[i_res].atoms + atom_name = aa_atom_name[i_atom] + if atom_name[1] == 'H' + continue + elseif in(atom_name, ATOM_NAME_LIST_RP) + push!(cg_RP_idx, i_atom) + elseif in(atom_name, ATOM_NAME_LIST_RS) + push!(cg_RS_idx, i_atom) + else + push!(cg_RB_idx, i_atom) + end + end + i_resi += 1 + if i_local_index > 1 + i_bead += 1 + push!(cg_residues, AACGResidue(i_resi, res_name, ""RP"", cg_RP_idx)) + end + i_bead += 1 + push!(cg_residues, AACGResidue(i_resi, res_name, ""RS"", cg_RS_idx)) + i_bead += 1 + push!(cg_residues, AACGResidue(i_resi, res_name, ""RB"", cg_RB_idx)) + end + end + push!(cg_chains, AACGChain(i_offset_cg_particle + 1, i_bead, mol_type, seg_name)) + i_offset_cg_particle += cg_chain_length[i_chain] + i_offset_cg_residue += length(chain.residues) + end + + if verbose + for i_chain in 1:aa_num_chain + @printf("" > Chain %3d | # particles: %5d | %5d -- %5d \n"", + i_chain, cg_chain_length[i_chain], + cg_chains[i_chain].first, cg_chains[i_chain].last) + end + + println(""------------------------------------------------------------"") + println("" In total: $(cg_num_particles) CG particles."") + end + + + + + # ========================================================================= + # ____ ____ _____ ___ ____ ___ _ ___ ______ __ + # / ___/ ___| |_ _/ _ \| _ \ / _ \| | / _ \ / ___\ \ / / + # | | | | _ | || | | | |_) | | | | | | | | | | _ \ V / + # | |__| |_| | | || |_| | __/| |_| | |__| |_| | |_| | | | + # \____\____| |_| \___/|_| \___/|_____\___/ \____| |_| + # + # 
========================================================================= + + # ======================================== + # Coarse Grained Model Topology Structures + # ======================================== + + cg_resid_name = fill("" "", cg_num_particles) + cg_resid_index = zeros(Int, cg_num_particles) + cg_bead_name = fill("" "", cg_num_particles) + cg_bead_type = fill("" "", cg_num_particles) + cg_bead_charge = zeros(Float64, cg_num_particles) + cg_bead_mass = zeros(Float64, cg_num_particles) + cg_bead_coor = zeros(Float64, (3, cg_num_particles)) + cg_chain_id = zeros(Int, cg_num_particles) + cg_seg_name = fill("" "", cg_num_particles) + + # protein + top_cg_pro_bonds = Vector{CGTopBond}(undef, 0) + top_cg_pro_angles = Vector{CGTopAngle}(undef, 0) + top_cg_pro_dihedrals = Vector{CGTopDihedral}(undef, 0) + top_cg_pro_aicg13 = Vector{CGTopAngle}(undef, 0) + top_cg_pro_aicg14 = Vector{CGTopDihedral}(undef, 0) + top_cg_pro_go_contact = [[] for i in 1:cg_num_particles] + + param_cg_pro_e_13 = [] + param_cg_pro_e_14 = [] + + AICG2p_flexible_local = [] + AICG2p_flexible_nonlocal = [] + HPS_IDR_region = [] + KH_IDR_region = [] + + # DNA + top_cg_DNA_bonds = Vector{CGTopBond}(undef, 0) + top_cg_DNA_angles = Vector{CGTopAngle}(undef, 0) + top_cg_DNA_dih_Gaussian = Vector{CGTopDihedral}(undef, 0) + top_cg_DNA_dih_periodic = Vector{CGTopDihedral}(undef, 0) + param_cg_DNA_k_angles = [] + + # RNA + top_cg_RNA_bonds = Vector{CGTopBond}(undef, 0) + top_cg_RNA_angles = Vector{CGTopAngle}(undef, 0) + top_cg_RNA_dihedrals = Vector{CGTopDihedral}(undef, 0) + # top_cg_RNA_base_stack = Vector{CGTopContact}(undef, 0) + # top_cg_RNA_base_pair = Vector{CGTopContact}(undef, 0) + # top_cg_RNA_other_contact = Vector{CGTopContact}(undef, 0) + top_cg_RNA_base_stack = [[] for i in 1:cg_num_particles] + top_cg_RNA_base_pair = [[] for i in 1:cg_num_particles] + top_cg_RNA_other_contact = [[] for i in 1:cg_num_particles] + param_cg_RNA_k_bonds = [] + param_cg_RNA_k_angles = [] + 
param_cg_RNA_k_dihedrals = [] + # param_cg_RNA_e_base_stack = [] + # param_cg_RNA_e_base_pair = [] + # param_cg_RNA_e_other_contact = [] + + # protein-DNA + top_cg_pro_DNA_pwmcos = Vector{CGTopPWMcos}(undef, 0) + top_cg_pro_DNA_pwmcosns = Vector{CGTopPWMcos}(undef, 0) + # top_cg_pro_DNA_contact = Vector{CGTopContact}(undef, 0) + top_cg_pro_DNA_contact = [[] for i in 1:cg_num_particles] + + # protein-RNA + # top_cg_pro_RNA_contact = Vector{CGTopContact}(undef, 0) + # param_cg_pro_RNA_e_contact = [] + top_cg_pro_RNA_contact = [[] for i in 1:cg_num_particles] + + # -------------------- + # geometric properties + # -------------------- + # center of geometry + # geo_centroid = zeros(Float64, (3, aa_num_chain)) + # radius of gyration + geo_radius_of_gyration = zeros(Float64, aa_num_chain) + # radius of circumsphere + geo_radius_of_circumsphere = zeros(Float64, aa_num_chain) + + # ========================================================= + # Step 4: Determine CG particles and geometry of each chain + # ========================================================= + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): determine protein/DNA/RNA chais."") + end + + # ------- + # protein + # ------- + + if num_chain_pro > 0 + if verbose + println(""------------------------------------------------------------"") + println(""> PROTEIN: determine CA mass, charge, and coordinates."") + end + + for i_chain in 1:aa_num_chain + chain = cg_chains[i_chain] + + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / aa_num_chain * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / aa_num_chain * 100) + end + + if chain.moltype != MOL_PROTEIN + continue + end + + for i_res in chain.first : chain.last + res_name = cg_residues[i_res].res_name + for i_atom in cg_residues[i_res].atoms + if aa_atom_name[i_atom] == 
""CA"" + cg_resid_name[i_res] = res_name + cg_resid_index[i_res] = cg_residues[i_res].res_idx + cg_bead_name[i_res] = ""CA"" + cg_bead_type[i_res] = res_name + cg_bead_charge[i_res] = RES_CHARGE_DICT[res_name] + cg_bead_mass[i_res] = RES_MASS_DICT[res_name] + cg_bead_coor[:, i_res] = aa_coor[:, i_atom] + cg_chain_id[i_res] = i_chain + cg_seg_name[i_res] = chain.segname + break + end + end + end + end + + if length(protein_charge_filename) > 0 + try + for line in eachline(protein_charge_filename) + charge_data = split(line) + if length(charge_data) < 1 + continue + end + i = parse(Int, charge_data[1]) + c = parse(Float64, charge_data[2]) + cg_bead_charge[i] = c + end + catch e + println(e) + error(""ERROR in user-defined charge distribution.\n"") + end + end + end + + # --- + # DNA + # --- + + if num_chain_DNA > 0 + + if verbose + println(""\n------------------------------------------------------------"") + println(""> DNA: determine P, S, B mass, charge, and coordinates."") + end + + for i_chain in 1 : aa_num_chain + chain = cg_chains[i_chain] + + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / aa_num_chain * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / aa_num_chain * 100) + end + + if chain.moltype != MOL_DNA + continue + end + + for i_res in chain.first : chain.last + res_name = cg_residues[i_res].res_name + bead_name = cg_residues[i_res].atm_name + bead_type = bead_name == ""DP"" || bead_name == ""DS"" ? 
bead_name : res_name + bead_coor = compute_center_of_mass(cg_residues[i_res].atoms, aa_atom_name, aa_coor) + cg_resid_name[i_res] = res_name + cg_resid_index[i_res] = cg_residues[i_res].res_idx + cg_bead_name[i_res] = bead_name + cg_bead_type[i_res] = bead_type + cg_bead_charge[i_res] = RES_CHARGE_DICT[bead_type] + cg_bead_mass[i_res] = RES_MASS_DICT[bead_type] + cg_bead_coor[:, i_res] = bead_coor[:] + cg_chain_id[i_res] = i_chain + cg_seg_name[i_res] = chain.segname + end + end + end + + # --- + # RNA + # --- + + if num_chain_RNA > 0 + + if verbose + println(""\n------------------------------------------------------------"") + println(""> RNA: determine P, S, B mass, charge, and coordinates."") + end + + for i_chain in 1 : aa_num_chain + chain = cg_chains[i_chain] + + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / aa_num_chain * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / aa_num_chain * 100) + end + + if chain.moltype != MOL_RNA + continue + end + + for i_res in chain.first : chain.last + res_name = cg_residues[i_res].res_name + bead_name = cg_residues[i_res].atm_name + bead_type = bead_name == ""RP"" || bead_name == ""RS"" ? 
bead_name : res_name + # bead_coor = compute_center_of_mass(cg_residues[i_res].atoms, aa_atom_name, aa_coor) + cg_resid_name[i_res] = res_name + cg_resid_index[i_res] = cg_residues[i_res].res_idx + cg_bead_name[i_res] = bead_name + cg_bead_type[i_res] = bead_type + cg_bead_charge[i_res] = RES_CHARGE_DICT[bead_type] + cg_bead_mass[i_res] = RES_MASS_DICT[bead_type] + cg_chain_id[i_res] = i_chain + cg_seg_name[i_res] = chain.segname + if bead_name == ""RP"" + for i_atom in cg_residues[i_res].atoms + if aa_atom_name[i_atom][1] == 'P' + bead_coor = aa_coor[:, i_atom] + end + end + elseif bead_name == ""RS"" + total_mass = 0 + tmp_coor = zeros(Float64, 3) + for i_atom in cg_residues[i_res].atoms + a_name = aa_atom_name[i_atom] + if in(a_name, [""C1'"", ""C2'"", ""C3'"", ""C4'"", ""O4'""] ) + a_mass = ATOM_MASS_DICT[a_name[1]] + a_coor = aa_coor[:, i_atom] + total_mass += a_mass + tmp_coor += a_coor * a_mass + end + end + bead_coor = tmp_coor / total_mass + elseif bead_name == ""RB"" + if res_name[end] == 'A' || res_name[end] == 'G' + for i_atom in cg_residues[i_res].atoms + if aa_atom_name[i_atom] == ""N1"" + bead_coor = aa_coor[:, i_atom] + end + end + else + for i_atom in cg_residues[i_res].atoms + if aa_atom_name[i_atom] == ""N3"" + bead_coor = aa_coor[:, i_atom] + end + end + end + end + cg_bead_coor[:, i_res] = bead_coor[:] + end + end + end + + # =================== + # Geometry properties + # =================== + if verbose + println(""\n------------------------------------------------------------"") + println(""> determine Centroid, Rg, Rc..."") + end + + for i_chain in 1:aa_num_chain + chain = cg_chains[i_chain] + + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / aa_num_chain * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / aa_num_chain * 100) + end + + # centroid + coor_centroid = zeros(Float64, 3) + for i_res in chain.first : chain.last + 
coor_centroid += cg_bead_coor[:, i_res] + end + coor_centroid /= (chain.last - chain.first + 1) + # geo_centroid[:, i_chain] = coor_centroid + + # Rg + tmp_dist = 0 + tmp_dist_sq_sum = 0 + for i_res in chain.first : chain.last + vec_from_center = cg_bead_coor[:, i_res] - coor_centroid + vec_norm_tmp = norm(vec_from_center) + tmp_dist = vec_norm_tmp > tmp_dist ? vec_norm_tmp : tmp_dist + tmp_dist_sq_sum += vec_norm_tmp * vec_norm_tmp + end + rg = sqrt(tmp_dist_sq_sum / (chain.last - chain.first + 1)) + rc = tmp_dist + geo_radius_of_gyration[i_chain] = rg + geo_radius_of_circumsphere[i_chain] = rc + end + + if verbose + println(""\n> ... geometric properties : DONE!"") + end + + + # ==================================== + # Cell lists for contact determination + # ================================================ + # ____ _____ _ _ _ ___ ____ _____ + # / ___| ____| | | | | | |_ _/ ___|_ _| + # | | | _| | | | | | | | |\___ \ | | + # | |___| |___| |___| |___ | |___ | | ___) || | + # \____|_____|_____|_____| |_____|___|____/ |_| + # + # ================================================ + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): Cell list construction."") + end + + if verbose + println(""------------------------------------------------------------"") + println(""> $(i_step).1: Cell Division."") + println("" - - - - - - - - - - - - - - - - - - - - - - - -"") + println(""> $(i_step).1.1: Determine system size."") + end + + sys_size_lower = minimum(cg_bead_coor, dims=2) .- 1.0 + sys_size_upper = maximum(cg_bead_coor, dims=2) .+ 1.0 + sys_size_3d = sys_size_upper - sys_size_lower + + if verbose + println("" - - - - - - - - - - - - - - - - - - - - - - - -"") + println(""> $(i_step).1.2: Determine cell size."") + end + + cell_num_3d = max.(Int.(ceil.(sys_size_3d / ( 18.0 + AICG_GO_ATOMIC_CUTOFF ))) .- 1, 1) + cell_num_all = prod(cell_num_3d) + cell_size_3d = sys_size_3d ./ cell_num_3d + + if 
verbose + println("" - - - - - - - - - - - - - - - - - - - - - - - -"") + println(""> $(i_step).1.3: Prepare cells."") + end + + cell_neighbors = [[] for i in 1:cell_num_all] + for i in 1:cell_num_3d[1] + for j in 1:cell_num_3d[2] + for k in 1:cell_num_3d[3] + box_indx = (i - 1) * cell_num_3d[2] * cell_num_3d[3] + (j -1) * cell_num_3d[3] + k + for n_i in max(1, i - 1):min(cell_num_3d[1], i + 1) + for n_j in max(1, j - 1):min(cell_num_3d[2], j + 1) + for n_k in max(1, k - 1):min(cell_num_3d[3], k + 1) + nb_box_indx = (n_i - 1) * cell_num_3d[2] * cell_num_3d[3] + (n_j -1) * cell_num_3d[3] + n_k + push!(cell_neighbors[box_indx], nb_box_indx) + end + end + end + end + end + end + + # ------------------------ + # put particles into cells + # ------------------------ + if verbose + println(""------------------------------------------------------------"") + println(""> $(i_step).2: Put particles into cells."") + end + + cell_particles = [[] for i in 1:cell_num_all] + cell_index_cg_bead = [0 for i in 1:cg_num_particles] + for i_chain in 1:aa_num_chain + chain = cg_chains[i_chain] + + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / aa_num_chain * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / aa_num_chain * 100) + end + + for i_res in chain.first : chain.last + x, y, z = cg_bead_coor[:, i_res] .- sys_size_lower + bi = Int.(ceil.(x / cell_size_3d[1])) + bj = Int.(ceil.(y / cell_size_3d[2])) + bk = Int.(ceil.(z / cell_size_3d[3])) + box_indx = (bi - 1) * cell_num_3d[2] * cell_num_3d[3] + (bj -1) * cell_num_3d[3] + bk + + push!(cell_particles[box_indx], i_res) + cell_index_cg_bead[i_res] = box_indx + end + end + if verbose + println(""\n> ... 
cell lists : DONE!"") + end + + + + # ============================= + # Step 4: CG model for proteins + # ============================= + # _ _ + # _ __ _ __ ___ | |_ ___(_)_ __ + # | '_ \| '__/ _ \| __/ _ \ | '_ \ + # | |_) | | | (_) | || __/ | | | | + # | .__/|_| \___/ \__\___|_|_| |_| + # |_| + # + # ================================= + + num_cg_pro_contact_all = 0 + num_cg_pro_contact_intra = 0 + num_cg_pro_contact_inter = 0 + + if num_chain_pro > 0 + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): processing proteins."") + end + + # ----------------------------- + # Step 4.2: CG protein topology + # ----------------------------- + if verbose + println(""------------------------------------------------------------"") + println(""> $(i_step).1: CG protein topology."") + println("" - - - - - - - - - - - - - - - - - - - - - - - -"") + println(""> $(i_step).1.1: CG protein local interactions."") + end + for i_chain in 1:aa_num_chain + chain = cg_chains[i_chain] + + if chain.moltype != MOL_PROTEIN + continue + end + + for i_res in chain.first : chain.last - 1 + coor1 = cg_bead_coor[:, i_res] + coor2 = cg_bead_coor[:, i_res + 1] + dist12 = compute_distance(coor1, coor2) + tmp_top_bond = CGTopBond(i_res, i_res + 1, dist12) + push!(top_cg_pro_bonds, tmp_top_bond) + if verbose && ( dist12 > 4.0 || dist12 < 3.6 ) + errmsg = @sprintf(""WARNING: abnormal bond length in chain %d, residue %d %s and %d %s, r0 = %8.3f"", + i_chain, + i_res, cg_bead_name[i_res], + i_res + 1, cg_bead_name[i_res + 1], + dist12) + println(errmsg) + end + end + end + if verbose + println(""> ... 
Bond: DONE!"") + end + + e_ground_local = 0.0 + e_ground_13 = 0.0 + num_angle = 0 + for i_chain in 1:aa_num_chain + chain = cg_chains[i_chain] + + if chain.moltype != MOL_PROTEIN + continue + end + + for i_res in chain.first : chain.last - 2 + coor1 = cg_bead_coor[:, i_res] + coor2 = cg_bead_coor[:, i_res + 1] + coor3 = cg_bead_coor[:, i_res + 2] + dist13 = compute_distance(coor1, coor3) + angle123 = compute_angle(coor1, coor2, coor3) + tmp_top_angle = CGTopAngle(i_res, i_res + 1, i_res + 2, angle123) + push!(top_cg_pro_angles, tmp_top_angle) + tmp_top_angle = CGTopAngle(i_res, i_res + 1, i_res + 2, dist13) + push!(top_cg_pro_aicg13, tmp_top_angle) + + if ff_pro == FF_pro_AICG2p + # count AICG2+ 1-3 interaction atomic contact + contact_counts = count_aicg_atomic_contact(cg_residues[ i_res ].atoms, + cg_residues[ i_res + 2 ].atoms, + cg_resid_name[i_res], + cg_resid_name[i_res + 2], + aa_atom_name, + aa_coor) + + # calculate AICG2+ 1-3 interaction pairwise energy + e_local = dot(AICG_PAIRWISE_ENERGY, contact_counts) + if e_local > AICG_ENE_UPPER_LIM + e_local = AICG_ENE_UPPER_LIM + end + if e_local < AICG_ENE_LOWER_LIM + e_local = AICG_ENE_LOWER_LIM + end + e_ground_local += e_local + e_ground_13 += e_local + num_angle += 1 + push!(param_cg_pro_e_13, e_local) + end + end + end + if verbose + println(""> ... 
Angle: DONE!"") + end + + e_ground_14 = 0.0 + num_dih = 0 + for i_chain in 1:aa_num_chain + chain = cg_chains[i_chain] + + if chain.moltype != MOL_PROTEIN + continue + end + + for i_res in chain.first : chain.last - 3 + coor1 = cg_bead_coor[:, i_res] + coor2 = cg_bead_coor[:, i_res + 1] + coor3 = cg_bead_coor[:, i_res + 2] + coor4 = cg_bead_coor[:, i_res + 3] + dihed = compute_dihedral(coor1, coor2, coor3, coor4) + tmp_top_dihe = CGTopDihedral(i_res, i_res + 1, i_res + 2, i_res + 3, dihed) + push!(top_cg_pro_dihedrals, tmp_top_dihe) + push!(top_cg_pro_aicg14, tmp_top_dihe) + + if ff_pro == FF_pro_AICG2p + # count AICG2+ dihedral atomic contact + contact_counts = count_aicg_atomic_contact(cg_residues[ i_res ].atoms, + cg_residues[ i_res + 3 ].atoms, + cg_resid_name[i_res], + cg_resid_name[i_res + 3], + aa_atom_name, + aa_coor) + + # calculate AICG2+ dihedral pairwise energy + e_local = dot(AICG_PAIRWISE_ENERGY, contact_counts) + if e_local > AICG_ENE_UPPER_LIM + e_local = AICG_ENE_UPPER_LIM + end + if e_local < AICG_ENE_LOWER_LIM + e_local = AICG_ENE_LOWER_LIM + end + e_ground_local += e_local + e_ground_14 += e_local + num_dih += 1 + push!(param_cg_pro_e_14, e_local) + end + end + end + if verbose + println(""> ... 
Dihedral: DONE!"") + end + + # ------------------------ + # Normalize local energies + # ------------------------ + if ff_pro == FF_pro_AICG2p + e_ground_local /= (num_angle + num_dih) + e_ground_13 /= num_angle + e_ground_14 /= num_dih + + if aicg_scale_scheme == 0 + for i in 1:length(param_cg_pro_e_13) + param_cg_pro_e_13[i] *= AICG_13_AVE / e_ground_13 + end + for i in 1:length(param_cg_pro_e_14) + param_cg_pro_e_14[i] *= AICG_14_AVE / e_ground_14 + end + elseif aicg_scale_scheme == 1 + for i in 1:length(param_cg_pro_e_13) + param_cg_pro_e_13[i] *= -AICG_13_GEN + end + for i in 1:length(param_cg_pro_e_14) + param_cg_pro_e_14[i] *= -AICG_14_GEN + end + end + end + + # ----------------------- + # Go type native contacts + # ----------------------- + if verbose + println("" - - - - - - - - - - - - - - - - - - - - - - - -"") + println(""> $(i_step).1.2: Looking for native contacts."") + end + + # intra-molecular contacts + if verbose + @printf(""%11s Calculating intra-molecular contacts... \n"", "" "") + @printf("" ... chain : %32s"", "" "") + end + for i_chain in 1:aa_num_chain + + if do_test_local_only + continue + end + + chain = cg_chains[i_chain] + + # ----------------- + # show progress bar + # ----------------- + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / aa_num_chain * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / aa_num_chain * 100) + end + + if chain.moltype != MOL_PROTEIN + continue + end + + if chain.last - chain.first < 100 + Threads.@threads for i_res in chain.first : chain.last - 4 + cell_i = cell_index_cg_bead[i_res] + coor_cai = cg_bead_coor[:, i_res] + neighbor_cell_i = cell_neighbors[cell_i] + for j_res in i_res + 4 : chain.last + cell_j = cell_index_cg_bead[j_res] + if ! 
(cell_j in neighbor_cell_i) + continue + end + coor_caj = cg_bead_coor[:, j_res] + if is_protein_native_contact(cg_residues[i_res].atoms, cg_residues[j_res].atoms, aa_atom_name, aa_coor) + native_dist = compute_distance(coor_cai, coor_caj) + num_cg_pro_contact_all += 1 + num_cg_pro_contact_intra += 1 + + e_local = 0.0 + if ff_pro == FF_pro_AICG2p + # count AICG2+ atomic contact + contact_counts = count_aicg_atomic_contact(cg_residues[ i_res ].atoms, + cg_residues[ j_res ].atoms, + cg_resid_name[i_res], + cg_resid_name[j_res], + aa_atom_name, + aa_coor) + + # calculate AICG2+ pairwise energy + e_local = dot(AICG_PAIRWISE_ENERGY, contact_counts) + if e_local > AICG_ENE_UPPER_LIM + e_local = AICG_ENE_UPPER_LIM + end + if e_local < AICG_ENE_LOWER_LIM + e_local = AICG_ENE_LOWER_LIM + end + end + push!(top_cg_pro_go_contact[i_res], [j_res, native_dist, e_local]) + end + end + end + else + Threads.@threads for i_res in chain.first : chain.last + cell_i = cell_index_cg_bead[i_res] + coor_cai = cg_bead_coor[:, i_res] + neighbor_cell_i = cell_neighbors[cell_i] + for j_cell in neighbor_cell_i + for j_res in cell_particles[j_cell] + if cg_chain_id[j_res] != i_chain + continue + end + if j_res < i_res + 4 + continue + end + coor_caj = cg_bead_coor[:, j_res] + if is_protein_native_contact(cg_residues[i_res].atoms, cg_residues[j_res].atoms, aa_atom_name, aa_coor) + native_dist = compute_distance(coor_cai, coor_caj) + num_cg_pro_contact_all += 1 + num_cg_pro_contact_inter += 1 + + e_local = 0.0 + if ff_pro == FF_pro_AICG2p + # count AICG2+ atomic contact + contact_counts = count_aicg_atomic_contact(cg_residues[ i_res ].atoms, + cg_residues[ j_res ].atoms, + cg_resid_name[i_res], + cg_resid_name[j_res], + aa_atom_name, + aa_coor) + + # calculate AICG2+ pairwise energy + e_local = dot(AICG_PAIRWISE_ENERGY, contact_counts) + if e_local > AICG_ENE_UPPER_LIM + e_local = AICG_ENE_UPPER_LIM + end + if e_local < AICG_ENE_LOWER_LIM + e_local = AICG_ENE_LOWER_LIM + end + end + 
push!(top_cg_pro_go_contact[i_res], [j_res, native_dist, e_local]) + end + end + end + end + end + end + if verbose + print(""\n ... intra-molecular contacts: DONE! \n"") + end + + # inter-molecular ( protein-protein ) contacts + if num_chain_pro > 1 && !do_test_local_only + if verbose + @printf(""%11s Calculating inter-molecular contacts... \n"", "" "") + @printf("" ... progress: %32s"", "" "") + end + for i_chain in 1 : aa_num_chain - 1 + chain1 = cg_chains[i_chain] + + # ----------------- + # show progress bar + # ----------------- + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / ( aa_num_chain - 1 ) * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / ( aa_num_chain - 1 ) * 100) + end + # ------------------ + + if chain1.moltype != MOL_PROTEIN + continue + end + + Threads.@threads for i_res in chain1.first : chain1.last + cell_i = cell_index_cg_bead[i_res] + coor_cai = cg_bead_coor[:, i_res] + neighbor_cell_i = cell_neighbors[cell_i] + for j_cell in neighbor_cell_i + for j_res in cell_particles[j_cell] + if j_res < i_res + continue + end + if cg_chains[cg_chain_id[j_res]].moltype != MOL_PROTEIN + continue + end + if cg_chain_id[j_res] == i_chain + continue + end + coor_caj = cg_bead_coor[:, j_res] + if is_protein_native_contact(cg_residues[i_res].atoms, cg_residues[j_res].atoms, aa_atom_name, aa_coor) + native_dist = compute_distance(coor_cai, coor_caj) + num_cg_pro_contact_all += 1 + num_cg_pro_contact_inter += 1 + + e_local = 0.0 + if ff_pro == FF_pro_AICG2p + # count AICG2+ atomic contact + contact_counts = count_aicg_atomic_contact(cg_residues[ i_res ].atoms, + cg_residues[ j_res ].atoms, + cg_resid_name[i_res], + cg_resid_name[j_res], + aa_atom_name, + aa_coor) + + # calculate AICG2+ pairwise energy + e_local = dot(AICG_PAIRWISE_ENERGY, contact_counts) + if e_local > AICG_ENE_UPPER_LIM + e_local = AICG_ENE_UPPER_LIM + end + if e_local < 
AICG_ENE_LOWER_LIM + e_local = AICG_ENE_LOWER_LIM + end + end + push!(top_cg_pro_go_contact[i_res], [j_res, native_dist, e_local]) + end + end + end + end + end + if verbose + print(""\n ... inter-molecular contacts: DONE! \n"") + end + end + + if ff_pro == FF_pro_AICG2p + e_ground_contact = 0.0 + num_contact = 0 + # count num of contacts, sum up e_ground_contact + for i_res in 1:cg_num_particles + for cntct_tmp in top_cg_pro_go_contact[i_res] + num_contact += 1 + e_ground_contact += cntct_tmp[3] + end + end + # normalize + if num_contact > 0 + e_ground_contact /= num_contact + else + e_ground_contact = 0 + end + + if aicg_scale_scheme == 0 + for i in 1:cg_num_particles + for cntct_tmp in top_cg_pro_go_contact[i] + cntct_tmp[3] *= AICG_CONTACT_AVE / e_ground_contact + end + end + elseif aicg_scale_scheme == 1 + for i in 1:cg_num_particles + for cntct_tmp in top_cg_pro_go_contact[i] + cntct_tmp[3] *= -AICG_CONTACT_GEN + end + end + end + end + + if verbose + println(""------------------------------------------------------------"") + @printf("" > Total number of protein contacts: %12d \n"", + sum( length.( top_cg_pro_go_contact ) )) + end + + end + + # =============================== + # Intrinsically disordered region + # =============================== + if haskey(args, ""modeling-options"") + has_toml_mod = true + ff_detail_config = args[""modeling-options""] + + if haskey(ff_detail_config, ""IDR"") + if haskey(ff_detail_config[""IDR""], ""AICG2p_IDR_local"") + index_string = ff_detail_config[""IDR""][""AICG2p_IDR_local""] + AICG2p_flexible_local = parse_selection(index_string) + end + if haskey(ff_detail_config[""IDR""], ""AICG2p_IDR_nonlocal"") + index_string = ff_detail_config[""IDR""][""AICG2p_IDR_nonlocal""] + AICG2p_flexible_nonlocal = parse_selection(index_string) + end + if haskey(ff_detail_config[""IDR""], ""HPS_region"") + index_string = ff_detail_config[""IDR""][""HPS_region""] + HPS_IDR_region = parse_selection(index_string) + end + if 
haskey(ff_detail_config[""IDR""], ""KH_region"") + index_string = ff_detail_config[""IDR""][""KH_region""] + KH_IDR_region = parse_selection(index_string) + end + end + end + + + + # ============================= + # Step 5: 3SPN.2C model for DNA + # ============================= + # _ + # __| |_ __ __ _ + # / _` | '_ \ / _` | + # | (_| | | | | (_| | + # \__,_|_| |_|\__,_| + # + # ============================= + + if num_chain_DNA > 0 + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): processing DNA."") + end + + # --------------------------------- + # Step 5.1: 3SPN.2C topology + # --------------------------------- + if verbose + println(""------------------------------------------------------------"") + println(""> $(i_step).1: 3SPN.2C topology."") + println("" - - - - - - - - - - - - - - - - - - - - - - - -"") + println(""> $(i_step).1.1: 3SPN.2C local interactions."") + end + for i_chain in 1:aa_num_chain + + chain = cg_chains[i_chain] + + if chain.moltype != MOL_DNA + continue + end + + if verbose + @printf(""%11s Calculating DNA strand %d ... \n"", "" "", i_chain) + @printf("" ... 
progress: %32s"", "" "") + end + + if DNA_circular + DNA_basetype_pre = cg_resid_name[chain.last][end] + DNA_basetype_post = cg_resid_name[chain.first][end] + else + DNA_basetype_pre = ""X"" + end + if gen_3spn_itp == 1 + + for i_res in chain.first : chain.last + + # ----------------- + # show progress bar + # ----------------- + if verbose + print(""\b""^32) + progress_percent = trunc(Int, ( i_res - chain.first ) / ( chain.last - chain.first ) * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, progress_percent * 5) + end + # ------------------ + + if cg_bead_name[i_res] == ""DS"" + # bond S--B + coor_s = cg_bead_coor[:, i_res] + coor_b = cg_bead_coor[:, i_res + 1] + r_sb = compute_distance(coor_s, coor_b) + tmp_top_bond = CGTopBond(i_res, i_res + 1, r_sb) + push!(top_cg_DNA_bonds, tmp_top_bond) + if i_res + 3 < chain.last + # bond S--P+1 + coor_p3 = cg_bead_coor[:, i_res + 2] + r_sp3 = compute_distance(coor_s, coor_p3) + tmp_top_bond = CGTopBond(i_res, i_res + 2, r_sp3) + push!(top_cg_DNA_bonds, tmp_top_bond) + # Angle S--P+1--S+1 + resname5 = cg_resid_name[i_res][end] + resname3 = cg_resid_name[i_res + 3][end] + coor_s3 = cg_bead_coor[:, i_res + 3] + ang_sp3s3 = compute_angle(coor_s, coor_p3, coor_s3) + k = get_DNA3SPN_angle_param(""SPS"", resname5 * resname3) + tmp_top_angl = CGTopAngle(i_res, i_res + 2, i_res + 3, ang_sp3s3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral S--P+1--S+1--B+1 + coor_b3 = cg_bead_coor[:, i_res + 4] + dih_sp3s3b3 = compute_dihedral(coor_s, coor_p3, coor_s3, coor_b3) + tmp_top_dih = CGTopDihedral(i_res, i_res + 2, i_res + 3, i_res + 4, dih_sp3s3b3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + # Dihedral S--P+1--S+1--P+2 + if i_res + 6 < chain.last + coor_p33 = cg_bead_coor[:, i_res + 5] + dih_sp3s3p33 = compute_dihedral(coor_s, coor_p3, coor_s3, coor_p33) + tmp_top_dih = CGTopDihedral(i_res, i_res + 2, i_res + 3, 
i_res + 5, dih_sp3s3p33) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + tmp_top_dih = CGTopDihedral(i_res, i_res + 2, i_res + 3, i_res + 5, dih_sp3s3p33) + push!(top_cg_DNA_dih_Gaussian, tmp_top_dih) + end + end + elseif cg_bead_name[i_res] == ""DP"" + # bond P--S + coor_p = cg_bead_coor[:, i_res] + coor_s = cg_bead_coor[:, i_res + 1] + r_ps = compute_distance(coor_p, coor_s) + tmp_top_bond = CGTopBond(i_res, i_res + 1, r_ps) + push!(top_cg_DNA_bonds, tmp_top_bond) + # angle P--S--B + resname5 = i_res > chain.first ? cg_resid_name[i_res - 1][end] : DNA_basetype_pre + # resname5 = cg_resid_name[i_res - 1][end] + resname3 = cg_resid_name[i_res + 2][end] + coor_b = cg_bead_coor[:, i_res + 2] + ang_psb = compute_angle(coor_p, coor_s, coor_b) + k = get_DNA3SPN_angle_param(""PSB"", resname5 * resname3) + tmp_top_angl = CGTopAngle(i_res, i_res + 1, i_res + 2, ang_psb) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + if i_res + 4 < chain.last + # angle P--S--P+1 + coor_p3 = cg_bead_coor[:, i_res + 3] + ang_psp3 = compute_angle(coor_p, coor_s, coor_p3) + k = get_DNA3SPN_angle_param(""PSP"", ""all"") + tmp_top_angl = CGTopAngle(i_res, i_res + 1, i_res + 3, ang_psp3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral P--S--P+1--S+1 + coor_s3 = cg_bead_coor[:, i_res + 4] + dih_psp3s3 = compute_dihedral(coor_p, coor_s, coor_p3, coor_s3) + tmp_top_dih = CGTopDihedral(i_res, i_res + 1, i_res + 3, i_res + 4, dih_psp3s3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + tmp_top_dih = CGTopDihedral(i_res, i_res + 1, i_res + 3, i_res + 4, dih_psp3s3) + push!(top_cg_DNA_dih_Gaussian, tmp_top_dih) + end + elseif cg_bead_name[i_res] == ""DB"" + if i_res + 2 < chain.last + # angle B--S--P+1 + resname5 = cg_resid_name[i_res][end] + resname3 = cg_resid_name[i_res + 1][end] + coor_b = cg_bead_coor[:, i_res] + coor_s = cg_bead_coor[:, i_res - 1] + coor_p3 = cg_bead_coor[:, i_res + 1] + ang_bsp3 = compute_angle(coor_b, coor_s, 
coor_p3) + k = get_DNA3SPN_angle_param(""BSP"", resname5 * resname3) + tmp_top_angl = CGTopAngle(i_res, i_res - 1, i_res + 1, ang_bsp3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral B--S--P+1--S+1 + coor_s3 = cg_bead_coor[:, i_res + 2] + dih_bsp3s3 = compute_dihedral(coor_b, coor_s, coor_p3, coor_s3) + tmp_top_dih = CGTopDihedral(i_res, i_res - 1, i_res + 1, i_res + 2, dih_bsp3s3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + end + else + errmsg = @sprintf(""BUG: Wrong DNA particle type in chain %d, residue %d : %s "", + i_chain, + i_res, cg_bead_name[i_res]) + error(errmsg) + end + end # for i_res + + end # if gen_3spn_itp == 1 + + if gen_3spn_itp == 2 + + for i_res in chain.first : chain.last + + # ----------------- + # show progress bar + # ----------------- + if verbose + print(""\b""^32) + progress_percent = trunc(Int, ( i_res - chain.first ) / ( chain.last - chain.first ) * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, progress_percent * 5) + end + # ------------------ + + if cg_bead_name[i_res] == ""DS"" + # bond S--B + resname5 = cg_resid_name[i_res][end] + r_sb = get_DNA3SPN_bond_length(""SB"", resname5 * "" "") + tmp_top_bond = CGTopBond(i_res, i_res + 1, r_sb) + push!(top_cg_DNA_bonds, tmp_top_bond) + if i_res + 3 < chain.last + resname3 = cg_resid_name[i_res + 3][end] + base_step = resname5 * resname3 + # bond S--P+1 + r_sp3 = get_DNA3SPN_bond_length(""SP"", base_step) + tmp_top_bond = CGTopBond(i_res, i_res + 2, r_sp3) + push!(top_cg_DNA_bonds, tmp_top_bond) + # Angle S--P+1--S+1 + ang_sp3s3 = get_DNA3SPN_angle_equilibrium(""SPS"", base_step) + k = get_DNA3SPN_angle_param(""SPS"", base_step) + tmp_top_angl = CGTopAngle(i_res, i_res + 2, i_res + 3, ang_sp3s3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral S--P+1--S+1--B+1 + dih_sp3s3b3 = get_DNA3SPN_dihedral_equilibrium(""SPSB"", base_step) 
+ tmp_top_dih = CGTopDihedral(i_res, i_res + 2, i_res + 3, i_res + 4, dih_sp3s3b3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + # Dihedral S--P+1--S+1--P+2 + if i_res + 6 < chain.last + base_two_steps = base_step * cg_resid_name[i_res + 5][end] + dih_sp3s3p33 = get_DNA3SPN_dihedral_equilibrium(""SPSP"", base_two_steps) + tmp_top_dih = CGTopDihedral(i_res, i_res + 2, i_res + 3, i_res + 5, dih_sp3s3p33) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + push!(top_cg_DNA_dih_Gaussian, tmp_top_dih) + elseif DNA_circular + base_two_steps = base_step * DNA_basetype_post + dih_sp3s3p33 = get_DNA3SPN_dihedral_equilibrium(""SPSP"", base_two_steps) + tmp_top_dih = CGTopDihedral(i_res, i_res + 2, i_res + 3, chain.first, dih_sp3s3p33) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + push!(top_cg_DNA_dih_Gaussian, tmp_top_dih) + end + elseif DNA_circular + resname3 = DNA_basetype_post + base_step = resname5 * resname3 + # bond S--P+1 + r_sp3 = get_DNA3SPN_bond_length(""SP"", base_step) + tmp_top_bond = CGTopBond(i_res, chain.first, r_sp3) + push!(top_cg_DNA_bonds, tmp_top_bond) + # Angle S--P+1--S+1 + ang_sp3s3 = get_DNA3SPN_angle_equilibrium(""SPS"", base_step) + k = get_DNA3SPN_angle_param(""SPS"", base_step) + tmp_top_angl = CGTopAngle(i_res, chain.first, chain.first + 1, ang_sp3s3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral S--P+1--S+1--B+1 + dih_sp3s3b3 = get_DNA3SPN_dihedral_equilibrium(""SPSB"", base_step) + tmp_top_dih = CGTopDihedral(i_res, chain.first, chain.first + 1, chain.first + 2, dih_sp3s3b3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + # Dihedral S--P+1--S+1--P+2 + base_two_steps = base_step * cg_resid_name[chain.first + 3][end] + dih_sp3s3p33 = get_DNA3SPN_dihedral_equilibrium(""SPSP"", base_two_steps) + tmp_top_dih = CGTopDihedral(i_res, chain.first, chain.first + 1, chain.first + 3, dih_sp3s3p33) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + push!(top_cg_DNA_dih_Gaussian, tmp_top_dih) + end + elseif 
cg_bead_name[i_res] == ""DP"" + resname5 = i_res > chain.first ? cg_resid_name[i_res - 1][end] : DNA_basetype_pre + resname3 = cg_resid_name[i_res + 2][end] + base_step = resname5 * resname3 + # bond P--S + r_ps = get_DNA3SPN_bond_length(""PS"", base_step) + tmp_top_bond = CGTopBond(i_res, i_res + 1, r_ps) + push!(top_cg_DNA_bonds, tmp_top_bond) + # angle P--S--B + ang_psb = get_DNA3SPN_angle_equilibrium(""PSB"", base_step) + k = get_DNA3SPN_angle_param(""PSB"", base_step) + tmp_top_angl = CGTopAngle(i_res, i_res + 1, i_res + 2, ang_psb) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + if i_res + 4 < chain.last + base_two_steps = base_step * cg_resid_name[i_res + 3][end] + # angle P--S--P+1 + ang_psp3 = get_DNA3SPN_angle_equilibrium(""PSP"", base_two_steps) + k = get_DNA3SPN_angle_param(""PSP"", ""all"") + tmp_top_angl = CGTopAngle(i_res, i_res + 1, i_res + 3, ang_psp3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral P--S--P+1--S+1 + dih_psp3s3 = get_DNA3SPN_dihedral_equilibrium(""PSPS"", base_two_steps) + tmp_top_dih = CGTopDihedral(i_res, i_res + 1, i_res + 3, i_res + 4, dih_psp3s3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + push!(top_cg_DNA_dih_Gaussian, tmp_top_dih) + elseif DNA_circular + base_two_steps = base_step * DNA_basetype_post + # angle P--S--P+1 + ang_psp3 = get_DNA3SPN_angle_equilibrium(""PSP"", base_two_steps) + k = get_DNA3SPN_angle_param(""PSP"", ""all"") + tmp_top_angl = CGTopAngle(i_res, i_res + 1, chain.first, ang_psp3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral P--S--P+1--S+1 + dih_psp3s3 = get_DNA3SPN_dihedral_equilibrium(""PSPS"", base_two_steps) + tmp_top_dih = CGTopDihedral(i_res, i_res + 1, chain.first, chain.first + 1, dih_psp3s3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + push!(top_cg_DNA_dih_Gaussian, tmp_top_dih) + end + elseif cg_bead_name[i_res] == ""DB"" + resname5 = cg_resid_name[i_res][end] + if i_res + 2 < 
chain.last + resname3 = cg_resid_name[i_res + 1][end] + base_step = resname5 * resname3 + # angle B--S--P+1 + ang_bsp3 = get_DNA3SPN_angle_equilibrium(""BSP"", base_step) + k = get_DNA3SPN_angle_param(""BSP"", base_step) + tmp_top_angl = CGTopAngle(i_res, i_res - 1, i_res + 1, ang_bsp3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral B--S--P+1--S+1 + dih_bsp3s3 = get_DNA3SPN_dihedral_equilibrium(""BSPS"", base_step) + tmp_top_dih = CGTopDihedral(i_res, i_res - 1, i_res + 1, i_res + 2, dih_bsp3s3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + elseif DNA_circular + resname3 = DNA_basetype_post + base_step = resname5 * resname3 + # angle B--S--P+1 + ang_bsp3 = get_DNA3SPN_angle_equilibrium(""BSP"", base_step) + k = get_DNA3SPN_angle_param(""BSP"", base_step) + tmp_top_angl = CGTopAngle(i_res, i_res - 1, chain.first, ang_bsp3) + push!(top_cg_DNA_angles, tmp_top_angl) + push!(param_cg_DNA_k_angles, k) + # Dihedral B--S--P+1--S+1 + dih_bsp3s3 = get_DNA3SPN_dihedral_equilibrium(""BSPS"", base_step) + tmp_top_dih = CGTopDihedral(i_res, i_res - 1, chain.first, chain.first + 1, dih_bsp3s3) + push!(top_cg_DNA_dih_periodic, tmp_top_dih) + end + else + errmsg = @sprintf(""BUG: Wrong DNA particle type in chain %d, residue %d : %s "", + i_chain, + i_res, cg_bead_name[i_res]) + error(errmsg) + end + end # for i_res + + end # if gen_3spn_itp == 2 + + if verbose + print("" \n"") + end + + end # for i_chain + + if verbose + println(""> ... 
Bond, Angle, Dihedral: DONE!"") + end + end + + # ========================= + # RNA structure based model + # ========================= + # ____ _ _ _ + # | _ \| \ | | / \ + # | |_) | \| | / _ \ + # | _ <| |\ |/ ___ \ + # |_| \_\_| \_/_/ \_\ + # + # ========================= + + if num_chain_RNA > 0 + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): processing RNA."") + end + + # ------------------------- + # Step 6.1: RNA topology + # ------------------------- + if verbose + println(""------------------------------------------------------------"") + println(""> $(i_step).1: RNA topology."") + println("" - - - - - - - - - - - - - - - - - - - - - - - -"") + println(""> $(i_step).1.1: RNA local interactions."") + + @printf(""%11s Calculating bonded terms... \n"", "" "") + end + for i_chain in 1:aa_num_chain + chain = cg_chains[i_chain] + + if chain.moltype != MOL_RNA + continue + end + + for i_res in chain.first : chain.last + if cg_bead_name[i_res] == ""RS"" + # bond S--B + coor_s = cg_bead_coor[:, i_res] + coor_b = cg_bead_coor[:, i_res + 1] + r_sb = compute_distance(coor_s, coor_b) + base_type = cg_resid_name[i_res] in [""RA"", ""RG""] ? 
""R"" : ""Y"" + bond_type = ""S"" * base_type + k = RNA_BOND_K_LIST[bond_type] + tmp_top_bond = CGTopBond(i_res, i_res + 1, r_sb) + push!(top_cg_RNA_bonds, tmp_top_bond) + push!(param_cg_RNA_k_bonds, k) + # bond S--P+1 + if i_res + 2 < chain.last + coor_p3 = cg_bead_coor[:, i_res + 2] + r_sp3 = compute_distance(coor_s, coor_p3) + k = RNA_BOND_K_LIST[""SP""] + tmp_top_bond = CGTopBond(i_res, i_res + 2, r_sp3) + push!(top_cg_RNA_bonds, tmp_top_bond) + push!(param_cg_RNA_k_bonds, k) + end + if i_res + 4 <= chain.last + # Angle S--P+1--S+1 + coor_s3 = cg_bead_coor[:, i_res + 3] + ang_sp3s3 = compute_angle(coor_s, coor_p3, coor_s3) + k = RNA_ANGLE_K_LIST[""SPS""] + tmp_top_angl = CGTopAngle(i_res, i_res + 2, i_res + 3, ang_sp3s3) + push!(top_cg_RNA_angles, tmp_top_angl) + push!(param_cg_RNA_k_angles, k) + # Dihedral S--P+1--S+1--B+1 + coor_b3 = cg_bead_coor[:, i_res + 4] + dih_sp3s3b3 = compute_dihedral(coor_s, coor_p3, coor_s3, coor_b3) + base_type = cg_resid_name[i_res + 4] in [""RA"", ""RG""] ? 
""R"" : ""Y"" + dihe_type = ""SPS"" * base_type + k = RNA_DIHEDRAL_K_LIST[dihe_type] + tmp_top_dih = CGTopDihedral(i_res, i_res + 2, i_res + 3, i_res + 4, dih_sp3s3b3) + push!(top_cg_RNA_dihedrals, tmp_top_dih) + push!(param_cg_RNA_k_dihedrals, k) + end + # Dihedral S--P+1--S+1--P+2 + if i_res + 5 < chain.last + coor_p33 = cg_bead_coor[:, i_res + 5] + dih_sp3s3p33 = compute_dihedral(coor_s, coor_p3, coor_s3, coor_p33) + k = RNA_DIHEDRAL_K_LIST[""SPSP""] + tmp_top_dih = CGTopDihedral(i_res, i_res + 2, i_res + 3, i_res + 5, dih_sp3s3p33) + push!(top_cg_RNA_dihedrals, tmp_top_dih) + push!(param_cg_RNA_k_dihedrals, k) + end + elseif cg_bead_name[i_res] == ""RP"" + # bond P--S + coor_p = cg_bead_coor[:, i_res] + coor_s = cg_bead_coor[:, i_res + 1] + r_ps = compute_distance(coor_p, coor_s) + k = RNA_BOND_K_LIST[""PS""] + tmp_top_bond = CGTopBond(i_res, i_res + 1, r_ps) + push!(top_cg_RNA_bonds, tmp_top_bond) + push!(param_cg_RNA_k_bonds, k) + # angle P--S--B + coor_b = cg_bead_coor[:, i_res + 2] + ang_psb = compute_angle(coor_p, coor_s, coor_b) + base_type = cg_resid_name[i_res + 2] in [""RA"", ""RG""] ? 
""R"" : ""Y"" + angl_type = ""PS"" * base_type + k = RNA_ANGLE_K_LIST[angl_type] + tmp_top_angl = CGTopAngle(i_res, i_res + 1, i_res + 2, ang_psb) + push!(top_cg_RNA_angles, tmp_top_angl) + push!(param_cg_RNA_k_angles, k) + if i_res + 4 < chain.last + # angle P--S--P+1 + coor_p3 = cg_bead_coor[:, i_res + 3] + ang_psp3 = compute_angle(coor_p, coor_s, coor_p3) + k = RNA_ANGLE_K_LIST[""PSP""] + tmp_top_angl = CGTopAngle(i_res, i_res + 1, i_res + 3, ang_psp3) + push!(top_cg_RNA_angles, tmp_top_angl) + push!(param_cg_RNA_k_angles, k) + # Dihedral P--S--P+1--S+1 + coor_s3 = cg_bead_coor[:, i_res + 4] + dih_psp3s3 = compute_dihedral(coor_p, coor_s, coor_p3, coor_s3) + k = RNA_DIHEDRAL_K_LIST[""PSPS""] + tmp_top_dih = CGTopDihedral(i_res, i_res + 1, i_res + 3, i_res + 4, dih_psp3s3) + push!(top_cg_RNA_dihedrals, tmp_top_dih) + push!(param_cg_RNA_k_dihedrals, k) + end + elseif cg_bead_name[i_res] == ""RB"" + # add fake angles and dihedrals... + coor_b = cg_bead_coor[:, i_res] + coor_s = cg_bead_coor[:, i_res - 1] + if i_res + 1 < chain.last + # angle B--S--P+1 + coor_p3 = cg_bead_coor[:, i_res + 1] + ang_bsp3 = compute_angle(coor_b, coor_s, coor_p3) + k = 0.0 + tmp_top_angl = CGTopAngle(i_res, i_res - 1, i_res + 1, ang_bsp3) + push!(top_cg_RNA_angles, tmp_top_angl) + push!(param_cg_RNA_k_angles, k) + end + if i_res + 2 < chain.last + # Dihedral B--S--P+1--S+1 + coor_p3 = cg_bead_coor[:, i_res + 1] + coor_s3 = cg_bead_coor[:, i_res + 2] + dih_bsp3s3 = compute_dihedral(coor_b, coor_s, coor_p3, coor_s3) + k = 0.0 + tmp_top_dih = CGTopDihedral(i_res, i_res - 1, i_res + 1, i_res + 2, dih_bsp3s3) + push!(top_cg_RNA_dihedrals, tmp_top_dih) + push!(param_cg_RNA_k_dihedrals, k) + end + end + end + end + + # ----------------------- + # Go type native contacts + # ----------------------- + if verbose + println("" - - - - - - - - - - - - - - - - - - - - - - - -"") + println(""> $(i_step).1.2: RNA HT-type native contacts."") + @printf(""%11s Calculating intra-molecular contacts... 
\n"", "" "") + @printf("" ... progress: %32s"", "" "") + end + for i_chain in 1:aa_num_chain + + if do_test_local_only + continue + end + + chain = cg_chains[i_chain] + + # ----------------- + # show progress bar + # ----------------- + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / aa_num_chain * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / aa_num_chain * 100) + end + # ------------------ + + if chain.moltype != MOL_RNA + continue + end + + Threads.@threads for i_res in chain.first : chain.last - 3 + + if cg_bead_name[i_res] == ""RP"" && ! cgRNA_use_phosphate_go + continue + end + + coor_i = cg_bead_coor[:, i_res] + cell_i = cell_index_cg_bead[i_res] + neighbor_cell_i = cell_neighbors[cell_i] + + for j_cell in neighbor_cell_i + for j_res in cell_particles[j_cell] + if cg_chain_id[j_res] != i_chain + continue + end + if j_res < i_res + 3 + continue + end + if cgRNA_use_phosphate_go + if cg_bead_name[i_res] == ""RP"" || cg_bead_name[j_res] == ""RP"" + if j_res < i_res + 4 + continue + end + end + elseif cg_bead_name[j_res] == ""RP"" + continue + end + + if cg_bead_name[i_res] == ""RS"" || cg_bead_name[j_res] == ""RS"" + if j_res < i_res + 6 + continue + end + end + + coor_j = cg_bead_coor[:, j_res] + + adist, nhb = compute_RNA_native_contact(cg_residues[i_res].atoms, + cg_residues[j_res].atoms, + aa_atom_name, + aa_coor) + if adist > RNA_GO_ATOMIC_CUTOFF + continue + end + + native_dist = compute_distance(coor_i, coor_j) + + if j_res == i_res + 3 && cg_bead_name[i_res] == ""RB"" + coor_i_sug = cg_bead_coor[:, i_res - 1] + coor_j_sug = cg_bead_coor[:, j_res - 1] + st_dih = compute_dihedral(coor_i, coor_i_sug, coor_j_sug, coor_j) + if abs( st_dih ) < RNA_STACK_DIH_CUTOFF && adist < RNA_STACK_DIST_CUTOFF + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_base_stack, tmp_top_cnt) + # push!(param_cg_RNA_e_base_stack, 
RNA_STACK_EPSILON) + push!(top_cg_RNA_base_stack[i_res], [j_res, native_dist, RNA_STACK_EPSILON]) + else + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_other_contact, tmp_top_cnt) + # push!(param_cg_RNA_e_other_contact, RNA_PAIR_EPSILON_OTHER[""BB""]) + push!(top_cg_RNA_other_contact[i_res], [j_res, native_dist, RNA_PAIR_EPSILON_OTHER[""BB""]]) + end + elseif cg_bead_name[i_res] == ""RB"" && cg_bead_name[j_res] == ""RB"" + if nhb == 2 + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_base_pair, tmp_top_cnt) + # push!(param_cg_RNA_e_base_pair, RNA_BPAIR_EPSILON_2HB) + push!(top_cg_RNA_base_pair[i_res], [j_res, native_dist, RNA_BPAIR_EPSILON_2HB]) + elseif nhb >= 3 + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_base_pair, tmp_top_cnt) + # push!(param_cg_RNA_e_base_pair, RNA_BPAIR_EPSILON_3HB) + push!(top_cg_RNA_base_pair[i_res], [j_res, native_dist, RNA_BPAIR_EPSILON_3HB]) + else + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_other_contact, tmp_top_cnt) + # push!(param_cg_RNA_e_other_contact, RNA_PAIR_EPSILON_OTHER[""BB""]) + push!(top_cg_RNA_other_contact[i_res], [j_res, native_dist, RNA_PAIR_EPSILON_OTHER[""BB""]]) + end + else + contact_type = cg_bead_name[i_res][end] * cg_bead_name[j_res][end] + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_other_contact, tmp_top_cnt) + # push!(param_cg_RNA_e_other_contact, RNA_PAIR_EPSILON_OTHER[contact_type]) + push!(top_cg_RNA_other_contact[i_res], [j_res, native_dist, RNA_PAIR_EPSILON_OTHER[contact_type]]) + end + end + end + end + end + if verbose + print(""\n ... intra-molecular contacts: DONE! \n"") + end + + if num_chain_RNA > 1 && !do_test_local_only + if verbose + @printf(""%11s Calculating inter-molecular contacts... \n"", "" "") + @printf("" ... 
progress: %32s"", "" "") + end + for i_chain in 1:aa_num_chain - 1 + + chain_1 = cg_chains[i_chain] + + # ----------------- + # show progress bar + # ----------------- + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / ( aa_num_chain - 1 ) * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / ( aa_num_chain - 1 ) * 100) + end + # ------------------ + + if chain_1.moltype != MOL_RNA + continue + end + + Threads.@threads for i_res in chain_1.first : chain_1.last + if cg_bead_name[i_res] == ""RP"" && ! cgRNA_use_phosphate_go + continue + end + coor_i = cg_bead_coor[:, i_res] + cell_i = cell_index_cg_bead[i_res] + neighbor_cell_i = cell_neighbors[cell_i] + + for j_cell in neighbor_cell_i + for j_res in cell_particles[j_cell] + if j_res < i_res + continue + end + if cg_chains[cg_chain_id[j_res]].moltype != MOL_RNA + continue + end + if cg_chain_id[j_res] == i_chain + continue + end + if cg_bead_name[j_res] == ""RP"" && ! 
cgRNA_use_phosphate_go + continue + end + coor_j = cg_bead_coor[:, j_res] + adist, nhb = compute_RNA_native_contact(cg_residues[i_res].atoms, + cg_residues[j_res].atoms, + aa_atom_name, + aa_coor) + if adist > RNA_GO_ATOMIC_CUTOFF + continue + end + native_dist = compute_distance(coor_i, coor_j) + if cg_bead_name[i_res] == ""RB"" && cg_bead_name[j_res] == ""RB"" + if nhb == 2 + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_base_pair, tmp_top_cnt) + # push!(param_cg_RNA_e_base_pair, RNA_BPAIR_EPSILON_2HB) + push!(top_cg_RNA_base_pair[i_res], [j_res, native_dist, RNA_BPAIR_EPSILON_2HB]) + elseif nhb >= 3 + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_base_pair, tmp_top_cnt) + # push!(param_cg_RNA_e_base_pair, RNA_BPAIR_EPSILON_3HB) + push!(top_cg_RNA_base_pair[i_res], [j_res, native_dist, RNA_BPAIR_EPSILON_3HB]) + else + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_other_contact, tmp_top_cnt) + # push!(param_cg_RNA_e_other_contact, RNA_PAIR_EPSILON_OTHER[""BB""]) + push!(top_cg_RNA_other_contact[i_res], [j_res, native_dist, RNA_PAIR_EPSILON_OTHER[""BB""]]) + end + else + contact_type = cg_bead_name[i_res][end] * cg_bead_name[j_res][end] + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_RNA_other_contact, tmp_top_cnt) + # push!(param_cg_RNA_e_other_contact, RNA_PAIR_EPSILON_OTHER[contact_type]) + push!(top_cg_RNA_other_contact[i_res], [j_res, native_dist, RNA_PAIR_EPSILON_OTHER[contact_type]]) + end + end + end + end + end + if verbose + print(""\n ... inter-molecular contacts: DONE! 
\n"") + end + end + + if verbose + println(""------------------------------------------------------------"") + @printf("" > Total number of RNA contacts: %12d \n"", + sum(length.(top_cg_RNA_base_stack)) + + sum(length.(top_cg_RNA_base_pair)) + + sum(length.(top_cg_RNA_other_contact)) ) + end + + end + + # =========================================================== + # Protein-RNA structure-based interactions: Go-like potential + # =========================================================== + # _ _ ____ _ _ _ + # _ __ _ __ ___ | |_ ___(_)_ __ | _ \| \ | | / \ + # | '_ \| '__/ _ \| __/ _ \ | '_ \ _____| |_) | \| | / _ \ + # | |_) | | | (_) | || __/ | | | |_____| _ <| |\ |/ ___ \ + # | .__/|_| \___/ \__\___|_|_| |_| |_| \_\_| \_/_/ \_\ + # |_| + # + # ============================================================ + + if ff_pro_rna == FF_pro_RNA_Go && num_chain_RNA > 0 && num_chain_pro > 0 && !do_test_local_only + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): Generating protein-RNA native contacts."") + + @printf(""%11s Calculating protein-RNA contacts... \n"", "" "") + @printf("" ... 
progress: %32s"", "" "") + end + for i_chain in 1:aa_num_chain + # ----------------- + # show progress bar + # ----------------- + if verbose + print(""\b""^32) + progress_percent = trunc(Int, i_chain / aa_num_chain * 20) + progress_bar = ""|"" ^ progress_percent * "" "" ^ (20 - progress_percent) + @printf("" [%20s] %5.1f %% "", progress_bar, i_chain / aa_num_chain * 100) + end + # ------------------ + + chain_pro = cg_chains[i_chain] + + if chain_pro.moltype != MOL_PROTEIN + continue + end + + + Threads.@threads for i_res in chain_pro.first : chain_pro.last + cell_i = cell_index_cg_bead[i_res] + neighbor_cell_i = cell_neighbors[cell_i] + coor_i = cg_bead_coor[:, i_res] + + for j_cell in neighbor_cell_i + for j_res in cell_particles[j_cell] + if cg_chains[cg_chain_id[j_res]].moltype != MOL_RNA + continue + end + if cg_bead_name[j_res] == ""RP"" && ! cgRNA_use_phosphate_go + continue + end + if !is_protein_RNA_native_contact(cg_residues[i_res].atoms, cg_residues[j_res].atoms, aa_atom_name, aa_coor) + continue + end + coor_j = cg_bead_coor[:, j_res] + native_dist = compute_distance(coor_i, coor_j) + if cg_bead_name[j_res] == ""RS"" + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_pro_RNA_contact, tmp_top_cnt) + # push!(param_cg_pro_RNA_e_contact, PRO_RNA_GO_EPSILON_S) + push!(top_cg_pro_RNA_contact[i_res], [j_res, native_dist, PRO_RNA_GO_EPSILON_S]) + elseif cg_bead_name[j_res] == ""RB"" + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_pro_RNA_contact, tmp_top_cnt) + # push!(param_cg_pro_RNA_e_contact, PRO_RNA_GO_EPSILON_B) + push!(top_cg_pro_RNA_contact[i_res], [j_res, native_dist, PRO_RNA_GO_EPSILON_B]) + elseif cg_bead_name[j_res] == ""RP"" && ! 
cgRNA_use_phosphate_go + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_pro_RNA_contact, tmp_top_cnt) + # push!(param_cg_pro_RNA_e_contact, PRO_RNA_GO_EPSILON_P) + push!(top_cg_pro_RNA_contact[i_res], [j_res, native_dist, PRO_RNA_GO_EPSILON_P]) + end + end + end + end + end + + if verbose + println(""\n------------------------------------------------------------"") + @printf("" > Total number of protein-RNA contacts: %8d \n"", + sum( length.(top_cg_pro_RNA_contact) ) ) + end + end + + + # ============================================================ + # PWMcos parameters: protein-DNA sequence-specific interaction + # ============================================================ + # ______ ____ __ + # | _ \ \ / / \/ | ___ ___ ___ + # | |_) \ \ /\ / /| |\/| |/ __/ _ \/ __| + # | __/ \ V V / | | | | (_| (_) \__ \ + # |_| \_/\_/ |_| |_|\___\___/|___/ + # + # ============================================================ + if ff_pro_dna == FF_PWMcos + pwmcos_native_contacts = [] + + if num_chain_pro == 0 + error(""Cannot generate PWMcos parameters without protein..."") + end + if num_chain_DNA != 2 + error(""Cannot generate PWMcos parameters from more or less than two DNA chains..."") + end + + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): Generating PWMcos parameters."") + end + + # ------------------------------------------------ + # Step 7.1: determine protein-DNA contacts + # ------------------------------------------------ + if verbose + println(""------------------------------------------------------------"") + println(""> $(i_step).1: determine contacts between protein and DNA."") + end + + for i_chain in 1:aa_num_chain + chain_pro = cg_chains[i_chain] + + if chain_pro.moltype != MOL_PROTEIN + continue + end + + for i_res in chain_pro.first : chain_pro.last + i_res_N = i_res == chain_pro.first ? i_res : i_res - 1 + i_res_C = i_res == chain_pro.last ? 
i_res : i_res + 1 + + coor_pro_i = cg_bead_coor[:, i_res] + coor_pro_N = cg_bead_coor[:, i_res_N] + coor_pro_C = cg_bead_coor[:, i_res_C] + + for j_chain in 1:aa_num_chain + chain_DNA = cg_chains[j_chain] + + if chain_DNA.moltype != MOL_DNA + continue + end + + for j_res in chain_DNA.first + 3 : chain_DNA.last - 3 + if cg_bead_name[j_res] != ""DB"" + continue + end + if !is_PWMcos_contact(cg_residues[i_res].atoms, cg_residues[j_res].atoms, aa_atom_name, aa_coor) + continue + end + + j_res_5, j_res_3 = j_res - 3, j_res + 3 + coor_dna_j = cg_bead_coor[:, j_res] + coor_dna_5 = cg_bead_coor[:, j_res_5] + coor_dna_3 = cg_bead_coor[:, j_res_3] + coor_dna_S = cg_bead_coor[:, j_res - 1] + + vec0 = coor_pro_i - coor_dna_j + vec1 = coor_dna_S - coor_dna_j + vec2 = coor_dna_3 - coor_dna_5 + vec3 = coor_pro_N - coor_pro_C + r0 = norm(vec0) + theta1 = compute_vec_angle(vec0, vec1) + theta2 = compute_vec_angle(vec0, vec2) + theta3 = compute_vec_angle(vec0, vec3) + + push!(pwmcos_native_contacts, (i_res - chain_pro.first + 1, + cg_resid_index[j_res], + r0, + theta1, + theta2, + theta3)) + + if do_debug + println(""PWMcos | pro ===> "", i_res - chain_pro.first + 1, + "" DNA ===> "", j_res, "" : "", cg_resid_index[j_res], + "" r0 = "", r0, + "" theta1 = "", theta1, + "" theta2 = "", theta2, + "" theta3 = "", theta3) + end + end + end + end + end + + # ------------------------------------------------ + # Step 7.2: Read in PFM and convert to PWM + # ------------------------------------------------ + if verbose + println(""------------------------------------------------------------"") + println(""> $(i_step).2: read in position frequency matrix (PFM)."") + end + + pwmcos_pwm, pwmcos_chain_a, pwmcos_chain_b = read_modified_pfm(pfm_filename) + num_pwmcos_terms = length(pwmcos_chain_a) + + + # ------------------------------------------------ + # Step 7.2: Read in PFM and convert to PWM + # ------------------------------------------------ + if verbose + 
println(""------------------------------------------------------------"") + println(""> $(i_step).3: decomposing PWM."") + end + + ip_count = zeros(Float64, num_pwmcos_terms) + + contact_to_pwm = [] + for nat_contact in pwmcos_native_contacts + i_dna = nat_contact[2] # cg_resid_index[dna] + cnt_pwm_idx_a = indexin(i_dna, pwmcos_chain_a)[1] + cnt_pwm_idx_b = indexin(i_dna, pwmcos_chain_b)[1] + if cnt_pwm_idx_a isa Int + push!( contact_to_pwm, (cnt_pwm_idx_a, 1) ) + ip_count[cnt_pwm_idx_a] += 1 + elseif cnt_pwm_idx_b isa Int + push!( contact_to_pwm, (cnt_pwm_idx_b, -1) ) + ip_count[cnt_pwm_idx_b] += 1 + else + error(""Index error in CHAIN_A or CHAIN_B!"") + end + end + pwm_decomposed = pwmcos_pwm ./ ip_count + + for (i_cnt, nat_cnt) in enumerate(pwmcos_native_contacts) + pwm_i, pwm_order = contact_to_pwm[i_cnt][1], contact_to_pwm[i_cnt][2] + if pwm_order == 1 + eA, eC, eG, eT = pwm_decomposed[pwm_i, 1:4] + elseif pwm_order == -1 + eA, eC, eG, eT = pwm_decomposed[pwm_i, 4:-1:1] + end + tmp_top_pwmcos = CGTopPWMcos(nat_cnt[1], + nat_cnt[3], + nat_cnt[4], + nat_cnt[5], + nat_cnt[6], + eA, eC, eG, eT) + push!(top_cg_pro_DNA_pwmcos, tmp_top_pwmcos) + end + + if do_debug + println(size( contact_to_pwm )) + println(pwm_decomposed) + end + + if verbose + println(""> ... 
DONE!"") + end + end + + + # =================================================================== + # PWMcos-ns parameters: protein-DNA sequence-non-specific interaction + # =================================================================== + # ______ ____ __ + # | _ \ \ / / \/ | ___ ___ ___ _ __ ___ + # | |_) \ \ /\ / /| |\/| |/ __/ _ \/ __|_____| '_ \/ __| + # | __/ \ V V / | | | | (_| (_) \__ \_____| | | \__ \ + # |_| \_/\_/ |_| |_|\___\___/|___/ |_| |_|___/ + # + # ============================================================ + if ff_pro_dna == FF_PWMcos_ns + + # pwmcos_ns_native_contacts = [] + + if num_chain_pro == 0 + error(""Cannot generate PWMcos parameters without protein..."") + end + + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): Generating PWMcos-ns parameters."") + end + + # ------------------------------------------------ + # Step 7.1: determine protein-DNA contacts + # ------------------------------------------------ + if verbose + println(""------------------------------------------------------------"") + println(""> $(i_step).1: determine contacts between CA and DP."") + end + + for i_chain in 1:aa_num_chain + chain_pro = cg_chains[i_chain] + + if chain_pro.moltype != MOL_PROTEIN + continue + end + + for i_res in chain_pro.first : chain_pro.last + i_res_N = i_res == chain_pro.first ? i_res : i_res - 1 + i_res_C = i_res == chain_pro.last ? 
i_res : i_res + 1 + + coor_pro_i = cg_bead_coor[:, i_res] + coor_pro_N = cg_bead_coor[:, i_res_N] + coor_pro_C = cg_bead_coor[:, i_res_C] + + for j_chain in 1:aa_num_chain + chain_DNA = cg_chains[j_chain] + + if chain_DNA.moltype != MOL_DNA + continue + end + + for j_res in chain_DNA.first + 2 : chain_DNA.last - 2 + if cg_bead_name[j_res] != ""DP"" + continue + end + if !is_PWMcos_contact(cg_residues[i_res].atoms, cg_residues[j_res].atoms, aa_atom_name, aa_coor) + continue + end + + coor_dna_j = cg_bead_coor[:, j_res] + coor_dna_S = cg_bead_coor[:, j_res + 1] + + vec0 = coor_pro_i - coor_dna_j + vec1 = coor_dna_S - coor_dna_j + vec3 = coor_pro_N - coor_pro_C + r0 = norm(vec0) + theta1 = compute_vec_angle(vec0, vec1) + theta3 = compute_vec_angle(vec0, vec3) + + # push!(pwmcos_ns_native_contacts, (i_res - chain_pro.first + 1, + # r0, + # theta1, + # theta3)) + + if do_debug + println(""PWMcos | pro ===> "", i_res - chain_pro.first + 1, + "" DNA ===> "", j_res, "" : "", cg_resid_index[j_res], + "" r0 = "", r0, + "" theta1 = "", theta1, + "" theta3 = "", theta3) + end + + tmp_top_pwmcosns = CGTopPWMcos(i_res - chain_pro.first + 1, + r0, theta1, 0.0, theta3, + 0.0, 0.0, 0.0, 0.0) + push!(top_cg_pro_DNA_pwmcosns, tmp_top_pwmcosns) + + end + end + end + end + + if verbose + println(""> ... 
DONE!"") + end + end + + + # ============================================================ + # Protein-DNA structure-based Go-like interaction + # ============================================================ + # ____ _ _ _ ____ + # _ __ _ __ ___ | _ \| \ | | / \ / ___| ___ + # | '_ \| '__/ _ \ _____| | | | \| | / _ \ | | _ / _ \ + # | |_) | | | (_) |_____| |_| | |\ |/ ___ \ | |_| | (_) | + # | .__/|_| \___/ |____/|_| \_/_/ \_\ \____|\___/ + # |_| + # ============================================================ + if ff_pro_dna == FF_pro_DNA_Go + + if num_chain_pro == 0 + error(""Cannot generate protein-DNA parameters without protein..."") + end + if num_chain_DNA == 0 + error(""Cannot generate protein-DNA parameters without DNA..."") + end + + i_step += 1 + if verbose + println(""============================================================"") + println(""> Step $(i_step): Generating protein-DNA Go-like interactions."") + end + + for i_chain in 1:aa_num_chain + chain_pro = cg_chains[i_chain] + + if chain_pro.moltype != MOL_PROTEIN + continue + end + + Threads.@threads for i_res in chain_pro.first : chain_pro.last + cell_i = cell_index_cg_bead[i_res] + neighbor_cell_i = cell_neighbors[cell_i] + coor_i = cg_bead_coor[:, i_res] + + for j_cell in neighbor_cell_i + for j_res in cell_particles[j_cell] + + if cg_chains[cg_chain_id[j_res]].moltype != MOL_DNA + continue + end + if !is_protein_DNA_Go_contact(cg_residues[i_res].atoms, cg_residues[j_res].atoms, aa_atom_name, aa_coor) + continue + end + + coor_j = cg_bead_coor[:, j_res] + native_dist = compute_distance(coor_i, coor_j) + # tmp_top_cnt = CGTopContact(i_res, j_res, native_dist) + # push!(top_cg_pro_DNA_contact, tmp_top_cnt) + push!(top_cg_pro_DNA_contact[i_res], [j_res, native_dist]) + + end + end + end + end + + if verbose + println(""\n------------------------------------------------------------"") + @printf("" > Total number of protein-DNA contacts: %8d \n"", + sum( length.(top_cg_pro_DNA_contact) ) ) + end + end 
+ + + + # ============================= + # Make a new topology structure + # ============================= + + top_default_params = GenTopDefault(0, 0, false, 0.0, 0.0) + top_default_atomtype = Vector{GenTopAtomType}(undef, 0) + top_default_CGDNA_bp = Vector{GenTopCGDNABasepairType}(undef, 0) + top_default_CGDNA_bs = Vector{GenTopCGDNABasestackType}(undef, 0) + top_default_CGDNA_cs = Vector{GenTopCGDNABasecrossType}(undef, 0) + top_default_CGDNA_exv = Vector{GenTopCGDNAExvType}(undef, 0) + top_default_CGPro_flx_angle = Vector{GenTopCGProAICGFlexAngleType}(undef, 0) + top_default_CGPro_flx_dihedral = Vector{GenTopCGProAICGFlexDihedralType}(undef, 0) + top_default_CGIDR_HPS_atomtype = Vector{GenTopCGIDRHPSAtomType}(undef, 0) + top_default_CGIDR_KH_atomtype = Vector{GenTopCGIDRKHAtomType}(undef, 0) + + global_index_2_local_index = Vector{Int}(undef, 0) + global_index_2_local_molid = Vector{Int}(undef, 0) + top_atoms = Vector{GenTopAtom}(undef, 0) + top_bonds = Vector{GenTopBond}(undef, 0) + top_angles = Vector{GenTopAngle}(undef, 0) + top_dihedrals = Vector{GenTopDihedral}(undef, 0) + top_pairs = Vector{GenTopPair}(undef, 0) + top_exclusions = Vector{GenTopExclusion}(undef, 0) + top_pwmcos = Vector{GenTopPWMcos}(undef, 0) + top_pwmcosns = Vector{GenTopPWMcos}(undef, 0) + top_idr_hps = Vector{GenTopRegion}(undef, 0) + top_idr_kh = Vector{GenTopRegion}(undef, 0) + top_mol_list = Vector{GenTopMolList}(undef, 0) + + # --------- + # [ atoms ] + # --------- + for i_bead in 1 : cg_num_particles + a_type = cg_bead_type[i_bead] + r_indx = cg_resid_index[i_bead] + r_name = cg_resid_name[i_bead] + a_name = cg_bead_name[i_bead] + f_type = AICG_ATOM_FUNC_NR + charge = cg_bead_charge[i_bead] + mass = cg_bead_mass[i_bead] + c_id = cg_chain_id[i_bead] + s_name = cg_seg_name[i_bead] + new_atom = GenTopAtom(i_bead, a_type, r_indx, r_name, a_name, f_type, charge, mass, c_id, s_name) + push!(top_atoms, new_atom) + push!(global_index_2_local_index, i_bead) + 
push!(global_index_2_local_molid, c_id) + end + + # --------- + # [ bonds ] + # --------- + # AICG2+ bonds + if ff_pro == FF_pro_AICG2p + for bond in top_cg_pro_bonds + new_bond = GenTopBond(bond.i, bond.j, AICG_BOND_FUNC_TYPE, bond.r0, AICG_BOND_K) + push!(top_bonds, new_bond) + end + # Clementi Go bonds + elseif ff_pro == FF_pro_Clementi_Go + for bond in top_cg_pro_bonds + new_bond = GenTopBond(bond.i, bond.j, CCGO_BOND_FUNC_TYPE, bond.r0, CCGO_BOND_K) + push!(top_bonds, new_bond) + end + end + + # 3SPN.2C bonds + if ff_dna == FF_DNA_3SPN2C && gen_3spn_itp > 0 + for bond in top_cg_DNA_bonds + new_bond = GenTopBond(bond.i, bond.j, DNA3SPN_BOND_FUNC4_TYPE, bond.r0, DNA3SPN_BOND_K_2) + push!(top_bonds, new_bond) + end + end + + # Structure-based RNA bonds + if ff_rna == FF_RNA_HT + for ( i_bond, bond ) in enumerate( top_cg_RNA_bonds ) + new_bond = GenTopBond(bond.i, bond.j, RNA_BOND_FUNC_TYPE, bond.r0, param_cg_RNA_k_bonds[i_bond]) + push!(top_bonds, new_bond) + end + end + + # ---------- + # [ angles ] + # ---------- + # AICG2+ angles + if ff_pro == FF_pro_AICG2p + # AICG2+ 1-3 + if length(top_cg_pro_aicg13) > 0 + for ( i_13, a13 ) in enumerate( top_cg_pro_aicg13 ) + if in(a13.i, AICG2p_flexible_local) || in(a13.j, AICG2p_flexible_local) || in(a13.k, AICG2p_flexible_local) || + in(a13.i, HPS_IDR_region) || in(a13.j, HPS_IDR_region) || in(a13.k, HPS_IDR_region) || + in(a13.i, KH_IDR_region) || in(a13.j, KH_IDR_region) || in(a13.k, KH_IDR_region) + continue + end + new_angle = GenTopAngle(a13.i, a13.j, a13.k, AICG_ANG_G_FUNC_TYPE, a13.a0, param_cg_pro_e_13[i_13], AICG_13_SIGMA) + push!(top_angles, new_angle) + end + end + # AICG2+ flexible + if length(top_cg_pro_angles) > 0 + for ang in top_cg_pro_angles + if ( in(ang.i, HPS_IDR_region) && in(ang.j, HPS_IDR_region) && in(ang.k, HPS_IDR_region) ) || + ( in(ang.i, KH_IDR_region) && in(ang.j, KH_IDR_region) && in(ang.k, KH_IDR_region) ) + continue + end + new_angle = GenTopAngle(ang.i, ang.j, ang.k, 
AICG_ANG_F_FUNC_TYPE, 0.0, 0.0, 0.0) + push!(top_angles, new_angle) + end + end + # Clementi Go angles + elseif ff_pro == FF_pro_Clementi_Go + for ang in top_cg_pro_angles + if in(ang.i, HPS_IDR_region) || in(ang.j, HPS_IDR_region) || in(ang.k, HPS_IDR_region) || + in(ang.i, KH_IDR_region) || in(ang.j, KH_IDR_region) || in(ang.k, KH_IDR_region) + continue + end + new_angle = GenTopAngle(ang.i, ang.j, ang.k, CCGO_ANG_FUNC_TYPE, ang.a0, CCGO_ANGL_K, 0.0) + push!(top_angles, new_angle) + end + end + + # 3SPN.2C angles + if ff_dna == FF_DNA_3SPN2C && gen_3spn_itp > 0 + for ( i_ang, ang ) in enumerate( top_cg_DNA_angles ) + new_angle = GenTopAngle(ang.i, ang.j, ang.k, DNA3SPN_ANG_FUNC_TYPE, ang.a0, param_cg_DNA_k_angles[i_ang], 0.0) + push!(top_angles, new_angle) + end + end + + # RNA structure-based angles + if ff_rna == FF_RNA_HT + for ( i_ang, ang ) in enumerate( top_cg_RNA_angles ) + new_angle = GenTopAngle(ang.i, ang.j, ang.k, RNA_ANG_FUNC_TYPE, ang.a0, param_cg_RNA_k_angles[i_ang], 0.0) + push!(top_angles, new_angle) + end + end + + # ------------- + # [ dihedrals ] + # ------------- + function is_dihedral_dangerous(dih::CGTopDihedral) + coor1 = cg_bead_coor[:, dih.i] + coor2 = cg_bead_coor[:, dih.j] + coor3 = cg_bead_coor[:, dih.k] + coor4 = cg_bead_coor[:, dih.l] + ang1 = compute_angle(coor1, coor2, coor3) + ang2 = compute_angle(coor2, coor3, coor4) + if ang1 > DIHEDRAL_SAFE_CUTOFF || ang2 > DIHEDRAL_SAFE_CUTOFF + return true + end + return false + end + # AICG2+ dihedrals + if ff_pro == FF_pro_AICG2p + # AICG2+ Gaussian dihedrals + for ( i_dih, dih ) in enumerate( top_cg_pro_aicg14 ) + if in(dih.i, AICG2p_flexible_local) || in(dih.j, AICG2p_flexible_local) || + in(dih.k, AICG2p_flexible_local) || in(dih.l, AICG2p_flexible_local) + continue + elseif in(dih.i, HPS_IDR_region) || in(dih.j, HPS_IDR_region) || + in(dih.k, HPS_IDR_region) || in(dih.l, HPS_IDR_region) || + in(dih.i, KH_IDR_region) || in(dih.j, KH_IDR_region) || + in(dih.k, KH_IDR_region) || in(dih.l, 
KH_IDR_region) + continue + end + # if is_dihedral_dangerous(dih) + if true + dih_func_type = DIHEDRAL_GAUS_MOD_TYPE[use_safe_dihedral] + else + dih_func_type = AICG_DIH_G_FUNC_TYPE + end + new_dihedral = GenTopDihedral(dih.i, dih.j, dih.k, dih.l, dih_func_type, + dih.t0, param_cg_pro_e_14[i_dih], AICG_14_SIGMA, 0) + push!(top_dihedrals, new_dihedral) + end + # AICG2+ flexible dihedrals + for dih in top_cg_pro_dihedrals + if ( in(dih.i, HPS_IDR_region) && in(dih.j, HPS_IDR_region) && + in(dih.k, HPS_IDR_region) && in(dih.l, HPS_IDR_region) ) || + ( in(dih.i, KH_IDR_region) && in(dih.j, KH_IDR_region) && + in(dih.k, KH_IDR_region) && in(dih.l, KH_IDR_region) ) + continue + end + dih_func_type = DIHEDRAL_TABU_MOD_TYPE[use_safe_dihedral] + new_dihedral = GenTopDihedral(dih.i, dih.j, dih.k, dih.l, dih_func_type, 0.0, 0.0, 0.0, 0) + push!(top_dihedrals, new_dihedral) + end + # Clementi Go dihedral + elseif ff_pro == FF_pro_Clementi_Go + for dih in top_cg_pro_dihedrals + if in(dih.i, HPS_IDR_region) || in(dih.j, HPS_IDR_region) || + in(dih.k, HPS_IDR_region) || in(dih.l, HPS_IDR_region) || + in(dih.i, KH_IDR_region) || in(dih.j, KH_IDR_region) || + in(dih.k, KH_IDR_region) || in(dih.l, KH_IDR_region) + continue + end + # if is_dihedral_dangerous(dih) + if true + dih_func_type = DIHEDRAL_PERI_MOD_TYPE[use_safe_dihedral] + else + dih_func_type = CCGO_DIH_P_FUNC_TYPE + end + new_dihedral = GenTopDihedral(dih.i, dih.j, dih.k, dih.l, dih_func_type, + dih.t0 - 180.0, CCGO_DIHE_K_1, 0.0, 1) + push!(top_dihedrals, new_dihedral) + end + for dih in top_cg_pro_dihedrals + if in(dih.i, HPS_IDR_region) || in(dih.j, HPS_IDR_region) || + in(dih.k, HPS_IDR_region) || in(dih.l, HPS_IDR_region) || + in(dih.i, KH_IDR_region) || in(dih.j, KH_IDR_region) || + in(dih.k, KH_IDR_region) || in(dih.l, KH_IDR_region) + continue + end + # if is_dihedral_dangerous(dih) + if true + dih_func_type = DIHEDRAL_PERI_MOD_TYPE[use_safe_dihedral] + else + dih_func_type = CCGO_DIH_P_FUNC_TYPE + end + 
new_dihedral = GenTopDihedral(dih.i, dih.j, dih.k, dih.l, dih_func_type, + 3 * dih.t0 - 180.0, CCGO_DIHE_K_3, 0.0, 3) + push!(top_dihedrals, new_dihedral) + end + end + + # 3SPN.2C dihedrals + if ff_dna == FF_DNA_3SPN2C && gen_3spn_itp > 0 + # 3SPN.2C Gaussian dihedrals + for dih in top_cg_DNA_dih_Gaussian + # if use_safe_dihedral > 0 && is_dihedral_dangerous(dih) + if true + dih_func_type = DIHEDRAL_GAUS_MOD_TYPE[use_safe_dihedral] + else + dih_func_type = DNA3SPN_DIH_G_FUNC_TYPE + end + new_dihedral = GenTopDihedral(dih.i, dih.j, dih.k, dih.l, dih_func_type, + dih.t0, DNA3SPN_DIH_G_K, DNA3SPN_DIH_G_SIGMA, 0) + push!(top_dihedrals, new_dihedral) + end + + # 3SPN.2C Periodic dihedrals + for dih in top_cg_DNA_dih_periodic + # if use_safe_dihedral > 0 && is_dihedral_dangerous(dih) + if true + dih_func_type = DIHEDRAL_PERI_MOD_TYPE[use_safe_dihedral] + else + dih_func_type = DNA3SPN_DIH_P_FUNC_TYPE + end + n_dih_tmp = DNA3SPN_DIH_P_FUNC_PERI + new_dihedral = GenTopDihedral(dih.i, dih.j, dih.k, dih.l, dih_func_type, + n_dih_tmp * dih.t0 - 180.0, DNA3SPN_DIH_P_K, 0.0, n_dih_tmp) + push!(top_dihedrals, new_dihedral) + end + end + + # RNA structure-based Periodic dihedrals + if ff_rna == FF_RNA_HT + for ( i_dih, dih ) in enumerate( top_cg_RNA_dihedrals ) + # if is_dihedral_dangerous(dih) + if true + dih_func_type = DIHEDRAL_PERI_MOD_TYPE[use_safe_dihedral] + else + dih_func_type = RNA_DIH_FUNC_TYPE + end + new_dihedral = GenTopDihedral(dih.i, dih.j, dih.k, dih.l, dih_func_type, + dih.t0 - 180.0, param_cg_RNA_k_dihedrals[i_dih], 0.0, 1) + push!(top_dihedrals, new_dihedral) + end + for ( i_dih, dih ) in enumerate( top_cg_RNA_dihedrals ) + # if is_dihedral_dangerous(dih) + if true + dih_func_type = DIHEDRAL_PERI_MOD_TYPE[use_safe_dihedral] + else + dih_func_type = RNA_DIH_FUNC_TYPE + end + new_dihedral = GenTopDihedral(dih.i, dih.j, dih.k, dih.l, dih_func_type, + 3 * dih.t0 - 180.0, param_cg_RNA_k_dihedrals[i_dih] / 2, 0.0, 3) + push!(top_dihedrals, new_dihedral) + end + end 
+ + + # --------- + # [ pairs ] + # --------- + # protein Go-type native contacts + if ff_pro == FF_pro_AICG2p + for i_res in 1:cg_num_particles + for c in top_cg_pro_go_contact[i_res] + if in(i_res, AICG2p_flexible_nonlocal) || in(c[1], AICG2p_flexible_nonlocal) || + in(i_res, HPS_IDR_region) || in(c[1], HPS_IDR_region) || + in(i_res, KH_IDR_region) || in(c[1], KH_IDR_region) + continue + end + new_pair = GenTopPair(i_res, c[1], AICG_CONTACT_FUNC_TYPE, c[2], c[3]) + push!(top_pairs, new_pair) + end + end + # Clementi Go native contacts + elseif ff_pro == FF_pro_Clementi_Go + for i_res in 1:cg_num_particles + for c in top_cg_pro_go_contact[i_res] + if in(i_res, HPS_IDR_region) || in(c[1], HPS_IDR_region) || + in(i_res, KH_IDR_region) || in(c[1], KH_IDR_region) + continue + end + new_pair = GenTopPair(i_res, c[1], CCGO_CONTACT_FUNC_TYPE, c[2], CCGO_NATIVE_EPSILON * ccgo_contact_scale) + push!(top_pairs, new_pair) + end + end + end + + # RNA HT-type native contacts + if ff_rna == FF_RNA_HT + for i_res in 1:cg_num_particles + for c in top_cg_RNA_base_stack[i_res] + new_pair = GenTopPair(i_res, c[1], RNA_CONTACT_FUNC_TYPE, c[2], c[3]) + push!(top_pairs, new_pair) + end + for c in top_cg_RNA_base_pair[i_res] + new_pair = GenTopPair(i_res, c[1], RNA_CONTACT_FUNC_TYPE, c[2], c[3]) + push!(top_pairs, new_pair) + end + for c in top_cg_RNA_other_contact[i_res] + new_pair = GenTopPair(i_res, c[1], RNA_CONTACT_FUNC_TYPE, c[2], c[3]) + push!(top_pairs, new_pair) + end + end + end + + # protein-RNA native contacts + if ff_pro_rna == FF_pro_RNA_Go + for i_res in 1:cg_num_particles + for c in top_cg_pro_RNA_contact[i_res] + new_pair = GenTopPair(i_res, c[1], RNA_CONTACT_FUNC_TYPE, c[2], c[3]) + push!(top_pairs, new_pair) + end + end + end + + + # protein-DNA native contacts + if ff_pro_dna == FF_pro_DNA_Go + for i_res in 1:cg_num_particles + for c in top_cg_pro_DNA_contact[i_res] + new_pair = GenTopPair(i_res, c[1], CCGO_CONTACT_FUNC_TYPE, c[2], CCGO_NATIVE_EPSILON * 
ccgo_contact_scale) + push!(top_pairs, new_pair) + end + end + end + + + # -------------- + # [ exclusions ] + # -------------- + # contact pairs + # for c in top_pairs + # i_exc = c.i + # j_exc = c.j + # new_exc = GenTopExclusion(i_exc, j_exc) + # push!(top_exclusions, new_exc) + # end + + + # ---------- + # [ pwmcos ] + # ---------- + for p in top_cg_pro_DNA_pwmcos + new_pwmcos = GenTopPWMcos(p.i, PWMCOS_FUNC_TYPE, p.r0, p.t1, p.t2, p.t3, + p.eA, p.eC, p.eG, p.eT, pwmcos_gamma, pwmcos_epsil) + push!(top_pwmcos, new_pwmcos) + end + + for p in top_cg_pro_DNA_pwmcosns + new_pwmcos = GenTopPWMcos(p.i, PWMCOS_NS_FUNC_TYPE, p.r0, p.t1, p.t2, p.t3, + p.eA, p.eC, p.eG, p.eT, pwmcos_gamma, pwmcosns_epsil) + push!(top_pwmcosns, new_pwmcos) + end + + + # --------------------- + # [ cg_IDR_HPS_region ] + # --------------------- + if has_toml_mod + if haskey(ff_detail_config, ""IDR"") + if haskey(ff_detail_config[""IDR""], ""HPS_region"") + index_string = ff_detail_config[""IDR""][""HPS_region""] + hps_words = split(index_string, r""\s*,\s*"", keepempty=false) + for w in hps_words + idxwords = split(w, r""\s*to\s*"", keepempty=false) + i = parse(Int, idxwords[1]) + if length(idxwords) > 1 + j = parse(Int, idxwords[2]) + else + j = i + end + new_idr = GenTopRegion(i, j) + push!(top_idr_hps, new_idr) + end + end + end + end + + # --------------------- + # [ cg_IDR_KH_region ] + # --------------------- + if has_toml_mod + if haskey(ff_detail_config, ""IDR"") + if haskey(ff_detail_config[""IDR""], ""KH_region"") + index_string = ff_detail_config[""IDR""][""KH_region""] + kh_words = split(index_string, r""\s*,\s*"", keepempty=false) + for w in kh_words + idxwords = split(w, r""\s*to\s*"", keepempty=false) + i = parse(Int, idxwords[1]) + if length(idxwords) > 1 + j = parse(Int, idxwords[2]) + else + j = i + end + new_idr = GenTopRegion(i, j) + push!(top_idr_kh, new_idr) + end + end + end + end + + + mol_name = pdb_name[1:end-4] + mytop = GenTopology(mol_name, cg_num_particles, + 
top_default_params, + top_default_atomtype, + top_default_CGDNA_bp, + top_default_CGDNA_bs, + top_default_CGDNA_cs, + top_default_CGDNA_exv, + top_default_CGPro_flx_angle, + top_default_CGPro_flx_dihedral, + top_default_CGIDR_HPS_atomtype, + top_default_CGIDR_KH_atomtype, + global_index_2_local_index, + global_index_2_local_molid, + top_atoms, + top_bonds, + top_angles, + top_dihedrals, + top_pairs, + top_exclusions, + top_pwmcos, + top_pwmcosns, + top_idr_hps, + top_idr_kh, + top_mol_list) + myconf = Conformation(cg_num_particles, cg_bead_coor) + + + # ---------- + # output log + # ---------- + if do_output_log + log_name = pdb_name[1:end-4] * ""_cg.log"" + log_file = open(log_name, ""w"") + + println(log_file, ""================================================================================"") + println(log_file, "" PDB info (atomic):"") + println(log_file, "" - Number of atoms : $(aa_num_atom)"") + println(log_file, "" - Number of residues : $(aa_num_residue)"") + println(log_file, "" - Number of chains : $(aa_num_chain)"") + + println(log_file, ""================================================================================"") + println(log_file, "" Chain info (CG):"") + @printf(log_file, "" - Number of protein chains: %5d \n"", num_chain_pro) + @printf(log_file, "" - Number of DNA strands: %5d \n"", num_chain_DNA) + @printf(log_file, "" - Number of RNA strands: %5d \n"", num_chain_RNA) + + println(log_file, "" |--------------------------------------------------------------------|"") + println(log_file, "" | Chain | Mol Type | # bead | start -- end | Rg (Å) | net charge | "") + println(log_file, "" |-------+----------+--------+----------------+----------+------------|"") + + for i_chain = 1:aa_num_chain + chain = cg_chains[i_chain] + charge = sum( cg_bead_charge[chain.first : chain.last] ) + @printf(log_file, "" | %3d | %8s | %6d | %5d -- %5d | %8.3f | %+10.3f | \n"", + i_chain, MOL_TYPE_LIST[ chain.moltype ], cg_chain_length[i_chain], + 
cg_chains[i_chain].first, cg_chains[i_chain].last, + geo_radius_of_gyration[i_chain], + charge) + end + + println(log_file, "" |--------------------------------------------------------------------|"") + println(log_file, "" CG mol info:"") + charge = sum( cg_bead_charge ) + rg_all = radius_of_gyration(myconf) + rc_all = radius_of_circumshpere(myconf) + @printf(log_file, "" - Number of CG particles: %8d \n"", cg_num_particles) + @printf(log_file, "" - Radius of gyration: %8.3f Å \n"", rg_all) + @printf(log_file, "" - Radius of circumsphere: %8.3f Å \n"", rc_all) + @printf(log_file, "" - Net Charge: %+8.3f e \n"", charge) + + + println(log_file, ""================================================================================"") + println(log_file, "" Interaction info:"") + if num_chain_pro > 0 + @printf(log_file, "" - Number of protein contacts: %12d \n"", sum( length.(top_cg_pro_go_contact) )) + end + if num_chain_RNA > 0 + @printf(log_file, "" - Number of RNA contacts: %12d \n"", sum( length.(top_cg_RNA_base_stack) ) + + sum( length.(top_cg_RNA_base_pair) ) + sum( length.(top_cg_RNA_other_contact) ) ) + end + if num_chain_RNA > 0 && num_chain_pro > 0 + @printf(log_file, "" - Number of protein-RNA contacts: %12d \n"", sum(length.(top_cg_pro_RNA_contact)) ) + end + println(log_file, ""================================================================================"") + + close(log_file) + end + + + + return ( mytop, myconf ) + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/topology.jl",".jl","4505","225","############################################################################### +# Topology # +############################################################################### + +# =========================== +# General Topology Structures +# =========================== + +# ------------- +# Default types +# ------------- + +struct GenTopDefault + nonbonded_function_type::Int + nonbonded_combination_rule::Int + gen_pairs::Bool + fudge_lj::Float64 + 
fudge_qq::Float64 +end + +struct GenTopAtomType + name::String + mass::Float64 + charge::Float64 + rmin::Float64 + eps::Float64 + n::Int + ptype::String +end + +struct GenTopCGDNABasestackType + base_5::String + base_3::String + function_type::Int + epsilon::Float64 + sigma::Float64 + theta::Float64 +end + +struct GenTopCGDNABasecrossType + base_a::String + base_b::String + function_type::Int + epsilon::Float64 + sigma::Float64 + theta::Float64 +end + +struct GenTopCGDNABasepairType + base_a::String + base_b::String + function_type::Int + theta1::Float64 + theta2::Float64 + theta3::Float64 + phi1::Float64 + sigma::Float64 + epsilon::Float64 +end + +struct GenTopCGDNAExvType + base::String + function_type::Int + sigma::Float64 +end + +struct GenTopCGProAICGFlexAngleType + # TODO +end + +struct GenTopCGProAICGFlexDihedralType + # TODO +end + +struct GenTopCGIDRHPSAtomType + name::String + mass::Float64 + charge::Float64 + sigma::Float64 + lambda::Float64 +end + +struct GenTopCGIDRKHAtomType + name::String + mass::Float64 + charge::Float64 + sigma::Float64 +end + + +# ----------------------- +# Molecule specific types +# ----------------------- + +struct GenTopAtom + atom_index::Int + atom_type::String + residue_index::Int + residue_name::String + atom_name::String + function_type::Int + charge::Float64 + mass::Float64 + chain_id::Int + seg_name::String +end + +struct GenTopBond + i::Int + j::Int + function_type::Int + r0::Float64 + coef::Float64 +end + +struct GenTopAngle + i::Int + j::Int + k::Int + function_type::Int + a0::Float64 + coef::Float64 + w::Float64 +end + +struct GenTopDihedral + i::Int + j::Int + k::Int + l::Int + function_type::Int + d0::Float64 + coef::Float64 + w::Float64 + n::Int +end + +struct GenTopPair + i::Int + j::Int + function_type::Int + r0::Float64 + coef::Float64 +end + +struct GenTopExclusion + i::Int + j::Int +end + +struct GenTopPWMcos + i::Int + function_type::Int + r0::Float64 + theta1::Float64 + theta2::Float64 + theta3::Float64 + 
ene_A::Float64 + ene_C::Float64 + ene_G::Float64 + ene_T::Float64 + gamma::Float64 + eps::Float64 +end + +struct GenTopRegion + istart::Int + iend::Int +end + +struct GenTopMolecule + mol_name::String + nonlocal_interval::Int + num_atom::Int + + top_atoms::Vector{GenTopAtom} + top_bonds::Vector{GenTopBond} + top_angles::Vector{GenTopAngle} + top_dihedrals::Vector{GenTopDihedral} + top_pairs::Vector{GenTopPair} + top_exclusions::Vector{GenTopExclusion} + top_pwmcos::Vector{GenTopPWMcos} + top_pwmcosns::Vector{GenTopPWMcos} + top_idr_hps::Vector{GenTopRegion} + top_idr_kh::Vector{GenTopRegion} +end + +struct GenTopMolList + mol_name::String + count::Int +end + +struct GenTopology + system_name::String + num_atom::Int + + top_default_params::GenTopDefault + top_default_atomtype::Vector{GenTopAtomType} + top_default_CGDNA_bp::Vector{GenTopCGDNABasepairType} + top_default_CGDNA_bs::Vector{GenTopCGDNABasestackType} + top_default_CGDNA_cs::Vector{GenTopCGDNABasecrossType} + top_default_CGDNA_exv::Vector{GenTopCGDNAExvType} + top_default_CGPro_flx_angle::Vector{GenTopCGProAICGFlexAngleType} + top_default_CGPro_flx_dihedral::Vector{GenTopCGProAICGFlexDihedralType} + top_default_CGIDR_HPS_atomtype::Vector{GenTopCGIDRHPSAtomType} + top_default_CGIDR_KH_atomtype::Vector{GenTopCGIDRKHAtomType} + + global_index_2_local_index::Vector{Int} + global_index_2_local_molid::Vector{Int} + top_atoms::Vector{GenTopAtom} + top_bonds::Vector{GenTopBond} + top_angles::Vector{GenTopAngle} + top_dihedrals::Vector{GenTopDihedral} + top_pairs::Vector{GenTopPair} + top_exclusions::Vector{GenTopExclusion} + top_pwmcos::Vector{GenTopPWMcos} + top_pwmcosns::Vector{GenTopPWMcos} + top_idr_hps::Vector{GenTopRegion} + top_idr_kh::Vector{GenTopRegion} + + top_mol_list::Vector{GenTopMolList} + +end + +","Julia" +"Genesis","noinil/genesis_cg_tool","src/lib/parser_cif.jl",".jl","12327","378","############################################################################### +# ____ ____ ____ __ ____ ___ _____ 
# +# | _ \| _ \| __ )_ __ / / __ ___ _ __ ___ / ___|_ _| ___| # +# | |_) | | | | _ \ \/ / / / '_ ` _ \| '_ ` _ \| | | || |_ # +# | __/| |_| | |_) > < / /| | | | | | | | | | | |___ | || _| # +# |_| |____/|____/_/\_\/_/ |_| |_| |_|_| |_| |_|\____|___|_| # +############################################################################### + +############################################################################### +# Function list +# +# read_mmCIF(cif_name::AbstractString, args::Dict{String, <:Any}=Dict{String, Any}()) +# write_mmCIF(top::GenTopology, conf::Conformation, system_name::AbstractString, args::Dict{String, <:Any}=Dict{String, Any}()) +############################################################################### + +using Printf + +struct mmCIF_Atom_Site_Info # NOT REALLY USED + group_PDB::String # ""ATOM"" or ""HETATM"" + id::Int # atom serial + type_symbol::String # atom type + label_atom_id::String # atom name + label_alt_id::String # identifier for alternative site + label_comp_id::String # 3-char residue name + label_asym_id::String # chain or asymmetric identifier + label_entity_id::Int # chain identifier + label_seq_id::Int # residue number + pdbx_PDB_ins_code::String # PDB insertion code + Cartn_x::Float64 # coordinate x + Cartn_y::Float64 # coordinate y + Cartn_z::Float64 # coordinate z + occupancy::Float64 # occupancy + B_iso_or_equiv::Float64 # B factor + pdbx_formal_charge::Float64 # charge + auth_seq_id::Int # AUTHOR residue number + auth_comp_id::String # AUTHOR 3-char residue name + auth_asym_id::String # AUTHOR chain identifier + auth_atom_id::String # AUTHOR atom name + pdbx_PDB_model_num::Int # model number +end + +# =========== +# Read mmCIF! 
+# =========== +# TODO: multiline CHARACTER values +function mmcif_split(s::AbstractString) + new_words = [] + + word_tmp = """" + is_substring = false + for c in s + if c == '\'' || c == '\""' + is_substring = !is_substring + continue + end + if is_substring + word_tmp *= c + else + if c == ' ' + if length(word_tmp) > 0 + push!(new_words, word_tmp) + end + word_tmp = """" + else + word_tmp *= c + end + end + end + if length(word_tmp) > 0 + push!(new_words, word_tmp) + end + return new_words[:] +end +function read_mmCIF(cif_name::AbstractString, args::Dict{String, <:Any}=Dict{String, Any}()) + + # ================================== + # The only structure is a dictionary + # ================================== + data_attributes = Dict() + + # ============ + # arguments... + # ============ + verbose = get(args, ""verbose"", false) + + # ================== + # local variables... + # ================== + TABULAR_FLAG = false + TABULAR_KEYS = [] + MULTILINE_CHAR_FLAG = false + + # =========== + # let's go... + # =========== + for line in eachline(cif_name) + if startswith(line, ""data"") + data_attributes[""entry_name""] = strip(line) + continue + end + if startswith(line, '#') + continue + end + if startswith(line, ';') + MULTILINE_CHAR_FLAG = ! 
MULTILINE_CHAR_FLAG + end + if MULTILINE_CHAR_FLAG + continue + end + if strip(line) == ""loop_"" + TABULAR_FLAG = true + TABULAR_KEYS = [] + continue + end + if startswith(line, ""ATOM"") || startswith(line, ""HETATM"") + words = split(line) + else + words = mmcif_split(line) + end + # --------- + # key-value + # --------- + if words[1][1] == '_' && length(words) > 1 + cif_key = words[1] + cif_value = words[2] + data_attributes[cif_key] = cif_value + continue + end + # ------------ + # tabular keys + # ------------ + if words[1][1] == '_' && length(words) == 1 && TABULAR_FLAG + cif_key_tabu = words[1] + push!(TABULAR_KEYS, cif_key_tabu) + data_attributes[cif_key_tabu] = [] + continue + end + # -------------- + # tabular values + # -------------- + if words[1][1] != '_' + TABULAR_FLAG = false + for ( i, w ) in enumerate(words) + push!(data_attributes[TABULAR_KEYS[i]], w) + end + end + end + + return data_attributes +end + + +function mmCIF_to_AAMolecule(cif_data::Dict) + # ============================================== + # Extract information frmo the _atom_site block! 
+ # ============================================== + aa_num_atom = count(flag->(flag==""ATOM""), cif_data[""_atom_site.group_PDB""]) + + # ============================== + # Data structures for AAMolecule + # ============================== + aa_atom_name = fill("" "", aa_num_atom) + aa_coor = zeros(Float64, (3, aa_num_atom)) + aa_residues = [] + aa_chains = [] + + # --------------- + # Local variables + # --------------- + i_resid = 0 + curr_resid = NaN + curr_chain = ""?"" + curr_rname = "" "" + residue_serial = NaN + residue_name = "" "" + chain_id = ""?"" + tmp_res_atoms = [] + tmp_chain_res = [] + segment_id = "" "" + + # -------------------------------- + # Add atoms to residues and chains + # -------------------------------- + for i_atom in 1:aa_num_atom + 1 + + if i_atom <= aa_num_atom + atom_name = cif_data[""_atom_site.label_atom_id""][i_atom] + residue_name = cif_data[""_atom_site.label_comp_id""][i_atom] + chain_id = cif_data[""_atom_site.label_asym_id""][i_atom] + residue_serial = parse(Int, cif_data[""_atom_site.label_seq_id""][i_atom]) + coor_x = parse(Float64, cif_data[""_atom_site.Cartn_x""][i_atom]) + coor_y = parse(Float64, cif_data[""_atom_site.Cartn_y""][i_atom]) + coor_z = parse(Float64, cif_data[""_atom_site.Cartn_z""][i_atom]) + segment_id = "" "" + end + + if chain_id != curr_chain || residue_serial > curr_resid + 1 || i_atom > aa_num_atom + if length(tmp_res_atoms) > 0 + push!(aa_residues, AAResidue(curr_rname, tmp_res_atoms)) + tmp_res_atoms = [] + end + if length(tmp_chain_res) > 0 + + # ------------------------------- + # Determine chain molecule type + # ------------------------------- + mol_type = -1 + for i_res in tmp_chain_res + res_name = aa_residues[i_res].name + tmp_mol_type = MOL_OTHER + if in(res_name, RES_NAME_LIST_PROTEIN) + tmp_mol_type = MOL_PROTEIN + elseif in(res_name, RES_NAME_LIST_DNA) + tmp_mol_type = MOL_DNA + elseif in(res_name, RES_NAME_LIST_RNA) + tmp_mol_type = MOL_RNA + elseif haskey(RES_NAME_RNA_DICT, res_name) 
|| haskey(RES_NAME_DNA_DICT, res_name) + tmp_mol_type = MOL_DNA + for i_atom in aa_residues[i_res].atoms + atom_name = aa_atom_name[i_atom] + if atom_name == ""O2'"" + tmp_mol_type = MOL_RNA + break + end + end + end + if mol_type == -1 + mol_type = tmp_mol_type + elseif tmp_mol_type != mol_type + errmsg = @sprintf(""BUG: Inconsistent residue types in chain ID - %s residue - %d : %s "", + chain_id, + i_res, + res_name) + error(errmsg) + end + end + # -------------------------------------- + # chain mol type determination ends here + # -------------------------------------- + + push!(aa_chains, AAChain(chain_id, segment_id, mol_type, tmp_chain_res)) + tmp_chain_res = [] + end + curr_chain = chain_id + if i_atom > aa_num_atom + break + end + end + + aa_atom_name[i_atom] = atom_name + aa_coor[1, i_atom] = coor_x + aa_coor[2, i_atom] = coor_y + aa_coor[3, i_atom] = coor_z + + if residue_serial != curr_resid + i_resid += 1 + push!(tmp_chain_res, i_resid) + if length(tmp_res_atoms) > 0 + push!(aa_residues, AAResidue(curr_rname, tmp_res_atoms)) + tmp_res_atoms = [] + end + curr_resid = residue_serial + curr_rname = residue_name + end + + push!(tmp_res_atoms, i_atom) + end + + new_molecule = AAMolecule(aa_atom_name, aa_coor, aa_residues, aa_chains) + + return new_molecule + +end + + +# =============== +# Output CG mmCIF +# =============== +function write_mmCIF(top::GenTopology, conf::Conformation, system_name::AbstractString, args::Dict{String, <:Any}=Dict{String, Any}()) + + verbose = get(args, ""verbose"", false) + + cif_name = system_name * "".cif"" + cif_file = open(cif_name, ""w"") + + # ========== + # write head + # ========== + println(cif_file, ""data_$system_name"") + + # ============ + # common lines + # ============ + atom_site_dict_string = """"""# +loop_ +_atom_site.group_PDB +_atom_site.id +_atom_site.type_symbol +_atom_site.label_atom_id +_atom_site.label_alt_id +_atom_site.label_comp_id +_atom_site.label_asym_id +_atom_site.label_entity_id 
+_atom_site.label_seq_id +_atom_site.pdbx_PDB_ins_code +_atom_site.Cartn_x +_atom_site.Cartn_y +_atom_site.Cartn_z +_atom_site.occupancy +_atom_site.B_iso_or_equiv +_atom_site.pdbx_formal_charge +"""""" + print(cif_file, atom_site_dict_string) + + + # ==================== + # write CIF atom sites + # ==================== + num_particles = conf.num_particle + + chain_id_set = ""ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"" + tmp_chain_id = 0 + tmp_seg_name = """" + real_chain_id = 1 + asym_id = """" + for i_bead in 1 : num_particles + i_chain = top.top_atoms[i_bead].chain_id + i_sname = top.top_atoms[i_bead].seg_name + if i_chain != tmp_chain_id || i_sname != tmp_seg_name + if tmp_chain_id > 0 + real_chain_id += 1 + end + tmp_chain_id = i_chain + tmp_seg_name = i_sname + + # ------------------ + # determine chain id + # ------------------ + asym_id = """" + i_tmp = real_chain_id + while i_tmp > 0 + (i_tmp, j) = divrem(i_tmp - 1, 52) + asym_id *= chain_id_set[j + 1] + end + end + + @printf(cif_file, + ""ATOM %10d %2s %4s %1s %4s %4s %2d %8d %1s %15.3f %15.3f %15.3f %8.3f %8.3f %8.3f \n"", + i_bead, + # top.top_atoms[i_bead].atom_type[1], + ""C"", + top.top_atoms[i_bead].atom_name, + ""."", + top.top_atoms[i_bead].residue_name, + asym_id, + 1, + top.top_atoms[i_bead].residue_index, + ""?"", + conf.coors[1 , i_bead], + conf.coors[2 , i_bead], + conf.coors[3 , i_bead], + 0.0, + 0.0, + top.top_atoms[i_bead].charge + ) + end + + print(cif_file,""#\n"") + print(cif_file,""\n"") + + close(cif_file) + + if verbose + println(""> ... .pdb (CG) : DONE!"") + end + +end +","Julia" +"Genesis","noinil/genesis_cg_tool","opt/dependency.jl",".jl","55","7","#!/usr/bin/env julia + +import Pkg + +Pkg.add(""ArgParse"") + +","Julia"