file_name large_stringlengths 4 140 | prefix large_stringlengths 0 12.1k | suffix large_stringlengths 0 12k | middle large_stringlengths 0 7.51k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
chart.js | Update]').click(function () {
getTopRevs();
getBotRevs();
});
$('[name=chartUpdate]').click(function () {
var whichChart = $('[name=chartSelector]').val();
if (whichChart == "In Total") {
drawPie('#myChart');
} else {
drawBar('#myChart');
}
});
});
}
function getAuthorAnalyticsPage() {
$('#main').empty();
$('#main').load('views/authorAnalytics.html', null, function () {
$('#authorSearchButton').click(function () {
getAuthorArticleList();
})
});
}
//clears the .active class from the menu bar
function resetMenuBar() {
$('#Overview').removeClass("active");
$('#ArticleAnalytics').removeClass("active");
$('#AuthorAnalytics').removeClass("active");
}
/******************
LOAD THE CHART DATA
*******************/
function drawPie(where) {
console.log(where)
graphData = new google.visualization.DataTable();
graphData.addColumn('string', 'Element');
graphData.addColumn('number', 'Percentage');
$.each(pieData, function (key, val) {
graphData.addRow([key, val]);
})
var chart = new google.visualization.PieChart($(where)[0]);
chart.draw(graphData, options);
}
function drawBar(where) {
graphData = new google.visualization.DataTable();
graphData.addColumn('string', 'Year');
graphData.addColumn('number', 'RegularUsers');
graphData.addColumn('number', 'Bots');
graphData.addColumn('number', 'Admins');
graphData.addColumn('number', 'Anon');
var test = [];
for (var i in barData) {
test.push(barData[i])
}
// console.log(test);
for (var x = 0; x < test.length; x++) {
graphData.addRow([test[x].Year, test[x].RegularUsers, test[x].Bots, test[x].Admins, test[x].Anon]);
}
var chart = new google.visualization.ColumnChart($(where)[0]);
chart.draw(graphData, options);
}
function drawBarSpecificUser(where, dataToUse) {
graphData = new google.visualization.DataTable();
graphData.addColumn('string', 'Year');
graphData.addColumn('number', 'Revisions');
var test = [];
for (var i in dataToUse) {
test.push(dataToUse[i])
}
// console.log(test);
for (var x = 0; x < test.length; x++) {
graphData.addRow([test[x].Year, test[x].Revisions]);
}
var chart = new google.visualization.BarChart($(where)[0]);
chart.draw(graphData, options);
}
/*******************************************
FUNCTIONS FOR LOADING REGULAR DATA INTO HTML
********************************************/
function getTopRevs() {
var quantity = $('[name=quantity]').val();
var destination = 'getTopRevs?quantity=' + quantity;
$.get(destination, quantity, function (data) {
$('#topRev').empty();
for (var x = 0; x < data.length; x++) {
var num = x + 1;
num = num + '. ';
var appendMe = $('<li>' + num + data[x]._id + '</li>');
$('#topRev').append(appendMe);
}
})
}
function getBotRevs() {
var quantity = $('[name=quantity]').val();
var destination = 'getBotRevs?quantity=' + quantity;
$.get(destination, quantity, function (data) {
$('#botRev').empty();
for (var x = 0; x < data.length; x++) {
var num = x + 1;
num = num + '. ';
var appendMe = $('<li>' + num + data[x]._id + '</li>');
$('#botRev').append(appendMe);
}
})
}
function getOldestArticles() {
var destination = 'getOldestArticles';
$.get(destination, null, function (data) {
console.log(data);
$('#oldestArticles').empty();
for (var x = 0; x < data.length; x++) {
var num = x + 1;
num = num + '. ';
var appendMe = $('<li>' + num + data[x]._id + '</li>');
$('#oldestArticles').append(appendMe);
}
})
}
function getNewestArticles() {
var destination = 'getNewestArticles';
console.log('here');
$.get(destination, null, function (data) {
console.log(data);
$('#newestArticles').empty();
for (var x = 0; x < data.length; x++) {
var num = x + 1;
num = num + '. ';
var appendMe = $('<li>' + num + data[x]._id + '</li>');
$('#newestArticles').append(appendMe);
}
})
}
function getTitleLargestRegUser(){
var destination = 'getLeastRegoUser';
$.get(destination, null, function (data) {
console.log(data);
$('#mostUsers').empty();
$('#mostUsers').text(data);
})
}
function getTitleLeastRegUser(){
var destination = 'getLargestRegoUser';
$.get(destination, null, function (data) {
console.log(data);
$('#leastUsers').empty();
$('#leastUsers').text(data);
})
}
function getAuthorArticleList() {
var authorName = $('#authorEntryBox').val();
console.log(authorName)
var destination = 'getAuthorArticleList?authorName=' + authorName;
var putListHere = $('#articleList');
$.get(destination, null, function (data) {
console.log('Here is the user list ')
console.log(data)
if (data.length == 0) {
alert("Could not find any users with names matching that query");
} else {
// var heading = $('<thead><tr>' + '<th>' + 'Article Name' + '</th>' + '<th>' + 'Number of Revisions' + '</th>' + '</tr></thead><tbody>')
// $('#articleList').append(heading);
// for (var x = 0; x < data.length; x++) {
// var test = "<tr onclick='getTimestamps()' id= '" + "ArticleNameIs" + data[x]._id + "'>" + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>';
// var appendMe = $("<tr onclick='getTimestamps()' id= '" + "ArticleNameIs" + data[x]._id + "'>" + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>');
// console.log(test)
// $('#articleList').append(appendMe);
// }
// var ending = $('</tbody>');
// $('#articleList').append(ending);
putListHere.empty();
//Add headers
var theader = $("<thead><tr><th>User Name</th><th>Article Name</th><th>Number of Revisions</th></tr></thead>")
$('#articleList').append(theader);
//Create data table
for (var x = 0; x < data.length; x++) |
//Create event handler seperately
function handleEvent(idVal){
var elementGetter = '#entryID' + idVal;
$(elementGetter).click(function(){
$(".timestamp").remove();
console.log(elementGetter)
var newdestination = 'getTimestamps?authorName=' + data[idVal]._id + "&title=" + data[idVal].user;
$.get(newdestination, null, function (newdata) {
console.log(newdata)
for(var z = 0; z < newdata.length; z++){
var myDate = new Date(newdata[z].timestamp)
console.log(myDate)
$('<tr><td class="timestamp">' + " " + myDate.toUTCString() + '</td></tr>').insertAfter(elementGetter);
}
})
})
}
for(var x = 0; x < data.length; x++){
handleEvent(x)
}
}
})
}
function fillAutocomplete() {
var destination = 'getAllArticleTitles'
$.get(destination, null, function (data) {
$('#articleEntryList').empty();
for (var x = 0; x < data.length; x++) {
console.log(data[x])
var appendMe = $('<option>' + data[x]._id + " [revisions: " + data[x].count + ']</option>')
$('#articleEntryList').append(appendMe);
allArticleTitles[x] = data[x]._id;
allArt | {
var appnedMe = $("<tr class='articleEntry' id= '" + "entryID" + x + "'>" + '<td>' + data[x].user + '</td>' + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>');
$('#articleList').append(appnedMe);
var temp = '#entryID' + x;
// $(temp).click(function(x){ //Get timestamps
// console.log(x)
// })
} | conditional_block |
start.go | = "Use system certificate pool." +
" Possible values [true] [false]. Defaults to false if not set." +
" Alternatively, this can be set with the following environment variable: " + tlsSystemCertPoolEnvKey
tlsSystemCertPoolEnvKey = "ADAPTER_REST_TLS_SYSTEMCERTPOOL"
tlsCACertsFlagName = "tls-cacerts"
tlsCACertsFlagUsage = "Comma-Separated list of ca certs path." +
" Alternatively, this can be set with the following environment variable: " + tlsCACertsEnvKey
tlsCACertsEnvKey = "ADAPTER_REST_TLS_CACERTS"
presentationDefinitionsFlagName = "presentation-definitions-file"
presentationDefinitionsFlagUsage = "Path to presentation definitions file with input_descriptors."
presentationDefinitionsEnvKey = "ADAPTER_REST_PRESENTATION_DEFINITIONS_FILE"
hydraURLFlagName = "hydra-url"
hydraURLFlagUsage = "Base URL to the hydra service." +
"Alternatively, this can be set with the following environment variable: " + hydraURLEnvKey
hydraURLEnvKey = "ADAPTER_REST_HYDRA_URL"
modeFlagName = "mode"
modeFlagUsage = "Mode in which the edge-adapter service will run. Possible values: " +
"['issuer', 'rp']."
modeEnvKey = "ADAPTER_REST_MODE"
// inbound host url flag
didCommInboundHostFlagName = "didcomm-inbound-host"
didCommInboundHostEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST"
didCommInboundHostFlagUsage = "Inbound Host Name:Port. This is used internally to start the didcomm server." +
" Alternatively, this can be set with the following environment variable: " + didCommInboundHostEnvKey
// inbound host external url flag
didCommInboundHostExternalFlagName = "didcomm-inbound-host-external"
didCommInboundHostExternalEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST_EXTERNAL"
didCommInboundHostExternalFlagUsage = "Inbound Host External Name:Port." +
" This is the URL for the inbound server as seen externally." +
" If not provided, then the internal inbound host will be used here." +
" Alternatively, this can be set with the following environment variable: " + didCommInboundHostExternalEnvKey
// db path
didCommDBPathFlagName = "didcomm-db-path"
didCommDBPathEnvKey = "ADAPTER_REST_DIDCOMM_DB_PATH"
didCommDBPathFlagUsage = "Path to database." +
" Alternatively, this can be set with the following environment variable: " + didCommDBPathEnvKey
trustblocDomainFlagName = "dids-trustbloc-domain"
trustblocDomainEnvKey = "ADAPTER_REST_TRUSTBLOC_DOMAIN"
trustblocDomainFlagUsage = "URL to the did:trustbloc consortium's domain." +
" Alternatively, this can be set with the following environment variable: " + trustblocDomainEnvKey
)
// API endpoints.
const (
uiEndpoint = "/ui"
// modes
issuerMode = "issuer"
rpMode = "rp"
)
type didCommParameters struct {
inboundHostInternal string
inboundHostExternal string
dbPath string
}
type adapterRestParameters struct {
hostURL string
tlsSystemCertPool bool
tlsCACerts []string
dsn string
oidcProviderURL string
staticFiles string
presentationDefinitionsFile string
// TODO assuming same base path for all hydra endpoints for now
hydraURL string
mode string
didCommParameters *didCommParameters // didcomm
trustblocDomain string
}
type server interface {
ListenAndServe(host string, router http.Handler) error
}
// HTTPServer represents an actual HTTP server implementation.
type HTTPServer struct{}
// ListenAndServe starts the server using the standard Go HTTP server implementation.
func (s *HTTPServer) ListenAndServe(host string, router http.Handler) error {
return http.ListenAndServe(host, router)
}
// GetStartCmd returns the Cobra start command.
func GetStartCmd(srv server) *cobra.Command {
startCmd := createStartCmd(srv)
createFlags(startCmd)
return startCmd
}
func createStartCmd(srv server) *cobra.Command {
return &cobra.Command{
Use: "start",
Short: "Start adapter-rest",
Long: "Start adapter-rest inside the edge-adapter",
RunE: func(cmd *cobra.Command, args []string) error {
parameters, err := getAdapterRestParameters(cmd)
if err != nil {
return err
}
return startAdapterService(parameters, srv)
},
}
}
//nolint:funlen,gocyclo
func getAdapterRestParameters(cmd *cobra.Command) (*adapterRestParameters, error) {
hostURL, err := cmdutils.GetUserSetVarFromString(cmd, hostURLFlagName, hostURLEnvKey, false)
if err != nil {
return nil, err
}
tlsSystemCertPool, tlsCACerts, err := getTLS(cmd)
if err != nil {
return nil, err
}
dsn, err := cmdutils.GetUserSetVarFromString(cmd, datasourceNameFlagName, datasourceNameEnvKey, true)
if err != nil {
return nil, err
}
oidcURL, err := cmdutils.GetUserSetVarFromString(cmd, oidcProviderURLFlagName, oidcProviderEnvKey, true)
if err != nil {
return nil, err
}
staticFiles, err := cmdutils.GetUserSetVarFromString(cmd, staticFilesPathFlagName, staticFilesPathEnvKey, true)
if err != nil {
return nil, err
}
mode, err := cmdutils.GetUserSetVarFromString(cmd, modeFlagName, modeEnvKey, true)
if err != nil {
return nil, err
}
presentationDefinitionsFile, err := cmdutils.GetUserSetVarFromString(cmd, presentationDefinitionsFlagName,
presentationDefinitionsEnvKey, mode != rpMode)
if err != nil {
return nil, err
}
hydraURL, err := cmdutils.GetUserSetVarFromString(cmd, hydraURLFlagName, hydraURLEnvKey, true)
if err != nil {
return nil, err
}
// didcomm
didCommParameters, err := getDIDCommParams(cmd)
if err != nil {
return nil, err
}
trustblocDomain, err := cmdutils.GetUserSetVarFromString(cmd, trustblocDomainFlagName, trustblocDomainEnvKey, true)
if err != nil {
return nil, err
}
return &adapterRestParameters{
hostURL: hostURL,
tlsSystemCertPool: tlsSystemCertPool,
tlsCACerts: tlsCACerts,
dsn: dsn,
oidcProviderURL: oidcURL,
staticFiles: staticFiles,
presentationDefinitionsFile: presentationDefinitionsFile,
hydraURL: hydraURL,
mode: mode,
didCommParameters: didCommParameters,
trustblocDomain: trustblocDomain,
}, nil
}
func getDIDCommParams(cmd *cobra.Command) (*didCommParameters, error) {
inboundHostInternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostFlagName,
didCommInboundHostEnvKey, true)
if err != nil {
return nil, err
}
inboundHostExternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostExternalFlagName,
didCommInboundHostExternalEnvKey, true)
if err != nil {
return nil, err
}
dbPath, err := cmdutils.GetUserSetVarFromString(cmd, didCommDBPathFlagName, didCommDBPathEnvKey, true)
if err != nil {
return nil, err
}
return &didCommParameters{
inboundHostInternal: inboundHostInternal,
inboundHostExternal: inboundHostExternal,
dbPath: dbPath,
}, nil
}
func getTLS(cmd *cobra.Command) (bool, []string, error) | return tlsSystemCertPool, tlsCACerts, nil
}
func createFlags(startCmd *cobra.Command) {
startCmd.Flags().StringP(hostURLFlagName, hostURLFlagShorthand, "", hostURLFlagUsage)
startCmd.Flags().StringP(tlsSystemCertPoolFlagName, "", "", tlsSystemCertPoolFlagUsage)
start | {
tlsSystemCertPoolString, err := cmdutils.GetUserSetVarFromString(cmd, tlsSystemCertPoolFlagName,
tlsSystemCertPoolEnvKey, true)
if err != nil {
return false, nil, err
}
tlsSystemCertPool := false
if tlsSystemCertPoolString != "" {
tlsSystemCertPool, err = strconv.ParseBool(tlsSystemCertPoolString)
if err != nil {
return false, nil, err
}
}
tlsCACerts, err := cmdutils.GetUserSetVarFromArrayString(cmd, tlsCACertsFlagName, tlsCACertsEnvKey, true)
if err != nil {
return false, nil, err
}
| identifier_body |
start.go | err != nil {
return nil, err
}
return &didCommParameters{
inboundHostInternal: inboundHostInternal,
inboundHostExternal: inboundHostExternal,
dbPath: dbPath,
}, nil
}
func getTLS(cmd *cobra.Command) (bool, []string, error) {
tlsSystemCertPoolString, err := cmdutils.GetUserSetVarFromString(cmd, tlsSystemCertPoolFlagName,
tlsSystemCertPoolEnvKey, true)
if err != nil {
return false, nil, err
}
tlsSystemCertPool := false
if tlsSystemCertPoolString != "" {
tlsSystemCertPool, err = strconv.ParseBool(tlsSystemCertPoolString)
if err != nil {
return false, nil, err
}
}
tlsCACerts, err := cmdutils.GetUserSetVarFromArrayString(cmd, tlsCACertsFlagName, tlsCACertsEnvKey, true)
if err != nil {
return false, nil, err
}
return tlsSystemCertPool, tlsCACerts, nil
}
func createFlags(startCmd *cobra.Command) {
startCmd.Flags().StringP(hostURLFlagName, hostURLFlagShorthand, "", hostURLFlagUsage)
startCmd.Flags().StringP(tlsSystemCertPoolFlagName, "", "", tlsSystemCertPoolFlagUsage)
startCmd.Flags().StringArrayP(tlsCACertsFlagName, "", []string{}, tlsCACertsFlagUsage)
startCmd.Flags().StringP(oidcProviderURLFlagName, "", "", oidcProviderURLFlagUsage)
startCmd.Flags().StringP(datasourceNameFlagName, "", "", datasourceNameFlagUsage)
startCmd.Flags().StringP(staticFilesPathFlagName, "", "", staticFilesPathFlagUsage)
startCmd.Flags().StringP(presentationDefinitionsFlagName, "", "", presentationDefinitionsFlagUsage)
startCmd.Flags().StringP(hydraURLFlagName, "", "", hydraURLFlagUsage)
startCmd.Flags().StringP(modeFlagName, "", "", modeFlagUsage)
// didcomm
startCmd.Flags().StringP(didCommInboundHostFlagName, "", "", didCommInboundHostFlagUsage)
startCmd.Flags().StringP(didCommInboundHostExternalFlagName, "", "", didCommInboundHostExternalFlagUsage)
startCmd.Flags().StringP(didCommDBPathFlagName, "", "", didCommDBPathFlagUsage)
startCmd.Flags().StringP(trustblocDomainFlagName, "", "", trustblocDomainFlagUsage)
}
func startAdapterService(parameters *adapterRestParameters, srv server) error {
rootCAs, err := tlsutils.GetCertPool(parameters.tlsSystemCertPool, parameters.tlsCACerts)
if err != nil {
return err
}
logger.Debugf("root ca's %v", rootCAs)
router := mux.NewRouter()
// add health check endpoint
healthCheckService := healthcheck.New()
healthCheckHandlers := healthCheckService.GetOperations()
for _, handler := range healthCheckHandlers {
router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method())
}
ariesCtx, err := createAriesAgent(parameters, &tls.Config{RootCAs: rootCAs})
if err != nil {
return err
}
// add endpoints
switch parameters.mode {
case rpMode:
err = addRPHandlers(parameters, ariesCtx, router, rootCAs)
if err != nil {
return nil
}
case issuerMode:
err = addIssuerHandlers(parameters, ariesCtx, router)
if err != nil {
return nil
}
default:
return fmt.Errorf("invalid mode : %s", parameters.mode)
}
logger.Infof("starting %s adapter rest server on host %s", parameters.mode, parameters.hostURL)
return srv.ListenAndServe(parameters.hostURL, constructCORSHandler(router))
}
func addRPHandlers(
parameters *adapterRestParameters, ctx ariespai.CtxProvider, router *mux.Router, rootCAs *x509.CertPool) error {
presentationExProvider, err := presentationex.New(parameters.presentationDefinitionsFile)
if err != nil {
return err
}
hydraURL, err := url.Parse(parameters.hydraURL)
if err != nil {
return err
}
didClient, err := didexchange.New(ctx)
if err != nil {
return fmt.Errorf("failed to initialized didexchange client : %w", err)
}
presentProofClient, err := presentproof.New(ctx)
if err != nil {
return err
}
// TODO init OIDC stuff in iteration 2 - https://github.com/trustbloc/edge-adapter/issues/24
// add rp endpoints
rpService, err := rp.New(&rpops.Config{
PresentationExProvider: presentationExProvider,
Hydra: hydra.NewClient(hydraURL, rootCAs),
UIEndpoint: uiEndpoint,
DIDExchClient: didClient,
Store: memstore.NewProvider(),
PublicDIDCreator: did.NewTrustblocDIDCreator(
parameters.trustblocDomain,
parameters.didCommParameters.inboundHostExternal,
ctx.KMS(),
rootCAs),
AriesStorageProvider: ctx,
PresentProofClient: presentProofClient,
})
if err != nil {
return err
}
rpHandlers := rpService.GetOperations()
for _, handler := range rpHandlers {
router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method())
}
// static frontend
router.PathPrefix(uiEndpoint).
Subrouter().
Methods(http.MethodGet).
HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile))
return nil
}
func addIssuerHandlers(parameters *adapterRestParameters, ariesCtx ariespai.CtxProvider, router *mux.Router) error {
// add issuer endpoints
issuerService, err := issuer.New(&issuerops.Config{
AriesCtx: ariesCtx,
UIEndpoint: uiEndpoint,
// TODO https://github.com/trustbloc/edge-adapter/issues/42 use sql store
StoreProvider: memstore.NewProvider(),
})
if err != nil {
return err
}
rpHandlers := issuerService.GetOperations()
for _, handler := range rpHandlers {
router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method())
}
// static frontend
router.PathPrefix(uiEndpoint).
Subrouter().
Methods(http.MethodGet).
HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile))
return nil
}
func uiHandler(
basePath string,
fileServer func(http.ResponseWriter, *http.Request, string)) func(http.ResponseWriter, *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == uiEndpoint {
fileServer(w, r, strings.ReplaceAll(basePath+"/index.html", "//", "/"))
return
}
fileServer(w, r, strings.ReplaceAll(basePath+"/"+r.URL.Path[len(uiEndpoint):], "//", "/"))
}
}
func constructCORSHandler(handler http.Handler) http.Handler {
return cors.New(
cors.Options{
AllowedMethods: []string{http.MethodGet, http.MethodPost},
AllowedHeaders: []string{"Origin", "Accept", "Content-Type", "X-Requested-With", "Authorization"},
},
).Handler(handler)
}
//nolint:deadcode,unused
func initDB(dsn string) (*sql.DB, error) {
const (
sleep = 1 * time.Second
numRetries = 30
)
var dbms *sql.DB
err := backoff.RetryNotify(
func() error {
var openErr error
dbms, openErr = dburl.Open(dsn)
return openErr
},
backoff.WithMaxRetries(backoff.NewConstantBackOff(sleep), numRetries),
func(retryErr error, t time.Duration) {
logger.Warnf(
"failed to connect to database, will sleep for %d before trying again : %s\n",
t, retryErr)
},
)
if err != nil {
return nil, fmt.Errorf("failed to connect to database at %s : %w", dsn, err)
}
return dbms, nil
}
func createAriesAgent(parameters *adapterRestParameters, tlsConfig *tls.Config) (*ariesctx.Provider, error) {
var opts []aries.Option
if parameters.didCommParameters.inboundHostInternal == "" {
return nil, errors.New("didcomm inbound host is mandatory")
}
if parameters.didCommParameters.dbPath != "" {
opts = append(opts, defaults.WithStorePath(parameters.didCommParameters.dbPath))
}
inboundTransportOpt := defaults.WithInboundHTTPAddr(parameters.didCommParameters.inboundHostInternal,
parameters.didCommParameters.inboundHostExternal)
opts = append(opts, inboundTransportOpt)
outbound, err := arieshttp.NewOutbound(arieshttp.WithOutboundTLSConfig(tlsConfig))
if err != nil {
return nil, fmt.Errorf("aries-framework - failed to create outbound tranpsort opts : %w", err)
}
opts = append(opts, aries.WithOutboundTransports(outbound))
framework, err := aries.New(opts...)
if err != nil {
return nil, fmt.Errorf("aries-framework - failed to initialize framework : %w", err)
}
ctx, err := framework.Context()
if err != nil | {
return nil, fmt.Errorf("aries-framework - failed to get aries context : %w", err)
} | conditional_block | |
start.go | oidcProviderURLFlagName, oidcProviderEnvKey, true)
if err != nil {
return nil, err
}
staticFiles, err := cmdutils.GetUserSetVarFromString(cmd, staticFilesPathFlagName, staticFilesPathEnvKey, true)
if err != nil {
return nil, err
}
mode, err := cmdutils.GetUserSetVarFromString(cmd, modeFlagName, modeEnvKey, true)
if err != nil {
return nil, err
}
presentationDefinitionsFile, err := cmdutils.GetUserSetVarFromString(cmd, presentationDefinitionsFlagName,
presentationDefinitionsEnvKey, mode != rpMode)
if err != nil {
return nil, err
}
hydraURL, err := cmdutils.GetUserSetVarFromString(cmd, hydraURLFlagName, hydraURLEnvKey, true)
if err != nil {
return nil, err
}
// didcomm
didCommParameters, err := getDIDCommParams(cmd)
if err != nil {
return nil, err
}
trustblocDomain, err := cmdutils.GetUserSetVarFromString(cmd, trustblocDomainFlagName, trustblocDomainEnvKey, true)
if err != nil {
return nil, err
}
return &adapterRestParameters{
hostURL: hostURL,
tlsSystemCertPool: tlsSystemCertPool,
tlsCACerts: tlsCACerts,
dsn: dsn,
oidcProviderURL: oidcURL,
staticFiles: staticFiles,
presentationDefinitionsFile: presentationDefinitionsFile,
hydraURL: hydraURL,
mode: mode,
didCommParameters: didCommParameters,
trustblocDomain: trustblocDomain,
}, nil
}
func getDIDCommParams(cmd *cobra.Command) (*didCommParameters, error) {
inboundHostInternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostFlagName,
didCommInboundHostEnvKey, true)
if err != nil {
return nil, err
}
inboundHostExternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostExternalFlagName,
didCommInboundHostExternalEnvKey, true)
if err != nil {
return nil, err
}
dbPath, err := cmdutils.GetUserSetVarFromString(cmd, didCommDBPathFlagName, didCommDBPathEnvKey, true)
if err != nil {
return nil, err
}
return &didCommParameters{
inboundHostInternal: inboundHostInternal,
inboundHostExternal: inboundHostExternal,
dbPath: dbPath,
}, nil
}
func getTLS(cmd *cobra.Command) (bool, []string, error) {
tlsSystemCertPoolString, err := cmdutils.GetUserSetVarFromString(cmd, tlsSystemCertPoolFlagName,
tlsSystemCertPoolEnvKey, true)
if err != nil {
return false, nil, err
}
tlsSystemCertPool := false
if tlsSystemCertPoolString != "" {
tlsSystemCertPool, err = strconv.ParseBool(tlsSystemCertPoolString)
if err != nil {
return false, nil, err
}
}
tlsCACerts, err := cmdutils.GetUserSetVarFromArrayString(cmd, tlsCACertsFlagName, tlsCACertsEnvKey, true)
if err != nil {
return false, nil, err
}
return tlsSystemCertPool, tlsCACerts, nil
}
func createFlags(startCmd *cobra.Command) {
startCmd.Flags().StringP(hostURLFlagName, hostURLFlagShorthand, "", hostURLFlagUsage)
startCmd.Flags().StringP(tlsSystemCertPoolFlagName, "", "", tlsSystemCertPoolFlagUsage)
startCmd.Flags().StringArrayP(tlsCACertsFlagName, "", []string{}, tlsCACertsFlagUsage)
startCmd.Flags().StringP(oidcProviderURLFlagName, "", "", oidcProviderURLFlagUsage)
startCmd.Flags().StringP(datasourceNameFlagName, "", "", datasourceNameFlagUsage)
startCmd.Flags().StringP(staticFilesPathFlagName, "", "", staticFilesPathFlagUsage)
startCmd.Flags().StringP(presentationDefinitionsFlagName, "", "", presentationDefinitionsFlagUsage)
startCmd.Flags().StringP(hydraURLFlagName, "", "", hydraURLFlagUsage)
startCmd.Flags().StringP(modeFlagName, "", "", modeFlagUsage)
// didcomm
startCmd.Flags().StringP(didCommInboundHostFlagName, "", "", didCommInboundHostFlagUsage)
startCmd.Flags().StringP(didCommInboundHostExternalFlagName, "", "", didCommInboundHostExternalFlagUsage)
startCmd.Flags().StringP(didCommDBPathFlagName, "", "", didCommDBPathFlagUsage)
startCmd.Flags().StringP(trustblocDomainFlagName, "", "", trustblocDomainFlagUsage)
}
func startAdapterService(parameters *adapterRestParameters, srv server) error {
rootCAs, err := tlsutils.GetCertPool(parameters.tlsSystemCertPool, parameters.tlsCACerts)
if err != nil {
return err
}
logger.Debugf("root ca's %v", rootCAs)
router := mux.NewRouter()
// add health check endpoint
healthCheckService := healthcheck.New()
healthCheckHandlers := healthCheckService.GetOperations()
for _, handler := range healthCheckHandlers {
router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method())
}
ariesCtx, err := createAriesAgent(parameters, &tls.Config{RootCAs: rootCAs})
if err != nil {
return err
}
// add endpoints
switch parameters.mode {
case rpMode:
err = addRPHandlers(parameters, ariesCtx, router, rootCAs)
if err != nil {
return nil
}
case issuerMode:
err = addIssuerHandlers(parameters, ariesCtx, router)
if err != nil {
return nil
}
default:
return fmt.Errorf("invalid mode : %s", parameters.mode)
}
logger.Infof("starting %s adapter rest server on host %s", parameters.mode, parameters.hostURL)
return srv.ListenAndServe(parameters.hostURL, constructCORSHandler(router))
}
func addRPHandlers(
parameters *adapterRestParameters, ctx ariespai.CtxProvider, router *mux.Router, rootCAs *x509.CertPool) error {
presentationExProvider, err := presentationex.New(parameters.presentationDefinitionsFile)
if err != nil {
return err
}
hydraURL, err := url.Parse(parameters.hydraURL)
if err != nil {
return err
}
didClient, err := didexchange.New(ctx)
if err != nil {
return fmt.Errorf("failed to initialized didexchange client : %w", err)
}
presentProofClient, err := presentproof.New(ctx)
if err != nil {
return err
}
// TODO init OIDC stuff in iteration 2 - https://github.com/trustbloc/edge-adapter/issues/24
// add rp endpoints
rpService, err := rp.New(&rpops.Config{
PresentationExProvider: presentationExProvider,
Hydra: hydra.NewClient(hydraURL, rootCAs),
UIEndpoint: uiEndpoint,
DIDExchClient: didClient,
Store: memstore.NewProvider(),
PublicDIDCreator: did.NewTrustblocDIDCreator(
parameters.trustblocDomain,
parameters.didCommParameters.inboundHostExternal,
ctx.KMS(),
rootCAs),
AriesStorageProvider: ctx,
PresentProofClient: presentProofClient,
})
if err != nil {
return err
}
rpHandlers := rpService.GetOperations()
for _, handler := range rpHandlers {
router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method())
}
// static frontend
router.PathPrefix(uiEndpoint).
Subrouter().
Methods(http.MethodGet).
HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile))
return nil
}
func addIssuerHandlers(parameters *adapterRestParameters, ariesCtx ariespai.CtxProvider, router *mux.Router) error {
// add issuer endpoints
issuerService, err := issuer.New(&issuerops.Config{
AriesCtx: ariesCtx,
UIEndpoint: uiEndpoint,
// TODO https://github.com/trustbloc/edge-adapter/issues/42 use sql store
StoreProvider: memstore.NewProvider(),
})
if err != nil {
return err
}
rpHandlers := issuerService.GetOperations()
for _, handler := range rpHandlers {
router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method())
}
// static frontend
router.PathPrefix(uiEndpoint).
Subrouter().
Methods(http.MethodGet).
HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile))
return nil
}
func uiHandler(
basePath string,
fileServer func(http.ResponseWriter, *http.Request, string)) func(http.ResponseWriter, *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == uiEndpoint {
fileServer(w, r, strings.ReplaceAll(basePath+"/index.html", "//", "/"))
return
}
fileServer(w, r, strings.ReplaceAll(basePath+"/"+r.URL.Path[len(uiEndpoint):], "//", "/"))
}
}
func | constructCORSHandler | identifier_name | |
start.go | {}
// ListenAndServe starts the server using the standard Go HTTP server implementation.
func (s *HTTPServer) ListenAndServe(host string, router http.Handler) error {
return http.ListenAndServe(host, router)
}
// GetStartCmd returns the Cobra start command.
func GetStartCmd(srv server) *cobra.Command {
startCmd := createStartCmd(srv)
createFlags(startCmd)
return startCmd
}
func createStartCmd(srv server) *cobra.Command {
return &cobra.Command{
Use: "start",
Short: "Start adapter-rest",
Long: "Start adapter-rest inside the edge-adapter",
RunE: func(cmd *cobra.Command, args []string) error {
parameters, err := getAdapterRestParameters(cmd)
if err != nil {
return err
}
return startAdapterService(parameters, srv)
},
}
}
//nolint:funlen,gocyclo
func getAdapterRestParameters(cmd *cobra.Command) (*adapterRestParameters, error) {
hostURL, err := cmdutils.GetUserSetVarFromString(cmd, hostURLFlagName, hostURLEnvKey, false)
if err != nil {
return nil, err
}
tlsSystemCertPool, tlsCACerts, err := getTLS(cmd)
if err != nil {
return nil, err
}
dsn, err := cmdutils.GetUserSetVarFromString(cmd, datasourceNameFlagName, datasourceNameEnvKey, true)
if err != nil {
return nil, err
}
oidcURL, err := cmdutils.GetUserSetVarFromString(cmd, oidcProviderURLFlagName, oidcProviderEnvKey, true)
if err != nil {
return nil, err
}
staticFiles, err := cmdutils.GetUserSetVarFromString(cmd, staticFilesPathFlagName, staticFilesPathEnvKey, true)
if err != nil {
return nil, err
}
mode, err := cmdutils.GetUserSetVarFromString(cmd, modeFlagName, modeEnvKey, true)
if err != nil {
return nil, err
}
presentationDefinitionsFile, err := cmdutils.GetUserSetVarFromString(cmd, presentationDefinitionsFlagName,
presentationDefinitionsEnvKey, mode != rpMode)
if err != nil {
return nil, err
}
hydraURL, err := cmdutils.GetUserSetVarFromString(cmd, hydraURLFlagName, hydraURLEnvKey, true)
if err != nil {
return nil, err
}
// didcomm
didCommParameters, err := getDIDCommParams(cmd)
if err != nil {
return nil, err
}
trustblocDomain, err := cmdutils.GetUserSetVarFromString(cmd, trustblocDomainFlagName, trustblocDomainEnvKey, true)
if err != nil {
return nil, err
}
return &adapterRestParameters{
hostURL: hostURL,
tlsSystemCertPool: tlsSystemCertPool,
tlsCACerts: tlsCACerts,
dsn: dsn,
oidcProviderURL: oidcURL,
staticFiles: staticFiles,
presentationDefinitionsFile: presentationDefinitionsFile,
hydraURL: hydraURL,
mode: mode,
didCommParameters: didCommParameters,
trustblocDomain: trustblocDomain,
}, nil
}
func getDIDCommParams(cmd *cobra.Command) (*didCommParameters, error) {
inboundHostInternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostFlagName,
didCommInboundHostEnvKey, true)
if err != nil {
return nil, err
}
inboundHostExternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostExternalFlagName,
didCommInboundHostExternalEnvKey, true)
if err != nil {
return nil, err
}
dbPath, err := cmdutils.GetUserSetVarFromString(cmd, didCommDBPathFlagName, didCommDBPathEnvKey, true)
if err != nil {
return nil, err
}
return &didCommParameters{
inboundHostInternal: inboundHostInternal,
inboundHostExternal: inboundHostExternal,
dbPath: dbPath,
}, nil
}
func getTLS(cmd *cobra.Command) (bool, []string, error) {
tlsSystemCertPoolString, err := cmdutils.GetUserSetVarFromString(cmd, tlsSystemCertPoolFlagName,
tlsSystemCertPoolEnvKey, true)
if err != nil {
return false, nil, err
}
tlsSystemCertPool := false
if tlsSystemCertPoolString != "" {
tlsSystemCertPool, err = strconv.ParseBool(tlsSystemCertPoolString)
if err != nil {
return false, nil, err
}
}
tlsCACerts, err := cmdutils.GetUserSetVarFromArrayString(cmd, tlsCACertsFlagName, tlsCACertsEnvKey, true)
if err != nil {
return false, nil, err
}
return tlsSystemCertPool, tlsCACerts, nil
}
func createFlags(startCmd *cobra.Command) {
startCmd.Flags().StringP(hostURLFlagName, hostURLFlagShorthand, "", hostURLFlagUsage)
startCmd.Flags().StringP(tlsSystemCertPoolFlagName, "", "", tlsSystemCertPoolFlagUsage)
startCmd.Flags().StringArrayP(tlsCACertsFlagName, "", []string{}, tlsCACertsFlagUsage)
startCmd.Flags().StringP(oidcProviderURLFlagName, "", "", oidcProviderURLFlagUsage)
startCmd.Flags().StringP(datasourceNameFlagName, "", "", datasourceNameFlagUsage)
startCmd.Flags().StringP(staticFilesPathFlagName, "", "", staticFilesPathFlagUsage)
startCmd.Flags().StringP(presentationDefinitionsFlagName, "", "", presentationDefinitionsFlagUsage)
startCmd.Flags().StringP(hydraURLFlagName, "", "", hydraURLFlagUsage)
startCmd.Flags().StringP(modeFlagName, "", "", modeFlagUsage)
// didcomm
startCmd.Flags().StringP(didCommInboundHostFlagName, "", "", didCommInboundHostFlagUsage)
startCmd.Flags().StringP(didCommInboundHostExternalFlagName, "", "", didCommInboundHostExternalFlagUsage)
startCmd.Flags().StringP(didCommDBPathFlagName, "", "", didCommDBPathFlagUsage)
startCmd.Flags().StringP(trustblocDomainFlagName, "", "", trustblocDomainFlagUsage)
}
func startAdapterService(parameters *adapterRestParameters, srv server) error {
rootCAs, err := tlsutils.GetCertPool(parameters.tlsSystemCertPool, parameters.tlsCACerts)
if err != nil {
return err
}
logger.Debugf("root ca's %v", rootCAs)
router := mux.NewRouter()
// add health check endpoint
healthCheckService := healthcheck.New()
healthCheckHandlers := healthCheckService.GetOperations()
for _, handler := range healthCheckHandlers {
router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method())
}
ariesCtx, err := createAriesAgent(parameters, &tls.Config{RootCAs: rootCAs})
if err != nil {
return err
}
// add endpoints
switch parameters.mode {
case rpMode:
err = addRPHandlers(parameters, ariesCtx, router, rootCAs)
if err != nil {
return nil
}
case issuerMode:
err = addIssuerHandlers(parameters, ariesCtx, router)
if err != nil {
return nil
}
default:
return fmt.Errorf("invalid mode : %s", parameters.mode)
}
logger.Infof("starting %s adapter rest server on host %s", parameters.mode, parameters.hostURL)
return srv.ListenAndServe(parameters.hostURL, constructCORSHandler(router))
}
func addRPHandlers(
parameters *adapterRestParameters, ctx ariespai.CtxProvider, router *mux.Router, rootCAs *x509.CertPool) error {
presentationExProvider, err := presentationex.New(parameters.presentationDefinitionsFile)
if err != nil {
return err
}
hydraURL, err := url.Parse(parameters.hydraURL)
if err != nil {
return err
}
didClient, err := didexchange.New(ctx)
if err != nil {
return fmt.Errorf("failed to initialized didexchange client : %w", err)
}
presentProofClient, err := presentproof.New(ctx)
if err != nil {
return err
}
// TODO init OIDC stuff in iteration 2 - https://github.com/trustbloc/edge-adapter/issues/24
// add rp endpoints
rpService, err := rp.New(&rpops.Config{
PresentationExProvider: presentationExProvider,
Hydra: hydra.NewClient(hydraURL, rootCAs),
UIEndpoint: uiEndpoint,
DIDExchClient: didClient,
Store: memstore.NewProvider(),
PublicDIDCreator: did.NewTrustblocDIDCreator(
parameters.trustblocDomain,
parameters.didCommParameters.inboundHostExternal,
ctx.KMS(),
rootCAs),
AriesStorageProvider: ctx,
PresentProofClient: presentProofClient,
})
if err != nil {
return err
}
rpHandlers := rpService.GetOperations()
for _, handler := range rpHandlers { | router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) | random_line_split | |
parse.go | // Definitions in the given headers and definitions
// with the given name will not be added to the returned list of type definitions.
// We'll need to manually create these structures.
func parseGodotHeaders(
packagePath string,
constructorIndex ConstructorIndex,
methodIndex MethodIndex,
excludeHeaders, excludeStructs []string) GoTypeDefIndex {
var (
index = GoTypeDefIndex{}
relPath string
err error
godotHeaderPath = filepath.Join(packagePath, "godot_headers")
)
// Walk through all of the godot filename files
err = filepath.Walk(godotHeaderPath, func(path string, f os.FileInfo, err error) error {
if !f.IsDir() && filepath.Ext(path) == ".h" {
relPath, err = filepath.Rel(godotHeaderPath, path)
if err != nil {
panic(err)
}
// Read the filename
content, err := ioutil.ReadFile(path)
if err != nil {
panic(err)
}
// Find all of the type definitions in the filename file
// fmt.Println("Parsing File ", path, "...")
foundTypesLines := findTypeDefs(content)
// After extracting the lines, we can now parse the type definition to
// a structure that we can use to build a Go wrapper.
for _, foundTypeLines := range foundTypesLines {
typeDef := parseTypeDef(foundTypeLines, relPath)
typeDef.Constructors = constructorIndex[typeDef.CName]
typeDef.Methods = methodIndex[typeDef.CName]
// Only add the type if it's not in our exclude list.
if !strInSlice(typeDef.CName, excludeStructs) && !strInSlice(typeDef.CHeaderFilename, excludeHeaders) {
if tdMap, ok := index[relPath]; ok {
tdMap[typeDef.CName] = typeDef
} else {
index[relPath] = map[string]gdnativeapijson.GoTypeDef{
typeDef.CName: typeDef,
}
}
}
}
}
return nil
})
if err != nil {
panic(err)
}
return index
}
func | (typeLines []string, headerName string) gdnativeapijson.GoTypeDef {
// Create a structure for our type definition.
typeDef := gdnativeapijson.GoTypeDef{
CHeaderFilename: headerName,
Properties: []gdnativeapijson.GoProperty{},
}
// Small function for splitting a line to get the uncommented line and
// get the comment itself.
getComment := func(line string) (def, comment string) {
halves := strings.Split(line, "//")
def = halves[0]
if len(halves) > 1 {
comment = strings.TrimSpace(halves[1])
}
if strings.HasPrefix(comment, "/") {
comment = strings.Replace(comment, "/", "", 1)
}
return def, comment
}
// If the type definition is a single line, handle it a little differently
if len(typeLines) == 1 {
// Extract the comment if there is one.
line, comment := getComment(typeLines[0])
// Check to see if the property is a pointer type
if strings.Contains(line, "*") {
line = strings.Replace(line, "*", "", 1)
typeDef.IsPointer = true
}
var err error
// Get the words of the line
words := strings.Split(line, " ")
typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1)
goTypeName, usage := gdnativeapijson.ToGoTypeName(typeDef.CName)
typeDef.Name = goTypeName
typeDef.Base = words[len(words)-2]
typeDef.Comment = comment
typeDef.Usage = usage
if err != nil {
panic(fmt.Errorf("%s\n%w", line, err))
}
return typeDef
}
// Extract the name of the type.
lastLine := typeLines[len(typeLines)-1]
words := strings.Split(lastLine, " ")
typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1)
var err error
// Extract the base type
firstLine := typeLines[0]
words = strings.Split(firstLine, " ")
typeDef.Base = words[1]
if err != nil {
panic(fmt.Errorf("%s\n%w", strings.Join(typeLines, "\n"), err))
}
// Convert the name of the type to a Go name
typeDef.Name, _ = gdnativeapijson.ToGoTypeName(typeDef.CName)
if len(typeDef.Name) == 0 {
typeDef.Name = words[2]
}
// Extract the properties from the type
var properties []string
if strings.HasSuffix(strings.TrimSpace(firstLine), "{") {
properties = typeLines[1 : len(typeLines)-1]
} else {
properties = typeLines[2 : len(typeLines)-1]
}
var accumLines string
// Loop through each property line
for _, line := range properties {
if strings.HasPrefix(strings.TrimSpace(line), "//") || len(strings.TrimSpace(line)) == 0 {
continue
}
if !strings.Contains(line, ";") && typeDef.Base != "enum" {
accumLines += line
} else {
line = accumLines + line
accumLines = ""
}
// Skip function definitions
if strings.Contains(line, "(*") {
continue
}
// Create a type definition for the property
property := gdnativeapijson.GoProperty{}
// Extract the comment if there is one.
line, comment := getComment(line)
property.Comment = comment
// Sanitize the line
line = strings.TrimSpace(line)
line = strings.Split(line, ";")[0]
line = strings.Replace(line, "unsigned ", "u", 1)
line = strings.Replace(line, "const ", "", 1)
// Split the line by spaces
words = strings.Split(line, " ")
// Check to see if the line is just a comment
if words[0] == "//" || (strings.Index(line, "/*") == 0 && strings.Index(line, "*/") == (len(line)-2)) {
continue
}
// Set the property details
if typeDef.Base == "enum" {
// Strip any commas in the name
words[0] = strings.Replace(words[0], ",", "", 1)
property.CName = words[0]
property.Name = casee.ToPascalCase(strings.Replace(words[0], "GODOT_", "", 1))
} else {
if len(words) < 2 {
fmt.Println("Skipping irregular line:", line)
continue
}
property.Base = words[0]
property.CName = words[1]
property.Name = casee.ToPascalCase(strings.Replace(words[1], "godot_", "", 1))
}
// Check to see if the property is a pointer type
if strings.Contains(property.CName, "*") {
property.CName = strings.Replace(property.CName, "*", "", 1)
property.Name = strings.Replace(property.Name, "*", "", 1)
property.IsPointer = true
}
// Skip empty property names
if property.Name == "" {
continue
}
if strings.Contains(property.Name, "}") {
panic(fmt.Errorf("malformed Name: %+v", property))
}
// Append the property to the type definition
typeDef.Properties = append(typeDef.Properties, property)
}
return typeDef
}
type block int8
const (
externBlock block = iota
typedefBlock
localStructBlock
enumBlock
)
// findTypeDefs will return a list of type definition lines.
func findTypeDefs(content []byte) [][]string {
lines := strings.Split(string(content), "\n")
// Create a structure that will hold the lines that define the type.
var (
singleType []string
foundTypes [][]string
blocks []block
)
for i, line := range lines {
if strings.Index(line, "extern \"C\" {") == 0 {
// fmt.Println("Line", i ,": START EXTERN BLOCK")
blocks = append(blocks, externBlock)
continue
} else if strings.Index(line, "struct ") == 0 {
// fmt.Println("Line", i ,": START LOCAL STRUCT BLOCK")
blocks = append(blocks, localStructBlock)
continue
} else if strings.Index(line, "enum ") == 0 {
// fmt.Println("Line", i ,": START ENUM BLOCK")
blocks = append(blocks, enumBlock)
continue
} else if strings.Index(line, "}") == 0 {
if len(blocks) == 0 {
panic(fmt.Sprintln("\tLine", i, ": extra closing bracket encountered", line))
}
n := len(blocks)-1
b := blocks[n]
blocks = blocks[:n]
switch b {
case localStructBlock:
// fmt.Println("Line", i ,": END LOCAL STRUCT BLOCK")
continue
case externBlock:
| parseTypeDef | identifier_name |
parse.go | // Definitions in the given headers and definitions
// with the given name will not be added to the returned list of type definitions.
// We'll need to manually create these structures.
func parseGodotHeaders(
packagePath string,
constructorIndex ConstructorIndex,
methodIndex MethodIndex,
excludeHeaders, excludeStructs []string) GoTypeDefIndex {
var (
index = GoTypeDefIndex{}
relPath string
err error
godotHeaderPath = filepath.Join(packagePath, "godot_headers")
)
// Walk through all of the godot filename files
err = filepath.Walk(godotHeaderPath, func(path string, f os.FileInfo, err error) error {
if !f.IsDir() && filepath.Ext(path) == ".h" {
relPath, err = filepath.Rel(godotHeaderPath, path)
if err != nil {
panic(err)
}
// Read the filename
content, err := ioutil.ReadFile(path)
if err != nil |
// Find all of the type definitions in the filename file
// fmt.Println("Parsing File ", path, "...")
foundTypesLines := findTypeDefs(content)
// After extracting the lines, we can now parse the type definition to
// a structure that we can use to build a Go wrapper.
for _, foundTypeLines := range foundTypesLines {
typeDef := parseTypeDef(foundTypeLines, relPath)
typeDef.Constructors = constructorIndex[typeDef.CName]
typeDef.Methods = methodIndex[typeDef.CName]
// Only add the type if it's not in our exclude list.
if !strInSlice(typeDef.CName, excludeStructs) && !strInSlice(typeDef.CHeaderFilename, excludeHeaders) {
if tdMap, ok := index[relPath]; ok {
tdMap[typeDef.CName] = typeDef
} else {
index[relPath] = map[string]gdnativeapijson.GoTypeDef{
typeDef.CName: typeDef,
}
}
}
}
}
return nil
})
if err != nil {
panic(err)
}
return index
}
func parseTypeDef(typeLines []string, headerName string) gdnativeapijson.GoTypeDef {
// Create a structure for our type definition.
typeDef := gdnativeapijson.GoTypeDef{
CHeaderFilename: headerName,
Properties: []gdnativeapijson.GoProperty{},
}
// Small function for splitting a line to get the uncommented line and
// get the comment itself.
getComment := func(line string) (def, comment string) {
halves := strings.Split(line, "//")
def = halves[0]
if len(halves) > 1 {
comment = strings.TrimSpace(halves[1])
}
if strings.HasPrefix(comment, "/") {
comment = strings.Replace(comment, "/", "", 1)
}
return def, comment
}
// If the type definition is a single line, handle it a little differently
if len(typeLines) == 1 {
// Extract the comment if there is one.
line, comment := getComment(typeLines[0])
// Check to see if the property is a pointer type
if strings.Contains(line, "*") {
line = strings.Replace(line, "*", "", 1)
typeDef.IsPointer = true
}
var err error
// Get the words of the line
words := strings.Split(line, " ")
typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1)
goTypeName, usage := gdnativeapijson.ToGoTypeName(typeDef.CName)
typeDef.Name = goTypeName
typeDef.Base = words[len(words)-2]
typeDef.Comment = comment
typeDef.Usage = usage
if err != nil {
panic(fmt.Errorf("%s\n%w", line, err))
}
return typeDef
}
// Extract the name of the type.
lastLine := typeLines[len(typeLines)-1]
words := strings.Split(lastLine, " ")
typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1)
var err error
// Extract the base type
firstLine := typeLines[0]
words = strings.Split(firstLine, " ")
typeDef.Base = words[1]
if err != nil {
panic(fmt.Errorf("%s\n%w", strings.Join(typeLines, "\n"), err))
}
// Convert the name of the type to a Go name
typeDef.Name, _ = gdnativeapijson.ToGoTypeName(typeDef.CName)
if len(typeDef.Name) == 0 {
typeDef.Name = words[2]
}
// Extract the properties from the type
var properties []string
if strings.HasSuffix(strings.TrimSpace(firstLine), "{") {
properties = typeLines[1 : len(typeLines)-1]
} else {
properties = typeLines[2 : len(typeLines)-1]
}
var accumLines string
// Loop through each property line
for _, line := range properties {
if strings.HasPrefix(strings.TrimSpace(line), "//") || len(strings.TrimSpace(line)) == 0 {
continue
}
if !strings.Contains(line, ";") && typeDef.Base != "enum" {
accumLines += line
} else {
line = accumLines + line
accumLines = ""
}
// Skip function definitions
if strings.Contains(line, "(*") {
continue
}
// Create a type definition for the property
property := gdnativeapijson.GoProperty{}
// Extract the comment if there is one.
line, comment := getComment(line)
property.Comment = comment
// Sanitize the line
line = strings.TrimSpace(line)
line = strings.Split(line, ";")[0]
line = strings.Replace(line, "unsigned ", "u", 1)
line = strings.Replace(line, "const ", "", 1)
// Split the line by spaces
words = strings.Split(line, " ")
// Check to see if the line is just a comment
if words[0] == "//" || (strings.Index(line, "/*") == 0 && strings.Index(line, "*/") == (len(line)-2)) {
continue
}
// Set the property details
if typeDef.Base == "enum" {
// Strip any commas in the name
words[0] = strings.Replace(words[0], ",", "", 1)
property.CName = words[0]
property.Name = casee.ToPascalCase(strings.Replace(words[0], "GODOT_", "", 1))
} else {
if len(words) < 2 {
fmt.Println("Skipping irregular line:", line)
continue
}
property.Base = words[0]
property.CName = words[1]
property.Name = casee.ToPascalCase(strings.Replace(words[1], "godot_", "", 1))
}
// Check to see if the property is a pointer type
if strings.Contains(property.CName, "*") {
property.CName = strings.Replace(property.CName, "*", "", 1)
property.Name = strings.Replace(property.Name, "*", "", 1)
property.IsPointer = true
}
// Skip empty property names
if property.Name == "" {
continue
}
if strings.Contains(property.Name, "}") {
panic(fmt.Errorf("malformed Name: %+v", property))
}
// Append the property to the type definition
typeDef.Properties = append(typeDef.Properties, property)
}
return typeDef
}
type block int8
const (
externBlock block = iota
typedefBlock
localStructBlock
enumBlock
)
// findTypeDefs will return a list of type definition lines.
func findTypeDefs(content []byte) [][]string {
lines := strings.Split(string(content), "\n")
// Create a structure that will hold the lines that define the type.
var (
singleType []string
foundTypes [][]string
blocks []block
)
for i, line := range lines {
if strings.Index(line, "extern \"C\" {") == 0 {
// fmt.Println("Line", i ,": START EXTERN BLOCK")
blocks = append(blocks, externBlock)
continue
} else if strings.Index(line, "struct ") == 0 {
// fmt.Println("Line", i ,": START LOCAL STRUCT BLOCK")
blocks = append(blocks, localStructBlock)
continue
} else if strings.Index(line, "enum ") == 0 {
// fmt.Println("Line", i ,": START ENUM BLOCK")
blocks = append(blocks, enumBlock)
continue
} else if strings.Index(line, "}") == 0 {
if len(blocks) == 0 {
panic(fmt.Sprintln("\tLine", i, ": extra closing bracket encountered", line))
}
n := len(blocks)-1
b := blocks[n]
blocks = blocks[:n]
switch b {
case localStructBlock:
// fmt.Println("Line", i ,": END LOCAL STRUCT BLOCK")
continue
case externBlock:
| {
panic(err)
} | conditional_block |
parse.go | // Definitions in the given headers and definitions
// with the given name will not be added to the returned list of type definitions.
// We'll need to manually create these structures.
func parseGodotHeaders(
packagePath string,
constructorIndex ConstructorIndex,
methodIndex MethodIndex,
excludeHeaders, excludeStructs []string) GoTypeDefIndex {
var (
index = GoTypeDefIndex{}
relPath string
err error
godotHeaderPath = filepath.Join(packagePath, "godot_headers")
)
// Walk through all of the godot filename files
err = filepath.Walk(godotHeaderPath, func(path string, f os.FileInfo, err error) error {
if !f.IsDir() && filepath.Ext(path) == ".h" {
relPath, err = filepath.Rel(godotHeaderPath, path)
if err != nil {
panic(err)
}
// Read the filename
content, err := ioutil.ReadFile(path)
if err != nil {
panic(err)
}
// Find all of the type definitions in the filename file
// fmt.Println("Parsing File ", path, "...")
foundTypesLines := findTypeDefs(content)
// After extracting the lines, we can now parse the type definition to
// a structure that we can use to build a Go wrapper.
for _, foundTypeLines := range foundTypesLines {
typeDef := parseTypeDef(foundTypeLines, relPath)
typeDef.Constructors = constructorIndex[typeDef.CName]
typeDef.Methods = methodIndex[typeDef.CName]
| tdMap[typeDef.CName] = typeDef
} else {
index[relPath] = map[string]gdnativeapijson.GoTypeDef{
typeDef.CName: typeDef,
}
}
}
}
}
return nil
})
if err != nil {
panic(err)
}
return index
}
func parseTypeDef(typeLines []string, headerName string) gdnativeapijson.GoTypeDef {
// Create a structure for our type definition.
typeDef := gdnativeapijson.GoTypeDef{
CHeaderFilename: headerName,
Properties: []gdnativeapijson.GoProperty{},
}
// Small function for splitting a line to get the uncommented line and
// get the comment itself.
getComment := func(line string) (def, comment string) {
halves := strings.Split(line, "//")
def = halves[0]
if len(halves) > 1 {
comment = strings.TrimSpace(halves[1])
}
if strings.HasPrefix(comment, "/") {
comment = strings.Replace(comment, "/", "", 1)
}
return def, comment
}
// If the type definition is a single line, handle it a little differently
if len(typeLines) == 1 {
// Extract the comment if there is one.
line, comment := getComment(typeLines[0])
// Check to see if the property is a pointer type
if strings.Contains(line, "*") {
line = strings.Replace(line, "*", "", 1)
typeDef.IsPointer = true
}
var err error
// Get the words of the line
words := strings.Split(line, " ")
typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1)
goTypeName, usage := gdnativeapijson.ToGoTypeName(typeDef.CName)
typeDef.Name = goTypeName
typeDef.Base = words[len(words)-2]
typeDef.Comment = comment
typeDef.Usage = usage
if err != nil {
panic(fmt.Errorf("%s\n%w", line, err))
}
return typeDef
}
// Extract the name of the type.
lastLine := typeLines[len(typeLines)-1]
words := strings.Split(lastLine, " ")
typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1)
var err error
// Extract the base type
firstLine := typeLines[0]
words = strings.Split(firstLine, " ")
typeDef.Base = words[1]
if err != nil {
panic(fmt.Errorf("%s\n%w", strings.Join(typeLines, "\n"), err))
}
// Convert the name of the type to a Go name
typeDef.Name, _ = gdnativeapijson.ToGoTypeName(typeDef.CName)
if len(typeDef.Name) == 0 {
typeDef.Name = words[2]
}
// Extract the properties from the type
var properties []string
if strings.HasSuffix(strings.TrimSpace(firstLine), "{") {
properties = typeLines[1 : len(typeLines)-1]
} else {
properties = typeLines[2 : len(typeLines)-1]
}
var accumLines string
// Loop through each property line
for _, line := range properties {
if strings.HasPrefix(strings.TrimSpace(line), "//") || len(strings.TrimSpace(line)) == 0 {
continue
}
if !strings.Contains(line, ";") && typeDef.Base != "enum" {
accumLines += line
} else {
line = accumLines + line
accumLines = ""
}
// Skip function definitions
if strings.Contains(line, "(*") {
continue
}
// Create a type definition for the property
property := gdnativeapijson.GoProperty{}
// Extract the comment if there is one.
line, comment := getComment(line)
property.Comment = comment
// Sanitize the line
line = strings.TrimSpace(line)
line = strings.Split(line, ";")[0]
line = strings.Replace(line, "unsigned ", "u", 1)
line = strings.Replace(line, "const ", "", 1)
// Split the line by spaces
words = strings.Split(line, " ")
// Check to see if the line is just a comment
if words[0] == "//" || (strings.Index(line, "/*") == 0 && strings.Index(line, "*/") == (len(line)-2)) {
continue
}
// Set the property details
if typeDef.Base == "enum" {
// Strip any commas in the name
words[0] = strings.Replace(words[0], ",", "", 1)
property.CName = words[0]
property.Name = casee.ToPascalCase(strings.Replace(words[0], "GODOT_", "", 1))
} else {
if len(words) < 2 {
fmt.Println("Skipping irregular line:", line)
continue
}
property.Base = words[0]
property.CName = words[1]
property.Name = casee.ToPascalCase(strings.Replace(words[1], "godot_", "", 1))
}
// Check to see if the property is a pointer type
if strings.Contains(property.CName, "*") {
property.CName = strings.Replace(property.CName, "*", "", 1)
property.Name = strings.Replace(property.Name, "*", "", 1)
property.IsPointer = true
}
// Skip empty property names
if property.Name == "" {
continue
}
if strings.Contains(property.Name, "}") {
panic(fmt.Errorf("malformed Name: %+v", property))
}
// Append the property to the type definition
typeDef.Properties = append(typeDef.Properties, property)
}
return typeDef
}
type block int8
const (
externBlock block = iota
typedefBlock
localStructBlock
enumBlock
)
// findTypeDefs will return a list of type definition lines.
func findTypeDefs(content []byte) [][]string {
lines := strings.Split(string(content), "\n")
// Create a structure that will hold the lines that define the type.
var (
singleType []string
foundTypes [][]string
blocks []block
)
for i, line := range lines {
if strings.Index(line, "extern \"C\" {") == 0 {
// fmt.Println("Line", i ,": START EXTERN BLOCK")
blocks = append(blocks, externBlock)
continue
} else if strings.Index(line, "struct ") == 0 {
// fmt.Println("Line", i ,": START LOCAL STRUCT BLOCK")
blocks = append(blocks, localStructBlock)
continue
} else if strings.Index(line, "enum ") == 0 {
// fmt.Println("Line", i ,": START ENUM BLOCK")
blocks = append(blocks, enumBlock)
continue
} else if strings.Index(line, "}") == 0 {
if len(blocks) == 0 {
panic(fmt.Sprintln("\tLine", i, ": extra closing bracket encountered", line))
}
n := len(blocks)-1
b := blocks[n]
blocks = blocks[:n]
switch b {
case localStructBlock:
// fmt.Println("Line", i ,": END LOCAL STRUCT BLOCK")
continue
case externBlock:
// | // Only add the type if it's not in our exclude list.
if !strInSlice(typeDef.CName, excludeStructs) && !strInSlice(typeDef.CHeaderFilename, excludeHeaders) {
if tdMap, ok := index[relPath]; ok { | random_line_split |
parse.go | // Definitions in the given headers and definitions
// with the given name will not be added to the returned list of type definitions.
// We'll need to manually create these structures.
func parseGodotHeaders(
packagePath string,
constructorIndex ConstructorIndex,
methodIndex MethodIndex,
excludeHeaders, excludeStructs []string) GoTypeDefIndex {
var (
index = GoTypeDefIndex{}
relPath string
err error
godotHeaderPath = filepath.Join(packagePath, "godot_headers")
)
// Walk through all of the godot filename files
err = filepath.Walk(godotHeaderPath, func(path string, f os.FileInfo, err error) error {
if !f.IsDir() && filepath.Ext(path) == ".h" {
relPath, err = filepath.Rel(godotHeaderPath, path)
if err != nil {
panic(err)
}
// Read the filename
content, err := ioutil.ReadFile(path)
if err != nil {
panic(err)
}
// Find all of the type definitions in the filename file
// fmt.Println("Parsing File ", path, "...")
foundTypesLines := findTypeDefs(content)
// After extracting the lines, we can now parse the type definition to
// a structure that we can use to build a Go wrapper.
for _, foundTypeLines := range foundTypesLines {
typeDef := parseTypeDef(foundTypeLines, relPath)
typeDef.Constructors = constructorIndex[typeDef.CName]
typeDef.Methods = methodIndex[typeDef.CName]
// Only add the type if it's not in our exclude list.
if !strInSlice(typeDef.CName, excludeStructs) && !strInSlice(typeDef.CHeaderFilename, excludeHeaders) {
if tdMap, ok := index[relPath]; ok {
tdMap[typeDef.CName] = typeDef
} else {
index[relPath] = map[string]gdnativeapijson.GoTypeDef{
typeDef.CName: typeDef,
}
}
}
}
}
return nil
})
if err != nil {
panic(err)
}
return index
}
func parseTypeDef(typeLines []string, headerName string) gdnativeapijson.GoTypeDef | }
// If the type definition is a single line, handle it a little differently
if len(typeLines) == 1 {
// Extract the comment if there is one.
line, comment := getComment(typeLines[0])
// Check to see if the property is a pointer type
if strings.Contains(line, "*") {
line = strings.Replace(line, "*", "", 1)
typeDef.IsPointer = true
}
var err error
// Get the words of the line
words := strings.Split(line, " ")
typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1)
goTypeName, usage := gdnativeapijson.ToGoTypeName(typeDef.CName)
typeDef.Name = goTypeName
typeDef.Base = words[len(words)-2]
typeDef.Comment = comment
typeDef.Usage = usage
if err != nil {
panic(fmt.Errorf("%s\n%w", line, err))
}
return typeDef
}
// Extract the name of the type.
lastLine := typeLines[len(typeLines)-1]
words := strings.Split(lastLine, " ")
typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1)
var err error
// Extract the base type
firstLine := typeLines[0]
words = strings.Split(firstLine, " ")
typeDef.Base = words[1]
if err != nil {
panic(fmt.Errorf("%s\n%w", strings.Join(typeLines, "\n"), err))
}
// Convert the name of the type to a Go name
typeDef.Name, _ = gdnativeapijson.ToGoTypeName(typeDef.CName)
if len(typeDef.Name) == 0 {
typeDef.Name = words[2]
}
// Extract the properties from the type
var properties []string
if strings.HasSuffix(strings.TrimSpace(firstLine), "{") {
properties = typeLines[1 : len(typeLines)-1]
} else {
properties = typeLines[2 : len(typeLines)-1]
}
var accumLines string
// Loop through each property line
for _, line := range properties {
if strings.HasPrefix(strings.TrimSpace(line), "//") || len(strings.TrimSpace(line)) == 0 {
continue
}
if !strings.Contains(line, ";") && typeDef.Base != "enum" {
accumLines += line
} else {
line = accumLines + line
accumLines = ""
}
// Skip function definitions
if strings.Contains(line, "(*") {
continue
}
// Create a type definition for the property
property := gdnativeapijson.GoProperty{}
// Extract the comment if there is one.
line, comment := getComment(line)
property.Comment = comment
// Sanitize the line
line = strings.TrimSpace(line)
line = strings.Split(line, ";")[0]
line = strings.Replace(line, "unsigned ", "u", 1)
line = strings.Replace(line, "const ", "", 1)
// Split the line by spaces
words = strings.Split(line, " ")
// Check to see if the line is just a comment
if words[0] == "//" || (strings.Index(line, "/*") == 0 && strings.Index(line, "*/") == (len(line)-2)) {
continue
}
// Set the property details
if typeDef.Base == "enum" {
// Strip any commas in the name
words[0] = strings.Replace(words[0], ",", "", 1)
property.CName = words[0]
property.Name = casee.ToPascalCase(strings.Replace(words[0], "GODOT_", "", 1))
} else {
if len(words) < 2 {
fmt.Println("Skipping irregular line:", line)
continue
}
property.Base = words[0]
property.CName = words[1]
property.Name = casee.ToPascalCase(strings.Replace(words[1], "godot_", "", 1))
}
// Check to see if the property is a pointer type
if strings.Contains(property.CName, "*") {
property.CName = strings.Replace(property.CName, "*", "", 1)
property.Name = strings.Replace(property.Name, "*", "", 1)
property.IsPointer = true
}
// Skip empty property names
if property.Name == "" {
continue
}
if strings.Contains(property.Name, "}") {
panic(fmt.Errorf("malformed Name: %+v", property))
}
// Append the property to the type definition
typeDef.Properties = append(typeDef.Properties, property)
}
return typeDef
}
type block int8
const (
externBlock block = iota
typedefBlock
localStructBlock
enumBlock
)
// findTypeDefs will return a list of type definition lines.
func findTypeDefs(content []byte) [][]string {
lines := strings.Split(string(content), "\n")
// Create a structure that will hold the lines that define the type.
var (
singleType []string
foundTypes [][]string
blocks []block
)
for i, line := range lines {
if strings.Index(line, "extern \"C\" {") == 0 {
// fmt.Println("Line", i ,": START EXTERN BLOCK")
blocks = append(blocks, externBlock)
continue
} else if strings.Index(line, "struct ") == 0 {
// fmt.Println("Line", i ,": START LOCAL STRUCT BLOCK")
blocks = append(blocks, localStructBlock)
continue
} else if strings.Index(line, "enum ") == 0 {
// fmt.Println("Line", i ,": START ENUM BLOCK")
blocks = append(blocks, enumBlock)
continue
} else if strings.Index(line, "}") == 0 {
if len(blocks) == 0 {
panic(fmt.Sprintln("\tLine", i, ": extra closing bracket encountered", line))
}
n := len(blocks)-1
b := blocks[n]
blocks = blocks[:n]
switch b {
case localStructBlock:
// fmt.Println("Line", i ,": END LOCAL STRUCT BLOCK")
continue
case externBlock:
// | {
// Create a structure for our type definition.
typeDef := gdnativeapijson.GoTypeDef{
CHeaderFilename: headerName,
Properties: []gdnativeapijson.GoProperty{},
}
// Small function for splitting a line to get the uncommented line and
// get the comment itself.
getComment := func(line string) (def, comment string) {
halves := strings.Split(line, "//")
def = halves[0]
if len(halves) > 1 {
comment = strings.TrimSpace(halves[1])
}
if strings.HasPrefix(comment, "/") {
comment = strings.Replace(comment, "/", "", 1)
}
return def, comment | identifier_body |
weapon.rs | ,
WeaponKind::PlasmaRifle => 2
}
}
pub fn new(id: u32) -> Result<Self, String> {
match id {
0 => Ok(WeaponKind::M4),
1 => Ok(WeaponKind::Ak47),
2 => Ok(WeaponKind::PlasmaRifle),
_ => return Err(format!("unknown weapon kind {}", id))
}
}
}
pub struct Weapon {
self_handle: Handle<Weapon>,
kind: WeaponKind,
model: Handle<Node>,
laser_dot: Handle<Node>,
shot_point: Handle<Node>,
offset: Vec3,
dest_offset: Vec3,
last_shot_time: f64,
shot_position: Vec3,
owner: Handle<Actor>,
ammo: u32,
definition: &'static WeaponDefinition,
}
pub struct WeaponDefinition {
model: &'static str,
shot_sound: &'static str,
ammo: u32,
}
impl HandleFromSelf<Weapon> for Weapon {
fn self_handle(&self) -> Handle<Weapon> {
self.self_handle
}
}
impl Default for Weapon {
fn default() -> Self {
Self {
self_handle: Default::default(),
kind: WeaponKind::M4,
laser_dot: Handle::NONE,
model: Handle::NONE,
offset: Vec3::ZERO,
shot_point: Handle::NONE,
dest_offset: Vec3::ZERO,
last_shot_time: 0.0,
shot_position: Vec3::ZERO,
owner: Handle::NONE,
ammo: 250,
definition: Self::get_definition(WeaponKind::M4),
}
}
}
impl Visit for Weapon {
fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {
visitor.enter_region(name)?;
let mut kind_id = self.kind.id();
kind_id.visit("KindId", visitor)?;
if visitor.is_reading() {
self.kind = WeaponKind::new(kind_id)?
}
self.definition = Self::get_definition(self.kind);
self.self_handle.visit("SelfHandle", visitor)?;
self.model.visit("Model", visitor)?;
self.laser_dot.visit("LaserDot", visitor)?;
self.offset.visit("Offset", visitor)?;
self.dest_offset.visit("DestOffset", visitor)?;
self.last_shot_time.visit("LastShotTime", visitor)?;
self.owner.visit("Owner", visitor)?;
self.ammo.visit("Ammo", visitor)?;
visitor.leave_region()
}
}
impl Weapon {
pub fn get_definition(kind: WeaponKind) -> &'static WeaponDefinition {
match kind {
WeaponKind::M4 => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/m4.FBX",
shot_sound: "data/sounds/m4_shot.wav",
ammo: 115,
};
&DEFINITION
}
WeaponKind::Ak47 => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/ak47.FBX",
shot_sound: "data/sounds/m4_shot.wav",
ammo: 100,
};
&DEFINITION
}
WeaponKind::PlasmaRifle => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/plasma_rifle.FBX",
shot_sound: "data/sounds/plasma_shot.wav",
ammo: 40,
};
&DEFINITION
}
}
}
pub fn new(kind: WeaponKind, resource_manager: &mut ResourceManager, scene: &mut Scene) -> Weapon {
let definition = Self::get_definition(kind);
let model = Model::instantiate(
resource_manager.request_model(Path::new(definition.model)).unwrap(), scene).root;
let SceneInterfaceMut { graph, .. } = scene.interface_mut();
let laser_dot = graph.add_node(Node::Light(
LightBuilder::new(LightKind::Point(PointLight::new(0.5)), BaseBuilder::new())
.with_color(Color::opaque(255, 0, 0))
.cast_shadows(false)
.build()));
let shot_point = graph.find_by_name(model, "Weapon:ShotPoint");
if shot_point.is_none() {
println!("Shot point not found!");
}
Weapon {
kind,
laser_dot,
model,
shot_point,
definition,
ammo: definition.ammo,
..Default::default()
}
}
pub fn set_visibility(&self, visibility: bool, graph: &mut Graph) {
graph.get_mut(self.model).base_mut().set_visibility(visibility);
graph.get_mut(self.laser_dot).base_mut().set_visibility(visibility);
}
pub fn get_model(&self) -> Handle<Node> {
self.model
}
pub fn update(&mut self, scene: &mut Scene) {
let SceneInterfaceMut { graph, physics, .. } = scene.interface_mut();
self.offset.follow(&self.dest_offset, 0.2);
self.update_laser_sight(graph, physics);
let node = graph.get_mut(self.model);
node.base_mut().get_local_transform_mut().set_position(self.offset);
self.shot_position = node.base().get_global_position();
}
fn get_shot_position(&self, graph: &Graph) -> Vec3 {
if self.shot_point.is_some() {
graph.get(self.shot_point).base().get_global_position()
} else {
// Fallback
graph.get(self.model).base().get_global_position()
}
}
pub fn get_kind(&self) -> WeaponKind {
self.kind
}
pub fn add_ammo(&mut self, amount: u32) {
self.ammo += amount;
}
fn update_laser_sight(&self, graph: &mut Graph, physics: &Physics) {
let mut laser_dot_position = Vec3::ZERO;
let model = graph.get(self.model);
let begin = model.base().get_global_position();
let end = begin + model.base().get_look_vector().scale(100.0);
if let Some(ray) = Ray::from_two_points(&begin, &end) {
let mut result = Vec::new();
if physics.ray_cast(&ray, RayCastOptions::default(), &mut result) {
let offset = result[0].normal.normalized().unwrap_or_default().scale(0.2);
laser_dot_position = result[0].position + offset;
}
}
graph.get_mut(self.laser_dot).base_mut().get_local_transform_mut().set_position(laser_dot_position);
}
fn play_shot_sound(&self, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>) {
let mut sound_context = sound_context.lock().unwrap();
let shot_buffer = resource_manager.request_sound_buffer(
Path::new(self.definition.shot_sound), BufferKind::Normal).unwrap();
let mut shot_sound = Source::new_spatial(shot_buffer).unwrap();
shot_sound.set_play_once(true);
shot_sound.play();
shot_sound.as_spatial_mut().set_position(&self.shot_position);
sound_context.add_source(shot_sound);
}
pub fn get_ammo(&self) -> u32 {
self.ammo
}
pub fn get_owner(&self) -> Handle<Actor> {
self.owner
}
pub fn set_owner(&mut self, owner: Handle<Actor>) {
self.owner = owner;
}
pub fn try_shoot(&mut self,
scene: &mut Scene,
resource_manager: &mut ResourceManager,
sound_context: Arc<Mutex<Context>>,
time: GameTime,
weapon_velocity: Vec3) -> Option<Projectile> {
if self.ammo != 0 && time.elapsed - self.last_shot_time >= 0.1 {
self.ammo -= 1;
self.offset = Vec3::new(0.0, 0.0, -0.05);
self.last_shot_time = time.elapsed;
self.play_shot_sound(resource_manager, sound_context);
let (dir, pos) = {
let graph = scene.interface().graph;
(graph.get(self.model).base().get_look_vector(), self.get_shot_position(graph))
};
match self.kind {
WeaponKind::M4 | WeaponKind::Ak47 => {
Some(Projectile::new(ProjectileKind::Bullet, resource_manager, scene,
dir, pos, self.self_handle, weapon_velocity))
}
WeaponKind::PlasmaRifle => {
Some(Projectile::new(ProjectileKind::Plasma, resource_manager, scene,
dir, pos, self.self_handle, weapon_velocity))
}
}
} else {
None
}
}
}
impl CleanUp for Weapon {
fn clean_up(&mut self, scene: &mut Scene) {
let SceneInterfaceMut { graph, .. } = scene.interface_mut();
graph.remove_node(self.model);
graph.remove_node(self.laser_dot);
}
}
pub struct WeaponContainer {
pool: Pool<Weapon>
}
impl WeaponContainer {
pub fn new() -> Self {
Self {
pool: Pool::new()
}
}
pub fn add(&mut self, weapon: Weapon) -> Handle<Weapon> {
let handle = self.pool.spawn(weapon);
self.pool.borrow_mut(handle).self_handle = handle;
handle
}
pub fn | iter | identifier_name | |
weapon.rs | Light,
},
base::{BaseBuilder, AsBase},
},
core::{
pool::{
Pool,
PoolIterator,
PoolIteratorMut,
Handle,
},
color::Color,
visitor::{
Visit,
VisitResult,
Visitor,
},
math::{vec3::Vec3, ray::Ray},
},
};
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum WeaponKind {
M4,
Ak47,
PlasmaRifle,
}
impl WeaponKind {
pub fn id(&self) -> u32 {
match self {
WeaponKind::M4 => 0,
WeaponKind::Ak47 => 1,
WeaponKind::PlasmaRifle => 2
}
}
pub fn new(id: u32) -> Result<Self, String> {
match id {
0 => Ok(WeaponKind::M4),
1 => Ok(WeaponKind::Ak47),
2 => Ok(WeaponKind::PlasmaRifle),
_ => return Err(format!("unknown weapon kind {}", id))
}
}
}
pub struct Weapon {
self_handle: Handle<Weapon>,
kind: WeaponKind,
model: Handle<Node>,
laser_dot: Handle<Node>,
shot_point: Handle<Node>,
offset: Vec3,
dest_offset: Vec3,
last_shot_time: f64,
shot_position: Vec3,
owner: Handle<Actor>,
ammo: u32,
definition: &'static WeaponDefinition,
}
pub struct WeaponDefinition {
model: &'static str,
shot_sound: &'static str,
ammo: u32,
}
impl HandleFromSelf<Weapon> for Weapon {
fn self_handle(&self) -> Handle<Weapon> {
self.self_handle
}
}
impl Default for Weapon {
fn default() -> Self {
Self {
self_handle: Default::default(),
kind: WeaponKind::M4,
laser_dot: Handle::NONE,
model: Handle::NONE,
offset: Vec3::ZERO,
shot_point: Handle::NONE,
dest_offset: Vec3::ZERO,
last_shot_time: 0.0,
shot_position: Vec3::ZERO,
owner: Handle::NONE,
ammo: 250,
definition: Self::get_definition(WeaponKind::M4),
}
}
}
impl Visit for Weapon {
fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {
visitor.enter_region(name)?;
let mut kind_id = self.kind.id();
kind_id.visit("KindId", visitor)?;
if visitor.is_reading() {
self.kind = WeaponKind::new(kind_id)?
}
self.definition = Self::get_definition(self.kind);
self.self_handle.visit("SelfHandle", visitor)?;
self.model.visit("Model", visitor)?;
self.laser_dot.visit("LaserDot", visitor)?;
self.offset.visit("Offset", visitor)?;
self.dest_offset.visit("DestOffset", visitor)?;
self.last_shot_time.visit("LastShotTime", visitor)?;
self.owner.visit("Owner", visitor)?;
self.ammo.visit("Ammo", visitor)?;
visitor.leave_region()
}
}
impl Weapon {
pub fn get_definition(kind: WeaponKind) -> &'static WeaponDefinition {
match kind {
WeaponKind::M4 => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/m4.FBX",
shot_sound: "data/sounds/m4_shot.wav",
ammo: 115,
};
&DEFINITION
}
WeaponKind::Ak47 => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/ak47.FBX",
shot_sound: "data/sounds/m4_shot.wav",
ammo: 100,
};
&DEFINITION
}
WeaponKind::PlasmaRifle => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/plasma_rifle.FBX",
shot_sound: "data/sounds/plasma_shot.wav",
ammo: 40,
};
&DEFINITION
}
}
}
pub fn new(kind: WeaponKind, resource_manager: &mut ResourceManager, scene: &mut Scene) -> Weapon {
let definition = Self::get_definition(kind);
let model = Model::instantiate(
resource_manager.request_model(Path::new(definition.model)).unwrap(), scene).root;
let SceneInterfaceMut { graph, .. } = scene.interface_mut();
let laser_dot = graph.add_node(Node::Light(
LightBuilder::new(LightKind::Point(PointLight::new(0.5)), BaseBuilder::new())
.with_color(Color::opaque(255, 0, 0))
.cast_shadows(false)
.build()));
let shot_point = graph.find_by_name(model, "Weapon:ShotPoint");
if shot_point.is_none() {
println!("Shot point not found!");
}
Weapon {
kind,
laser_dot,
model,
shot_point,
definition,
ammo: definition.ammo,
..Default::default()
}
}
pub fn set_visibility(&self, visibility: bool, graph: &mut Graph) {
graph.get_mut(self.model).base_mut().set_visibility(visibility);
graph.get_mut(self.laser_dot).base_mut().set_visibility(visibility);
}
pub fn get_model(&self) -> Handle<Node> {
self.model
}
pub fn update(&mut self, scene: &mut Scene) {
let SceneInterfaceMut { graph, physics, .. } = scene.interface_mut();
self.offset.follow(&self.dest_offset, 0.2);
self.update_laser_sight(graph, physics);
let node = graph.get_mut(self.model);
node.base_mut().get_local_transform_mut().set_position(self.offset);
self.shot_position = node.base().get_global_position();
}
fn get_shot_position(&self, graph: &Graph) -> Vec3 {
if self.shot_point.is_some() {
graph.get(self.shot_point).base().get_global_position()
} else {
// Fallback
graph.get(self.model).base().get_global_position()
}
}
pub fn get_kind(&self) -> WeaponKind {
self.kind
}
pub fn add_ammo(&mut self, amount: u32) {
self.ammo += amount;
}
fn update_laser_sight(&self, graph: &mut Graph, physics: &Physics) {
let mut laser_dot_position = Vec3::ZERO;
let model = graph.get(self.model);
let begin = model.base().get_global_position();
let end = begin + model.base().get_look_vector().scale(100.0);
if let Some(ray) = Ray::from_two_points(&begin, &end) {
let mut result = Vec::new();
if physics.ray_cast(&ray, RayCastOptions::default(), &mut result) {
let offset = result[0].normal.normalized().unwrap_or_default().scale(0.2);
laser_dot_position = result[0].position + offset;
}
}
graph.get_mut(self.laser_dot).base_mut().get_local_transform_mut().set_position(laser_dot_position);
}
fn play_shot_sound(&self, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>) {
let mut sound_context = sound_context.lock().unwrap();
let shot_buffer = resource_manager.request_sound_buffer(
Path::new(self.definition.shot_sound), BufferKind::Normal).unwrap();
let mut shot_sound = Source::new_spatial(shot_buffer).unwrap();
shot_sound.set_play_once(true);
shot_sound.play();
shot_sound.as_spatial_mut().set_position(&self.shot_position);
sound_context.add_source(shot_sound);
}
pub fn get_ammo(&self) -> u32 {
self.ammo
}
pub fn get_owner(&self) -> Handle<Actor> {
self.owner
}
pub fn set_owner(&mut self, owner: Handle<Actor>) {
self.owner = owner;
}
pub fn try_shoot(&mut self,
scene: &mut Scene,
resource_manager: &mut ResourceManager,
sound_context: Arc<Mutex<Context>>,
time: GameTime,
weapon_velocity: Vec3) -> Option<Projectile> | Some(Projectile::new(ProjectileKind::Plasma, resource_manager, scene,
dir, pos, self.self_handle, weapon_velocity))
}
}
} else {
None
}
| {
if self.ammo != 0 && time.elapsed - self.last_shot_time >= 0.1 {
self.ammo -= 1;
self.offset = Vec3::new(0.0, 0.0, -0.05);
self.last_shot_time = time.elapsed;
self.play_shot_sound(resource_manager, sound_context);
let (dir, pos) = {
let graph = scene.interface().graph;
(graph.get(self.model).base().get_look_vector(), self.get_shot_position(graph))
};
match self.kind {
WeaponKind::M4 | WeaponKind::Ak47 => {
Some(Projectile::new(ProjectileKind::Bullet, resource_manager, scene,
dir, pos, self.self_handle, weapon_velocity))
}
WeaponKind::PlasmaRifle => { | identifier_body |
weapon.rs | : Handle<Node>,
offset: Vec3,
dest_offset: Vec3,
last_shot_time: f64,
shot_position: Vec3,
owner: Handle<Actor>,
ammo: u32,
definition: &'static WeaponDefinition,
}
pub struct WeaponDefinition {
model: &'static str,
shot_sound: &'static str,
ammo: u32,
}
impl HandleFromSelf<Weapon> for Weapon {
fn self_handle(&self) -> Handle<Weapon> {
self.self_handle
}
}
impl Default for Weapon {
fn default() -> Self {
Self {
self_handle: Default::default(),
kind: WeaponKind::M4,
laser_dot: Handle::NONE,
model: Handle::NONE,
offset: Vec3::ZERO,
shot_point: Handle::NONE,
dest_offset: Vec3::ZERO,
last_shot_time: 0.0,
shot_position: Vec3::ZERO,
owner: Handle::NONE,
ammo: 250,
definition: Self::get_definition(WeaponKind::M4),
}
}
}
impl Visit for Weapon {
fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {
visitor.enter_region(name)?;
let mut kind_id = self.kind.id();
kind_id.visit("KindId", visitor)?;
if visitor.is_reading() {
self.kind = WeaponKind::new(kind_id)?
}
self.definition = Self::get_definition(self.kind);
self.self_handle.visit("SelfHandle", visitor)?;
self.model.visit("Model", visitor)?;
self.laser_dot.visit("LaserDot", visitor)?;
self.offset.visit("Offset", visitor)?;
self.dest_offset.visit("DestOffset", visitor)?;
self.last_shot_time.visit("LastShotTime", visitor)?;
self.owner.visit("Owner", visitor)?;
self.ammo.visit("Ammo", visitor)?;
visitor.leave_region()
}
}
impl Weapon {
pub fn get_definition(kind: WeaponKind) -> &'static WeaponDefinition {
match kind {
WeaponKind::M4 => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/m4.FBX",
shot_sound: "data/sounds/m4_shot.wav",
ammo: 115,
};
&DEFINITION
}
WeaponKind::Ak47 => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/ak47.FBX",
shot_sound: "data/sounds/m4_shot.wav",
ammo: 100,
};
&DEFINITION
}
WeaponKind::PlasmaRifle => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/plasma_rifle.FBX",
shot_sound: "data/sounds/plasma_shot.wav",
ammo: 40,
};
&DEFINITION
}
}
}
pub fn new(kind: WeaponKind, resource_manager: &mut ResourceManager, scene: &mut Scene) -> Weapon {
let definition = Self::get_definition(kind);
let model = Model::instantiate(
resource_manager.request_model(Path::new(definition.model)).unwrap(), scene).root;
let SceneInterfaceMut { graph, .. } = scene.interface_mut();
let laser_dot = graph.add_node(Node::Light(
LightBuilder::new(LightKind::Point(PointLight::new(0.5)), BaseBuilder::new())
.with_color(Color::opaque(255, 0, 0))
.cast_shadows(false)
.build()));
let shot_point = graph.find_by_name(model, "Weapon:ShotPoint");
if shot_point.is_none() {
println!("Shot point not found!");
}
Weapon {
kind,
laser_dot,
model,
shot_point,
definition,
ammo: definition.ammo,
..Default::default()
}
}
pub fn set_visibility(&self, visibility: bool, graph: &mut Graph) {
graph.get_mut(self.model).base_mut().set_visibility(visibility);
graph.get_mut(self.laser_dot).base_mut().set_visibility(visibility);
}
pub fn get_model(&self) -> Handle<Node> {
self.model
}
pub fn update(&mut self, scene: &mut Scene) {
let SceneInterfaceMut { graph, physics, .. } = scene.interface_mut();
self.offset.follow(&self.dest_offset, 0.2);
self.update_laser_sight(graph, physics);
let node = graph.get_mut(self.model);
node.base_mut().get_local_transform_mut().set_position(self.offset);
self.shot_position = node.base().get_global_position();
}
fn get_shot_position(&self, graph: &Graph) -> Vec3 {
if self.shot_point.is_some() {
graph.get(self.shot_point).base().get_global_position()
} else {
// Fallback
graph.get(self.model).base().get_global_position()
}
}
pub fn get_kind(&self) -> WeaponKind {
self.kind
}
pub fn add_ammo(&mut self, amount: u32) {
self.ammo += amount;
}
fn update_laser_sight(&self, graph: &mut Graph, physics: &Physics) {
let mut laser_dot_position = Vec3::ZERO;
let model = graph.get(self.model);
let begin = model.base().get_global_position();
let end = begin + model.base().get_look_vector().scale(100.0);
if let Some(ray) = Ray::from_two_points(&begin, &end) {
let mut result = Vec::new();
if physics.ray_cast(&ray, RayCastOptions::default(), &mut result) {
let offset = result[0].normal.normalized().unwrap_or_default().scale(0.2);
laser_dot_position = result[0].position + offset;
}
}
graph.get_mut(self.laser_dot).base_mut().get_local_transform_mut().set_position(laser_dot_position);
}
fn play_shot_sound(&self, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>) {
let mut sound_context = sound_context.lock().unwrap();
let shot_buffer = resource_manager.request_sound_buffer(
Path::new(self.definition.shot_sound), BufferKind::Normal).unwrap();
let mut shot_sound = Source::new_spatial(shot_buffer).unwrap();
shot_sound.set_play_once(true);
shot_sound.play();
shot_sound.as_spatial_mut().set_position(&self.shot_position);
sound_context.add_source(shot_sound);
}
pub fn get_ammo(&self) -> u32 {
self.ammo
}
pub fn get_owner(&self) -> Handle<Actor> {
self.owner
}
pub fn set_owner(&mut self, owner: Handle<Actor>) {
self.owner = owner;
}
pub fn try_shoot(&mut self,
scene: &mut Scene,
resource_manager: &mut ResourceManager,
sound_context: Arc<Mutex<Context>>,
time: GameTime,
weapon_velocity: Vec3) -> Option<Projectile> {
if self.ammo != 0 && time.elapsed - self.last_shot_time >= 0.1 {
self.ammo -= 1;
self.offset = Vec3::new(0.0, 0.0, -0.05);
self.last_shot_time = time.elapsed;
self.play_shot_sound(resource_manager, sound_context);
let (dir, pos) = {
let graph = scene.interface().graph;
(graph.get(self.model).base().get_look_vector(), self.get_shot_position(graph))
};
match self.kind {
WeaponKind::M4 | WeaponKind::Ak47 => {
Some(Projectile::new(ProjectileKind::Bullet, resource_manager, scene,
dir, pos, self.self_handle, weapon_velocity))
}
WeaponKind::PlasmaRifle => {
Some(Projectile::new(ProjectileKind::Plasma, resource_manager, scene,
dir, pos, self.self_handle, weapon_velocity))
}
}
} else {
None
}
}
}
impl CleanUp for Weapon {
fn clean_up(&mut self, scene: &mut Scene) {
let SceneInterfaceMut { graph, .. } = scene.interface_mut();
graph.remove_node(self.model);
graph.remove_node(self.laser_dot);
}
}
pub struct WeaponContainer {
pool: Pool<Weapon>
}
impl WeaponContainer {
pub fn new() -> Self {
Self {
pool: Pool::new()
}
}
pub fn add(&mut self, weapon: Weapon) -> Handle<Weapon> {
let handle = self.pool.spawn(weapon);
self.pool.borrow_mut(handle).self_handle = handle;
handle
}
pub fn iter(&self) -> PoolIterator<Weapon> {
self.pool.iter()
}
pub fn iter_mut(&mut self) -> PoolIteratorMut<Weapon> {
self.pool.iter_mut()
}
pub fn get(&self, handle: Handle<Weapon>) -> &Weapon {
self.pool.borrow(handle)
}
pub fn get_mut(&mut self, handle: Handle<Weapon>) -> &mut Weapon {
self.pool.borrow_mut(handle)
}
pub fn update(&mut self, scene: &mut Scene) {
for weapon in self.pool.iter_mut() { | weapon.update(scene)
}
} | random_line_split | |
weapon.rs | ,
},
base::{BaseBuilder, AsBase},
},
core::{
pool::{
Pool,
PoolIterator,
PoolIteratorMut,
Handle,
},
color::Color,
visitor::{
Visit,
VisitResult,
Visitor,
},
math::{vec3::Vec3, ray::Ray},
},
};
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum WeaponKind {
M4,
Ak47,
PlasmaRifle,
}
impl WeaponKind {
pub fn id(&self) -> u32 {
match self {
WeaponKind::M4 => 0,
WeaponKind::Ak47 => 1,
WeaponKind::PlasmaRifle => 2
}
}
pub fn new(id: u32) -> Result<Self, String> {
match id {
0 => Ok(WeaponKind::M4),
1 => Ok(WeaponKind::Ak47),
2 => Ok(WeaponKind::PlasmaRifle),
_ => return Err(format!("unknown weapon kind {}", id))
}
}
}
pub struct Weapon {
self_handle: Handle<Weapon>,
kind: WeaponKind,
model: Handle<Node>,
laser_dot: Handle<Node>,
shot_point: Handle<Node>,
offset: Vec3,
dest_offset: Vec3,
last_shot_time: f64,
shot_position: Vec3,
owner: Handle<Actor>,
ammo: u32,
definition: &'static WeaponDefinition,
}
pub struct WeaponDefinition {
model: &'static str,
shot_sound: &'static str,
ammo: u32,
}
impl HandleFromSelf<Weapon> for Weapon {
fn self_handle(&self) -> Handle<Weapon> {
self.self_handle
}
}
impl Default for Weapon {
fn default() -> Self {
Self {
self_handle: Default::default(),
kind: WeaponKind::M4,
laser_dot: Handle::NONE,
model: Handle::NONE,
offset: Vec3::ZERO,
shot_point: Handle::NONE,
dest_offset: Vec3::ZERO,
last_shot_time: 0.0,
shot_position: Vec3::ZERO,
owner: Handle::NONE,
ammo: 250,
definition: Self::get_definition(WeaponKind::M4),
}
}
}
impl Visit for Weapon {
fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {
visitor.enter_region(name)?;
let mut kind_id = self.kind.id();
kind_id.visit("KindId", visitor)?;
if visitor.is_reading() {
self.kind = WeaponKind::new(kind_id)?
}
self.definition = Self::get_definition(self.kind);
self.self_handle.visit("SelfHandle", visitor)?;
self.model.visit("Model", visitor)?;
self.laser_dot.visit("LaserDot", visitor)?;
self.offset.visit("Offset", visitor)?;
self.dest_offset.visit("DestOffset", visitor)?;
self.last_shot_time.visit("LastShotTime", visitor)?;
self.owner.visit("Owner", visitor)?;
self.ammo.visit("Ammo", visitor)?;
visitor.leave_region()
}
}
impl Weapon {
pub fn get_definition(kind: WeaponKind) -> &'static WeaponDefinition {
match kind {
WeaponKind::M4 => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/m4.FBX",
shot_sound: "data/sounds/m4_shot.wav",
ammo: 115,
};
&DEFINITION
}
WeaponKind::Ak47 => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/ak47.FBX",
shot_sound: "data/sounds/m4_shot.wav",
ammo: 100,
};
&DEFINITION
}
WeaponKind::PlasmaRifle => {
static DEFINITION: WeaponDefinition = WeaponDefinition {
model: "data/models/plasma_rifle.FBX",
shot_sound: "data/sounds/plasma_shot.wav",
ammo: 40,
};
&DEFINITION
}
}
}
pub fn new(kind: WeaponKind, resource_manager: &mut ResourceManager, scene: &mut Scene) -> Weapon {
let definition = Self::get_definition(kind);
let model = Model::instantiate(
resource_manager.request_model(Path::new(definition.model)).unwrap(), scene).root;
let SceneInterfaceMut { graph, .. } = scene.interface_mut();
let laser_dot = graph.add_node(Node::Light(
LightBuilder::new(LightKind::Point(PointLight::new(0.5)), BaseBuilder::new())
.with_color(Color::opaque(255, 0, 0))
.cast_shadows(false)
.build()));
let shot_point = graph.find_by_name(model, "Weapon:ShotPoint");
if shot_point.is_none() {
println!("Shot point not found!");
}
Weapon {
kind,
laser_dot,
model,
shot_point,
definition,
ammo: definition.ammo,
..Default::default()
}
}
pub fn set_visibility(&self, visibility: bool, graph: &mut Graph) {
graph.get_mut(self.model).base_mut().set_visibility(visibility);
graph.get_mut(self.laser_dot).base_mut().set_visibility(visibility);
}
pub fn get_model(&self) -> Handle<Node> {
self.model
}
pub fn update(&mut self, scene: &mut Scene) {
let SceneInterfaceMut { graph, physics, .. } = scene.interface_mut();
self.offset.follow(&self.dest_offset, 0.2);
self.update_laser_sight(graph, physics);
let node = graph.get_mut(self.model);
node.base_mut().get_local_transform_mut().set_position(self.offset);
self.shot_position = node.base().get_global_position();
}
fn get_shot_position(&self, graph: &Graph) -> Vec3 {
if self.shot_point.is_some() {
graph.get(self.shot_point).base().get_global_position()
} else {
// Fallback
graph.get(self.model).base().get_global_position()
}
}
pub fn get_kind(&self) -> WeaponKind {
self.kind
}
pub fn add_ammo(&mut self, amount: u32) {
self.ammo += amount;
}
fn update_laser_sight(&self, graph: &mut Graph, physics: &Physics) {
let mut laser_dot_position = Vec3::ZERO;
let model = graph.get(self.model);
let begin = model.base().get_global_position();
let end = begin + model.base().get_look_vector().scale(100.0);
if let Some(ray) = Ray::from_two_points(&begin, &end) {
let mut result = Vec::new();
if physics.ray_cast(&ray, RayCastOptions::default(), &mut result) {
let offset = result[0].normal.normalized().unwrap_or_default().scale(0.2);
laser_dot_position = result[0].position + offset;
}
}
graph.get_mut(self.laser_dot).base_mut().get_local_transform_mut().set_position(laser_dot_position);
}
fn play_shot_sound(&self, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>) {
let mut sound_context = sound_context.lock().unwrap();
let shot_buffer = resource_manager.request_sound_buffer(
Path::new(self.definition.shot_sound), BufferKind::Normal).unwrap();
let mut shot_sound = Source::new_spatial(shot_buffer).unwrap();
shot_sound.set_play_once(true);
shot_sound.play();
shot_sound.as_spatial_mut().set_position(&self.shot_position);
sound_context.add_source(shot_sound);
}
pub fn get_ammo(&self) -> u32 {
self.ammo
}
pub fn get_owner(&self) -> Handle<Actor> {
self.owner
}
pub fn set_owner(&mut self, owner: Handle<Actor>) {
self.owner = owner;
}
pub fn try_shoot(&mut self,
scene: &mut Scene,
resource_manager: &mut ResourceManager,
sound_context: Arc<Mutex<Context>>,
time: GameTime,
weapon_velocity: Vec3) -> Option<Projectile> {
if self.ammo != 0 && time.elapsed - self.last_shot_time >= 0.1 {
self.ammo -= 1;
self.offset = Vec3::new(0.0, 0.0, -0.05);
self.last_shot_time = time.elapsed;
self.play_shot_sound(resource_manager, sound_context);
let (dir, pos) = {
let graph = scene.interface().graph;
(graph.get(self.model).base().get_look_vector(), self.get_shot_position(graph))
};
match self.kind {
WeaponKind::M4 | WeaponKind::Ak47 => |
WeaponKind::PlasmaRifle => {
Some(Projectile::new(ProjectileKind::Plasma, resource_manager, scene,
dir, pos, self.self_handle, weapon_velocity))
}
}
} else {
None
}
| {
Some(Projectile::new(ProjectileKind::Bullet, resource_manager, scene,
dir, pos, self.self_handle, weapon_velocity))
} | conditional_block |
unet3d.py | ".format(self.name))
# The last convolution
self.lastconv = lm.Convolution3dModule(
3, 1, stride=1, padding=0, activation=None,
bias=False,
name="{}_lconv".format(self.name))
def forward(self, x):
self.instance += 1
x_concat = []
for i in range(self.NUM_LEVELS):
x = self.downconvs[i](x)
x_concat.append(x)
x = lbann.Pooling(
x, num_dims=3, has_vectors=False,
pool_dims_i=2, pool_pads_i=0, pool_strides_i=2,
pool_mode="max",
name="{}_pool{}_instance{}".format(
self.name, i+1, self.instance))
x = self.bottomconv(x)
for i in range(self.NUM_LEVELS):
x = self.deconvs[i](x)
x = self.upconvs[i](x, x_concat=x_concat[self.NUM_LEVELS-1-i])
x = self.lastconv(x)
x = lbann.Softmax(
x,
softmax_mode="channel")
return x
class UNet3DConvBlock(lm.Module):
"""Basic block of an optional concatenation layer and
a list of 3D convolutional layers.
"""
def __init__(self, out_channels_list, name):
super().__init__()
self.name = name
self.instance = 0
assert len(out_channels_list) == 2
self.convs = []
for i, channels in enumerate(out_channels_list):
self.convs.append(Convolution3dBNModule(
channels,
3,
stride=1,
padding=1,
activation=lbann.Relu,
bias=False,
name="{}_conv_block_{}".format(self.name, i+1)))
def forward(self, x, x_concat=None):
self.instance += 1
if x_concat is not None:
x = lbann.Concatenation(
[x, x_concat],
axis=0)
for c in self.convs:
x = c(x)
return x
class Convolution3dBNModule(lm.Module):
"""Basic block of a batch-normalization layer, 3D convolutional
layer, and an optional activation layer.
"""
def __init__(self, *args, **kwargs):
super().__init__()
self.name = kwargs["name"]
self.activation = None if "activation" not in kwargs.keys() \
else kwargs["activation"]
kwargs["activation"] = None
self.conv = lm.Convolution3dModule(*args, **kwargs)
bn_scale = lbann.Weights(
initializer=lbann.ConstantInitializer(value=1.0),
name="{}_bn_scale".format(self.name))
bn_bias = lbann.Weights(
initializer=lbann.ConstantInitializer(value=0.0),
name="{}_bn_bias".format(self.name))
self.bn_weights = [bn_scale, bn_bias]
self.instance = 0
def forward(self, x):
self.instance += 1
x = self.conv(x)
x = lbann.BatchNormalization(
x,
weights=self.bn_weights,
statistics_group_size=-1,
name="{}_bn_instance{}".format(
self.name,
self.instance))
if self.activation is not None:
x = self.activation(x)
return x
class Deconvolution3dModule(lm.ConvolutionModule):
"""Basic block for 3D deconvolutional neural networks.
Applies a deconvolution and a nonlinear activation function.
This is a wrapper class for ConvolutionModule.
"""
def __init__(self, *args, **kwargs):
super().__init__(3, transpose=True, *args, **kwargs)
def create_unet3d_data_reader(train_dir, test_dir):
readers = []
for role, shuffle, role_dir in [
("train", True, train_dir),
("test", False, test_dir)]:
if role_dir is None:
continue
readers.append(lbann.reader_pb2.Reader(
name="hdf5",
role=role,
shuffle=shuffle,
data_file_pattern="{}/*.hdf5".format(role_dir),
validation_percent=0,
percent_of_data_to_use=1.0,
scaling_factor_int16=1.0,
hdf5_key_data="volume",
hdf5_key_labels="segmentation",
hdf5_hyperslab_labels=True,
disable_labels=False,
disable_responses=True,
))
return lbann.reader_pb2.DataReader(reader=readers)
def create_unet3d_optimizer(learn_rate):
# TODO: This is a temporal optimizer copied from CosomoFlow.
adam = lbann.Adam(
learn_rate=learn_rate,
beta1=0.9,
beta2=0.999,
eps=1e-8)
return adam
if __name__ == '__main__':
desc = ('Construct and run the 3D U-Net on a 3D segmentation dataset.'
'Running the experiment is only supported on LC systems.')
parser = argparse.ArgumentParser(description=desc)
lbann.contrib.args.add_scheduler_arguments(parser)
# General arguments
parser.add_argument(
'--job-name', action='store', default='lbann_unet3d', type=str,
help='scheduler job name (default: lbann_unet3d)')
parser.add_argument(
'--mini-batch-size', action='store', default=1, type=int,
help='mini-batch size (default: 1)', metavar='NUM')
parser.add_argument(
'--num-epochs', action='store', default=5, type=int,
help='number of epochs (default: 100)', metavar='NUM')
# Model specific arguments
parser.add_argument(
'--learning-rate', action='store', default=0.001, type=float,
help='the initial learning rate (default: 0.001)')
parser.add_argument(
'--partition-level', action='store', default=4, type=int,
help='the spatial partition level (default: 4)')
parser.add_argument(
'--depth-groups', action='store', type=int, default=4,
help='the number of processes for the depth dimension (default: 4)')
default_lc_dataset = '/p/gpfs1/brainusr/datasets/LiTS/hdf5_dim128_float'
default_train_dir = '{}/train'.format(default_lc_dataset)
default_test_dir = '{}/test'.format(default_lc_dataset)
parser.add_argument(
'--train-dir', action='store', type=str, default=default_train_dir,
help='the directory of the training dataset (default: \'{}\')'
.format(default_train_dir))
parser.add_argument(
'--test-dir', action='store', type=str, default=default_test_dir,
help='the directory of the test dataset (default: \'{}\')'
.format(default_test_dir))
parser.add_argument(
'--dynamically-reclaim-error-signals', action='store_true',
help='Allow LBANN to reclaim error signals buffers (default: False)')
parser.add_argument(
'--batch-job', action='store_true',
help='Run as a batch job (default: false)')
lbann.contrib.args.add_optimizer_arguments(
parser,
default_optimizer="adam",
default_learning_rate=0.001,
)
args = parser.parse_args()
parallel_strategy = get_parallel_strategy_args(
sample_groups=args.mini_batch_size,
depth_groups=args.depth_groups)
# Construct layer graph
input = lbann.Input(
target_mode='label_reconstruction')
volume = lbann.Identity(input)
output = UNet3D()(volume)
segmentation = lbann.Identity(input)
ce = lbann.CrossEntropy(
[output, segmentation],
use_labels=True)
obj = lbann.ObjectiveFunction([ce])
layers = list(lbann.traverse_layer_graph(input))
for l in layers:
l.parallel_strategy = parallel_strategy
# Setup model
metrics = [lbann.Metric(ce, name='CE', unit='')]
callbacks = [
lbann.CallbackPrint(),
lbann.CallbackTimer(),
lbann.CallbackGPUMemoryUsage(),
lbann.CallbackProfiler(skip_init=True),
]
# # TODO: Use polynomial learning rate decay (https://github.com/LLNL/lbann/issues/1581)
# callbacks.append(
# lbann.CallbackPolyLearningRate(
# power=1.0,
# num_epochs=100,
# end_lr=1e-5))
model = lbann.Model(
epochs=args.num_epochs,
layers=layers,
objective_function=obj,
callbacks=callbacks,
)
# Setup optimizer
optimizer = lbann.contrib.args.create_optimizer(args)
# Setup data reader
data_reader = create_unet3d_data_reader(
train_dir=args.train_dir,
test_dir=args.test_dir)
# Setup trainer
trainer = lbann.Trainer(mini_batch_size=args.mini_batch_size)
# Runtime parameters/arguments
environment = lbann.contrib.args.get_distconv_environment(
num_io_partitions=args.depth_groups)
if args.dynamically_reclaim_error_signals:
| environment['LBANN_KEEP_ERROR_SIGNALS'] = 0 | conditional_block | |
unet3d.py | from lbann.core.util import get_parallel_strategy_args
class UNet3D(lm.Module):
"""The 3D U-Net.
See:
\"{O}zg\"{u}n \c{C}i\c{c}ek, Ahmed Abdulkadir, Soeren S. Lienkamp,
Thomas Brox, and Olaf Ronneberger. "3D U-Net: learning dense volumetric
segmentation from sparse annotation." In International conference
on medical image computing and computer-assisted intervention,
pp. 424-432, 2016.
Note that this model assumes the same spatial input/output sizes with
extra padding to simplify the implementation.
"""
global_count = 0 # Static counter, used for default names
def __init__(self, name=None):
"""Initialize 3D U-Net.
Args:
name (str, optional): Module name
(default: 'alexnet_module<index>').
"""
UNet3D.global_count += 1
self.instance = 0
self.name = (name if name
else "unet3d_module{0}".format(UNet3D.global_count))
# The list of ([down-conv filters], [up-conv filters], deconv filters)
self.BLOCKS = [
([32, 64], [64, 64], 128), # bconv1_down, bconv3_up, deconv3
([64, 128], [128, 128], 256), # bconv2_down, bconv2_up, deconv2
([128, 256], [256, 256], 512), # bconv3_down, bconv1_up, deconv1
]
# The list of the number of filters of the "bottom" convolution block
self.BOTTOM_BLOCK = [256, 512]
# The number of pooling/deconvolution layers
self.NUM_LEVELS = len(self.BLOCKS)
# Whether PARTITIONED_LEVELS-th pooling/deconvolution is partitioned
self.PARTITION_INCLUDE_POOL = True
# Deconvolution should have the same number of input/output channels
assert self.BLOCKS[-1][2] == self.BOTTOM_BLOCK[1]
assert all([self.BLOCKS[x][2] == self.BLOCKS[x+1][1][-1]
for x in range(self.NUM_LEVELS-1)])
# Building blocks
self.downconvs = []
self.upconvs = []
self.deconvs = []
for i, blocks in enumerate(self.BLOCKS):
downBlock, upBlock, deconv = blocks
self.downconvs.append(UNet3DConvBlock(
downBlock, name="{}_bconv{}_down".format(self.name, i+1)))
ui = self.NUM_LEVELS-1-i
self.upconvs.insert(0, UNet3DConvBlock(
upBlock, name="{}_bconv{}_up".format(self.name, ui+1)))
self.deconvs.insert(0, Deconvolution3dModule(
deconv, 2, stride=2, padding=0, activation=None,
bias=False,
name="{}_deconv{}".format(self.name, ui+1)))
# The bottom convolution
self.bottomconv = UNet3DConvBlock(
self.BOTTOM_BLOCK, name="{}_bconv_bottom".format(self.name))
# The last convolution
self.lastconv = lm.Convolution3dModule(
3, 1, stride=1, padding=0, activation=None,
bias=False,
name="{}_lconv".format(self.name))
def forward(self, x):
self.instance += 1
x_concat = []
for i in range(self.NUM_LEVELS):
x = self.downconvs[i](x)
x_concat.append(x)
x = lbann.Pooling(
x, num_dims=3, has_vectors=False,
pool_dims_i=2, pool_pads_i=0, pool_strides_i=2,
pool_mode="max",
name="{}_pool{}_instance{}".format(
self.name, i+1, self.instance))
x = self.bottomconv(x)
for i in range(self.NUM_LEVELS):
x = self.deconvs[i](x)
x = self.upconvs[i](x, x_concat=x_concat[self.NUM_LEVELS-1-i])
x = self.lastconv(x)
x = lbann.Softmax(
x,
softmax_mode="channel")
return x
class UNet3DConvBlock(lm.Module):
"""Basic block of an optional concatenation layer and
a list of 3D convolutional layers.
"""
def __init__(self, out_channels_list, name):
super().__init__()
self.name = name
self.instance = 0
assert len(out_channels_list) == 2
self.convs = []
for i, channels in enumerate(out_channels_list):
self.convs.append(Convolution3dBNModule(
channels,
3,
stride=1,
padding=1,
activation=lbann.Relu,
bias=False,
name="{}_conv_block_{}".format(self.name, i+1)))
def forward(self, x, x_concat=None):
self.instance += 1
if x_concat is not None:
x = lbann.Concatenation(
[x, x_concat],
axis=0)
for c in self.convs:
x = c(x)
return x
class Convolution3dBNModule(lm.Module):
"""Basic block of a batch-normalization layer, 3D convolutional
layer, and an optional activation layer.
"""
def __init__(self, *args, **kwargs):
super().__init__()
self.name = kwargs["name"]
self.activation = None if "activation" not in kwargs.keys() \
else kwargs["activation"]
kwargs["activation"] = None
self.conv = lm.Convolution3dModule(*args, **kwargs)
bn_scale = lbann.Weights(
initializer=lbann.ConstantInitializer(value=1.0),
name="{}_bn_scale".format(self.name))
bn_bias = lbann.Weights(
initializer=lbann.ConstantInitializer(value=0.0),
name="{}_bn_bias".format(self.name))
self.bn_weights = [bn_scale, bn_bias]
self.instance = 0
def forward(self, x):
self.instance += 1
x = self.conv(x)
x = lbann.BatchNormalization(
x,
weights=self.bn_weights,
statistics_group_size=-1,
name="{}_bn_instance{}".format(
self.name,
self.instance))
if self.activation is not None:
x = self.activation(x)
return x
class Deconvolution3dModule(lm.ConvolutionModule):
"""Basic block for 3D deconvolutional neural networks.
Applies a deconvolution and a nonlinear activation function.
This is a wrapper class for ConvolutionModule.
"""
def __init__(self, *args, **kwargs):
super().__init__(3, transpose=True, *args, **kwargs)
def create_unet3d_data_reader(train_dir, test_dir):
readers = []
for role, shuffle, role_dir in [
("train", True, train_dir),
("test", False, test_dir)]:
if role_dir is None:
continue
readers.append(lbann.reader_pb2.Reader(
name="hdf5",
role=role,
shuffle=shuffle,
data_file_pattern="{}/*.hdf5".format(role_dir),
validation_percent=0,
percent_of_data_to_use=1.0,
scaling_factor_int16=1.0,
hdf5_key_data="volume",
hdf5_key_labels="segmentation",
hdf5_hyperslab_labels=True,
disable_labels=False,
disable_responses=True,
))
return lbann.reader_pb2.DataReader(reader=readers)
def create_unet3d_optimizer(learn_rate):
# TODO: This is a temporal optimizer copied from CosomoFlow.
adam = lbann.Adam(
learn_rate=learn_rate,
beta1=0.9,
beta2=0.999,
eps=1e-8)
return adam
if __name__ == '__main__':
desc = ('Construct and run the 3D U-Net on a 3D segmentation dataset.'
'Running the experiment is only supported on LC systems.')
parser = argparse.ArgumentParser(description=desc)
lbann.contrib.args.add_scheduler_arguments(parser)
# General arguments
parser.add_argument(
'--job-name', action='store', default='lbann_unet3d', type=str,
help='scheduler job name (default: lbann_unet3d)')
parser.add_argument(
'--mini-batch-size', action='store', default=1, type=int,
help='mini-batch size (default: 1)', metavar='NUM')
parser.add_argument(
'--num-epochs', action='store', default= | import lbann
import lbann.models
import lbann.contrib.args
import lbann.contrib.launcher
import lbann.modules as lm | random_line_split | |
unet3d.py | self.name = (name if name
else "unet3d_module{0}".format(UNet3D.global_count))
# The list of ([down-conv filters], [up-conv filters], deconv filters)
self.BLOCKS = [
([32, 64], [64, 64], 128), # bconv1_down, bconv3_up, deconv3
([64, 128], [128, 128], 256), # bconv2_down, bconv2_up, deconv2
([128, 256], [256, 256], 512), # bconv3_down, bconv1_up, deconv1
]
# The list of the number of filters of the "bottom" convolution block
self.BOTTOM_BLOCK = [256, 512]
# The number of pooling/deconvolution layers
self.NUM_LEVELS = len(self.BLOCKS)
# Whether PARTITIONED_LEVELS-th pooling/deconvolution is partitioned
self.PARTITION_INCLUDE_POOL = True
# Deconvolution should have the same number of input/output channels
assert self.BLOCKS[-1][2] == self.BOTTOM_BLOCK[1]
assert all([self.BLOCKS[x][2] == self.BLOCKS[x+1][1][-1]
for x in range(self.NUM_LEVELS-1)])
# Building blocks
self.downconvs = []
self.upconvs = []
self.deconvs = []
for i, blocks in enumerate(self.BLOCKS):
downBlock, upBlock, deconv = blocks
self.downconvs.append(UNet3DConvBlock(
downBlock, name="{}_bconv{}_down".format(self.name, i+1)))
ui = self.NUM_LEVELS-1-i
self.upconvs.insert(0, UNet3DConvBlock(
upBlock, name="{}_bconv{}_up".format(self.name, ui+1)))
self.deconvs.insert(0, Deconvolution3dModule(
deconv, 2, stride=2, padding=0, activation=None,
bias=False,
name="{}_deconv{}".format(self.name, ui+1)))
# The bottom convolution
self.bottomconv = UNet3DConvBlock(
self.BOTTOM_BLOCK, name="{}_bconv_bottom".format(self.name))
# The last convolution
self.lastconv = lm.Convolution3dModule(
3, 1, stride=1, padding=0, activation=None,
bias=False,
name="{}_lconv".format(self.name))
def forward(self, x):
self.instance += 1
x_concat = []
for i in range(self.NUM_LEVELS):
x = self.downconvs[i](x)
x_concat.append(x)
x = lbann.Pooling(
x, num_dims=3, has_vectors=False,
pool_dims_i=2, pool_pads_i=0, pool_strides_i=2,
pool_mode="max",
name="{}_pool{}_instance{}".format(
self.name, i+1, self.instance))
x = self.bottomconv(x)
for i in range(self.NUM_LEVELS):
x = self.deconvs[i](x)
x = self.upconvs[i](x, x_concat=x_concat[self.NUM_LEVELS-1-i])
x = self.lastconv(x)
x = lbann.Softmax(
x,
softmax_mode="channel")
return x
class UNet3DConvBlock(lm.Module):
"""Basic block of an optional concatenation layer and
a list of 3D convolutional layers.
"""
def __init__(self, out_channels_list, name):
super().__init__()
self.name = name
self.instance = 0
assert len(out_channels_list) == 2
self.convs = []
for i, channels in enumerate(out_channels_list):
self.convs.append(Convolution3dBNModule(
channels,
3,
stride=1,
padding=1,
activation=lbann.Relu,
bias=False,
name="{}_conv_block_{}".format(self.name, i+1)))
def forward(self, x, x_concat=None):
self.instance += 1
if x_concat is not None:
x = lbann.Concatenation(
[x, x_concat],
axis=0)
for c in self.convs:
x = c(x)
return x
class Convolution3dBNModule(lm.Module):
"""Basic block of a batch-normalization layer, 3D convolutional
layer, and an optional activation layer.
"""
def __init__(self, *args, **kwargs):
|
def forward(self, x):
self.instance += 1
x = self.conv(x)
x = lbann.BatchNormalization(
x,
weights=self.bn_weights,
statistics_group_size=-1,
name="{}_bn_instance{}".format(
self.name,
self.instance))
if self.activation is not None:
x = self.activation(x)
return x
class Deconvolution3dModule(lm.ConvolutionModule):
"""Basic block for 3D deconvolutional neural networks.
Applies a deconvolution and a nonlinear activation function.
This is a wrapper class for ConvolutionModule.
"""
def __init__(self, *args, **kwargs):
super().__init__(3, transpose=True, *args, **kwargs)
def create_unet3d_data_reader(train_dir, test_dir):
readers = []
for role, shuffle, role_dir in [
("train", True, train_dir),
("test", False, test_dir)]:
if role_dir is None:
continue
readers.append(lbann.reader_pb2.Reader(
name="hdf5",
role=role,
shuffle=shuffle,
data_file_pattern="{}/*.hdf5".format(role_dir),
validation_percent=0,
percent_of_data_to_use=1.0,
scaling_factor_int16=1.0,
hdf5_key_data="volume",
hdf5_key_labels="segmentation",
hdf5_hyperslab_labels=True,
disable_labels=False,
disable_responses=True,
))
return lbann.reader_pb2.DataReader(reader=readers)
def create_unet3d_optimizer(learn_rate):
# TODO: This is a temporal optimizer copied from CosomoFlow.
adam = lbann.Adam(
learn_rate=learn_rate,
beta1=0.9,
beta2=0.999,
eps=1e-8)
return adam
if __name__ == '__main__':
desc = ('Construct and run the 3D U-Net on a 3D segmentation dataset.'
'Running the experiment is only supported on LC systems.')
parser = argparse.ArgumentParser(description=desc)
lbann.contrib.args.add_scheduler_arguments(parser)
# General arguments
parser.add_argument(
'--job-name', action='store', default='lbann_unet3d', type=str,
help='scheduler job name (default: lbann_unet3d)')
parser.add_argument(
'--mini-batch-size', action='store', default=1, type=int,
help='mini-batch size (default: 1)', metavar='NUM')
parser.add_argument(
'--num-epochs', action='store', default=5, type=int,
help='number of epochs (default: 100)', metavar='NUM')
# Model specific arguments
parser.add_argument(
'--learning-rate', action='store', default=0.001, type=float,
help='the initial learning rate (default: 0.001)')
parser.add_argument(
'--partition-level', action='store', default=4, type=int,
help='the spatial partition level (default: 4)')
parser.add_argument(
'--depth-groups', action='store', type=int, default=4,
help='the number of processes for the depth dimension (default: 4)')
default_lc_dataset = '/p/gpfs1/brainusr/datasets/LiTS/hdf5_dim128_float'
default_train_dir = '{}/train'.format(default_lc_dataset)
default_test_dir = '{}/test'.format(default_lc_dataset)
parser.add_argument(
'--train-dir', action='store', type=str, default=default_train_dir,
help='the directory of the training dataset (default: \'{}\')'
.format(default_train_dir))
parser.add_argument(
'--test-dir', action='store', type=str, default=default_test_dir,
| super().__init__()
self.name = kwargs["name"]
self.activation = None if "activation" not in kwargs.keys() \
else kwargs["activation"]
kwargs["activation"] = None
self.conv = lm.Convolution3dModule(*args, **kwargs)
bn_scale = lbann.Weights(
initializer=lbann.ConstantInitializer(value=1.0),
name="{}_bn_scale".format(self.name))
bn_bias = lbann.Weights(
initializer=lbann.ConstantInitializer(value=0.0),
name="{}_bn_bias".format(self.name))
self.bn_weights = [bn_scale, bn_bias]
self.instance = 0 | identifier_body |
unet3d.py | self.name = (name if name
else "unet3d_module{0}".format(UNet3D.global_count))
# The list of ([down-conv filters], [up-conv filters], deconv filters)
self.BLOCKS = [
([32, 64], [64, 64], 128), # bconv1_down, bconv3_up, deconv3
([64, 128], [128, 128], 256), # bconv2_down, bconv2_up, deconv2
([128, 256], [256, 256], 512), # bconv3_down, bconv1_up, deconv1
]
# The list of the number of filters of the "bottom" convolution block
self.BOTTOM_BLOCK = [256, 512]
# The number of pooling/deconvolution layers
self.NUM_LEVELS = len(self.BLOCKS)
# Whether PARTITIONED_LEVELS-th pooling/deconvolution is partitioned
self.PARTITION_INCLUDE_POOL = True
# Deconvolution should have the same number of input/output channels
assert self.BLOCKS[-1][2] == self.BOTTOM_BLOCK[1]
assert all([self.BLOCKS[x][2] == self.BLOCKS[x+1][1][-1]
for x in range(self.NUM_LEVELS-1)])
# Building blocks
self.downconvs = []
self.upconvs = []
self.deconvs = []
for i, blocks in enumerate(self.BLOCKS):
downBlock, upBlock, deconv = blocks
self.downconvs.append(UNet3DConvBlock(
downBlock, name="{}_bconv{}_down".format(self.name, i+1)))
ui = self.NUM_LEVELS-1-i
self.upconvs.insert(0, UNet3DConvBlock(
upBlock, name="{}_bconv{}_up".format(self.name, ui+1)))
self.deconvs.insert(0, Deconvolution3dModule(
deconv, 2, stride=2, padding=0, activation=None,
bias=False,
name="{}_deconv{}".format(self.name, ui+1)))
# The bottom convolution
self.bottomconv = UNet3DConvBlock(
self.BOTTOM_BLOCK, name="{}_bconv_bottom".format(self.name))
# The last convolution
self.lastconv = lm.Convolution3dModule(
3, 1, stride=1, padding=0, activation=None,
bias=False,
name="{}_lconv".format(self.name))
def forward(self, x):
self.instance += 1
x_concat = []
for i in range(self.NUM_LEVELS):
x = self.downconvs[i](x)
x_concat.append(x)
x = lbann.Pooling(
x, num_dims=3, has_vectors=False,
pool_dims_i=2, pool_pads_i=0, pool_strides_i=2,
pool_mode="max",
name="{}_pool{}_instance{}".format(
self.name, i+1, self.instance))
x = self.bottomconv(x)
for i in range(self.NUM_LEVELS):
x = self.deconvs[i](x)
x = self.upconvs[i](x, x_concat=x_concat[self.NUM_LEVELS-1-i])
x = self.lastconv(x)
x = lbann.Softmax(
x,
softmax_mode="channel")
return x
class UNet3DConvBlock(lm.Module):
"""Basic block of an optional concatenation layer and
a list of 3D convolutional layers.
"""
def __init__(self, out_channels_list, name):
super().__init__()
self.name = name
self.instance = 0
assert len(out_channels_list) == 2
self.convs = []
for i, channels in enumerate(out_channels_list):
self.convs.append(Convolution3dBNModule(
channels,
3,
stride=1,
padding=1,
activation=lbann.Relu,
bias=False,
name="{}_conv_block_{}".format(self.name, i+1)))
def forward(self, x, x_concat=None):
self.instance += 1
if x_concat is not None:
x = lbann.Concatenation(
[x, x_concat],
axis=0)
for c in self.convs:
x = c(x)
return x
class Convolution3dBNModule(lm.Module):
"""Basic block of a batch-normalization layer, 3D convolutional
layer, and an optional activation layer.
"""
def __init__(self, *args, **kwargs):
super().__init__()
self.name = kwargs["name"]
self.activation = None if "activation" not in kwargs.keys() \
else kwargs["activation"]
kwargs["activation"] = None
self.conv = lm.Convolution3dModule(*args, **kwargs)
bn_scale = lbann.Weights(
initializer=lbann.ConstantInitializer(value=1.0),
name="{}_bn_scale".format(self.name))
bn_bias = lbann.Weights(
initializer=lbann.ConstantInitializer(value=0.0),
name="{}_bn_bias".format(self.name))
self.bn_weights = [bn_scale, bn_bias]
self.instance = 0
def forward(self, x):
self.instance += 1
x = self.conv(x)
x = lbann.BatchNormalization(
x,
weights=self.bn_weights,
statistics_group_size=-1,
name="{}_bn_instance{}".format(
self.name,
self.instance))
if self.activation is not None:
x = self.activation(x)
return x
class Deconvolution3dModule(lm.ConvolutionModule):
"""Basic block for 3D deconvolutional neural networks.
Applies a deconvolution and a nonlinear activation function.
This is a wrapper class for ConvolutionModule.
"""
def __init__(self, *args, **kwargs):
super().__init__(3, transpose=True, *args, **kwargs)
def create_unet3d_data_reader(train_dir, test_dir):
readers = []
for role, shuffle, role_dir in [
("train", True, train_dir),
("test", False, test_dir)]:
if role_dir is None:
continue
readers.append(lbann.reader_pb2.Reader(
name="hdf5",
role=role,
shuffle=shuffle,
data_file_pattern="{}/*.hdf5".format(role_dir),
validation_percent=0,
percent_of_data_to_use=1.0,
scaling_factor_int16=1.0,
hdf5_key_data="volume",
hdf5_key_labels="segmentation",
hdf5_hyperslab_labels=True,
disable_labels=False,
disable_responses=True,
))
return lbann.reader_pb2.DataReader(reader=readers)
def | (learn_rate):
# TODO: This is a temporal optimizer copied from CosomoFlow.
adam = lbann.Adam(
learn_rate=learn_rate,
beta1=0.9,
beta2=0.999,
eps=1e-8)
return adam
if __name__ == '__main__':
desc = ('Construct and run the 3D U-Net on a 3D segmentation dataset.'
'Running the experiment is only supported on LC systems.')
parser = argparse.ArgumentParser(description=desc)
lbann.contrib.args.add_scheduler_arguments(parser)
# General arguments
parser.add_argument(
'--job-name', action='store', default='lbann_unet3d', type=str,
help='scheduler job name (default: lbann_unet3d)')
parser.add_argument(
'--mini-batch-size', action='store', default=1, type=int,
help='mini-batch size (default: 1)', metavar='NUM')
parser.add_argument(
'--num-epochs', action='store', default=5, type=int,
help='number of epochs (default: 100)', metavar='NUM')
# Model specific arguments
parser.add_argument(
'--learning-rate', action='store', default=0.001, type=float,
help='the initial learning rate (default: 0.001)')
parser.add_argument(
'--partition-level', action='store', default=4, type=int,
help='the spatial partition level (default: 4)')
parser.add_argument(
'--depth-groups', action='store', type=int, default=4,
help='the number of processes for the depth dimension (default: 4)')
default_lc_dataset = '/p/gpfs1/brainusr/datasets/LiTS/hdf5_dim128_float'
default_train_dir = '{}/train'.format(default_lc_dataset)
default_test_dir = '{}/test'.format(default_lc_dataset)
parser.add_argument(
'--train-dir', action='store', type=str, default=default_train_dir,
help='the directory of the training dataset (default: \'{}\')'
.format(default_train_dir))
parser.add_argument(
'--test-dir', action='store', type=str, default=default_test_dir,
| create_unet3d_optimizer | identifier_name |
package.rs | Package {
fn contains(&self, component: &str, short_name: Option<&str>) -> bool {
self.components.contains(component)
|| if let Some(n) = short_name {
self.components.contains(n)
} else {
false
}
}
fn install<'a>(
&self,
target: &Components,
name: &str,
short_name: Option<&str>,
tx: Transaction<'a>,
) -> Result<Transaction<'a>> {
let actual_name = if self.components.contains(name) {
name
} else if let Some(n) = short_name {
n
} else {
name
};
let root = self.path.join(actual_name);
let manifest = utils::read_file("package manifest", &root.join("manifest.in"))?;
let mut builder = target.add(name, tx);
for l in manifest.lines() {
let part = ComponentPart::decode(l)
.ok_or_else(|| ErrorKind::CorruptComponent(name.to_owned()))?;
let path = part.1;
let src_path = root.join(&path);
match &*part.0 {
"file" => {
if self.copy {
builder.copy_file(path.clone(), &src_path)?
} else {
builder.move_file(path.clone(), &src_path)?
}
}
"dir" => {
if self.copy {
builder.copy_dir(path.clone(), &src_path)?
} else {
builder.move_dir(path.clone(), &src_path)?
}
}
_ => return Err(ErrorKind::CorruptComponent(name.to_owned()).into()),
}
set_file_perms(&target.prefix().path().join(path), &src_path)?;
}
let tx = builder.finish()?;
Ok(tx)
}
fn components(&self) -> Vec<String> {
self.components.iter().cloned().collect()
}
}
// On Unix we need to set up the file permissions correctly so
// binaries are executable and directories readable. This shouldn't be
// necessary: the source files *should* have the right permissions,
// but due to rust-lang/rust#25479 they don't.
#[cfg(unix)]
fn set_file_perms(dest_path: &Path, src_path: &Path) -> Result<()> {
use std::fs::{self, Metadata};
use std::os::unix::fs::PermissionsExt;
use walkdir::WalkDir;
// Compute whether this entry needs the X bit
fn needs_x(meta: &Metadata) -> bool {
meta.is_dir() || // Directories need it
meta.permissions().mode() & 0o700 == 0o700 // If it is rwx for the user, it gets the X bit
}
// By convention, anything in the bin/ directory of the package is a binary
let is_bin = if let Some(p) = src_path.parent() {
p.ends_with("bin")
} else {
false
};
let is_dir = utils::is_directory(dest_path);
if is_dir {
// Walk the directory setting everything
for entry in WalkDir::new(dest_path) {
let entry = entry.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?;
let meta = entry
.metadata()
.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?;
let mut perm = meta.permissions();
perm.set_mode(if needs_x(&meta) { 0o755 } else { 0o644 });
fs::set_permissions(entry.path(), perm)
.chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
}
} else {
let meta =
fs::metadata(dest_path).chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
let mut perm = meta.permissions();
perm.set_mode(if is_bin || needs_x(&meta) {
0o755
} else {
0o644
});
fs::set_permissions(dest_path, perm)
.chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
}
Ok(())
}
#[cfg(windows)]
fn set_file_perms(_dest_path: &Path, _src_path: &Path) -> Result<()> {
Ok(())
}
#[derive(Debug)]
pub struct TarPackage<'a>(DirectoryPackage, temp::Dir<'a>);
impl<'a> TarPackage<'a> {
pub fn new<R: Read>(
stream: R,
temp_cfg: &'a temp::Cfg,
notify_handler: Option<&'a dyn Fn(Notification<'_>)>,
) -> Result<Self> {
let temp_dir = temp_cfg.new_directory()?;
let mut archive = tar::Archive::new(stream);
// The rust-installer packages unpack to a directory called
// $pkgname-$version-$target. Skip that directory when
// unpacking.
unpack_without_first_dir(&mut archive, &*temp_dir, notify_handler)?;
Ok(TarPackage(
DirectoryPackage::new(temp_dir.to_owned(), false)?,
temp_dir,
))
}
}
#[cfg(windows)]
mod unpacker {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use threadpool;
use crate::utils::notifications::Notification;
pub struct Unpacker<'a> {
n_files: Arc<AtomicUsize>,
pool: threadpool::ThreadPool,
notify_handler: Option<&'a dyn Fn(Notification<'_>)>,
}
impl<'a> Unpacker<'a> {
pub fn new(notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Self |
pub fn handle(&mut self, unpacked: tar::Unpacked) {
if let tar::Unpacked::File(f) = unpacked {
self.n_files.fetch_add(1, Ordering::Relaxed);
let n_files = self.n_files.clone();
self.pool.execute(move || {
drop(f);
n_files.fetch_sub(1, Ordering::Relaxed);
});
}
}
}
impl<'a> Drop for Unpacker<'a> {
fn drop(&mut self) {
// Some explanation is in order. Even though the tar we are reading from (if
// any) will have had its FileWithProgress download tracking
// completed before we hit drop, that is not true if we are unwinding due to a
// failure, where the logical ownership of the progress bar is
// ambiguous, and as the tracker itself is abstracted out behind
// notifications etc we cannot just query for that. So: we assume no
// more reads of the underlying tar will take place: either the
// error unwinding will stop reads, or we completed; either way, we
// notify finished to the tracker to force a reset to zero; we set
// the units to files, show our progress, and set our units back
// afterwards. The largest archives today - rust docs - have ~20k
// items, and the download tracker's progress is confounded with
// actual handling of data today, we synthesis a data buffer and
// pretend to have bytes to deliver.
self.notify_handler
.map(|handler| handler(Notification::DownloadFinished));
self.notify_handler
.map(|handler| handler(Notification::DownloadPushUnits("handles")));
let mut prev_files = self.n_files.load(Ordering::Relaxed);
self.notify_handler.map(|handler| {
handler(Notification::DownloadContentLengthReceived(
prev_files as u64,
))
});
if prev_files > 50 {
println!("Closing {} deferred file handles", prev_files);
}
let buf: Vec<u8> = vec![0; prev_files];
assert!(32767 > prev_files);
let mut current_files = prev_files;
while current_files != 0 {
use std::thread::sleep;
sleep(std::time::Duration::from_millis(100));
prev_files = current_files;
current_files = self.n_files.load(Ordering::Relaxed);
let step_count = prev_files - current_files;
self.notify_handler.map(|handler| {
handler(Notification::DownloadDataReceived(&buf[0..step_count]))
});
}
self.pool.join();
self.notify_handler
.map(|handler| handler(Notification::DownloadFinished));
self.notify_handler
.map(|handler| handler(Notification::DownloadPopUnits));
}
}
}
#[cfg(not(windows))]
mod unpacker {
use crate::utils::notifications::Notification;
pub struct Unpacker {}
impl Unpacker {
pub fn new<'a>(_notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Unpacker {
Unpacker {}
}
pub fn handle(&mut self, _unpacked: tar::Unpacked) {}
}
}
fn unpack_without_first_dir<'a, R: Read>(
| {
// Defaults to hardware thread count threads; this is suitable for
// our needs as IO bound operations tend to show up as write latencies
// rather than close latencies, so we don't need to look at
// more threads to get more IO dispatched at this stage in the process.
let pool = threadpool::Builder::new()
.thread_name("CloseHandle".into())
.build();
Unpacker {
n_files: Arc::new(AtomicUsize::new(0)),
pool: pool,
notify_handler: notify_handler,
}
} | identifier_body |
package.rs | Package {
fn contains(&self, component: &str, short_name: Option<&str>) -> bool {
self.components.contains(component)
|| if let Some(n) = short_name {
self.components.contains(n)
} else {
false
}
}
fn install<'a>(
&self,
target: &Components,
name: &str,
short_name: Option<&str>,
tx: Transaction<'a>,
) -> Result<Transaction<'a>> {
let actual_name = if self.components.contains(name) {
name
} else if let Some(n) = short_name {
n
} else {
name
};
let root = self.path.join(actual_name);
let manifest = utils::read_file("package manifest", &root.join("manifest.in"))?;
let mut builder = target.add(name, tx);
for l in manifest.lines() {
let part = ComponentPart::decode(l)
.ok_or_else(|| ErrorKind::CorruptComponent(name.to_owned()))?;
let path = part.1;
let src_path = root.join(&path);
match &*part.0 {
"file" => {
if self.copy {
builder.copy_file(path.clone(), &src_path)?
} else {
builder.move_file(path.clone(), &src_path)?
}
}
"dir" => {
if self.copy {
builder.copy_dir(path.clone(), &src_path)?
} else {
builder.move_dir(path.clone(), &src_path)?
}
}
_ => return Err(ErrorKind::CorruptComponent(name.to_owned()).into()),
}
set_file_perms(&target.prefix().path().join(path), &src_path)?;
}
let tx = builder.finish()?;
Ok(tx)
}
fn components(&self) -> Vec<String> {
self.components.iter().cloned().collect()
}
}
// On Unix we need to set up the file permissions correctly so
// binaries are executable and directories readable. This shouldn't be
// necessary: the source files *should* have the right permissions,
// but due to rust-lang/rust#25479 they don't.
#[cfg(unix)]
fn set_file_perms(dest_path: &Path, src_path: &Path) -> Result<()> {
use std::fs::{self, Metadata};
use std::os::unix::fs::PermissionsExt;
use walkdir::WalkDir;
// Compute whether this entry needs the X bit
fn needs_x(meta: &Metadata) -> bool {
meta.is_dir() || // Directories need it
meta.permissions().mode() & 0o700 == 0o700 // If it is rwx for the user, it gets the X bit
}
// By convention, anything in the bin/ directory of the package is a binary
let is_bin = if let Some(p) = src_path.parent() {
p.ends_with("bin")
} else {
false
};
let is_dir = utils::is_directory(dest_path);
if is_dir {
// Walk the directory setting everything
for entry in WalkDir::new(dest_path) {
let entry = entry.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?;
let meta = entry
.metadata()
.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?;
let mut perm = meta.permissions();
perm.set_mode(if needs_x(&meta) { 0o755 } else { 0o644 });
fs::set_permissions(entry.path(), perm)
.chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
}
} else {
let meta =
fs::metadata(dest_path).chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
let mut perm = meta.permissions();
perm.set_mode(if is_bin || needs_x(&meta) {
0o755
} else {
0o644
});
fs::set_permissions(dest_path, perm)
.chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
}
Ok(())
}
#[cfg(windows)]
fn set_file_perms(_dest_path: &Path, _src_path: &Path) -> Result<()> {
Ok(())
}
#[derive(Debug)]
pub struct TarPackage<'a>(DirectoryPackage, temp::Dir<'a>);
impl<'a> TarPackage<'a> {
pub fn new<R: Read>(
stream: R,
temp_cfg: &'a temp::Cfg,
notify_handler: Option<&'a dyn Fn(Notification<'_>)>,
) -> Result<Self> {
let temp_dir = temp_cfg.new_directory()?;
let mut archive = tar::Archive::new(stream);
// The rust-installer packages unpack to a directory called
// $pkgname-$version-$target. Skip that directory when
// unpacking.
unpack_without_first_dir(&mut archive, &*temp_dir, notify_handler)?;
Ok(TarPackage(
DirectoryPackage::new(temp_dir.to_owned(), false)?,
temp_dir,
))
}
}
#[cfg(windows)]
mod unpacker {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use threadpool;
use crate::utils::notifications::Notification;
pub struct | <'a> {
n_files: Arc<AtomicUsize>,
pool: threadpool::ThreadPool,
notify_handler: Option<&'a dyn Fn(Notification<'_>)>,
}
impl<'a> Unpacker<'a> {
pub fn new(notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Self {
// Defaults to hardware thread count threads; this is suitable for
// our needs as IO bound operations tend to show up as write latencies
// rather than close latencies, so we don't need to look at
// more threads to get more IO dispatched at this stage in the process.
let pool = threadpool::Builder::new()
.thread_name("CloseHandle".into())
.build();
Unpacker {
n_files: Arc::new(AtomicUsize::new(0)),
pool: pool,
notify_handler: notify_handler,
}
}
pub fn handle(&mut self, unpacked: tar::Unpacked) {
if let tar::Unpacked::File(f) = unpacked {
self.n_files.fetch_add(1, Ordering::Relaxed);
let n_files = self.n_files.clone();
self.pool.execute(move || {
drop(f);
n_files.fetch_sub(1, Ordering::Relaxed);
});
}
}
}
impl<'a> Drop for Unpacker<'a> {
fn drop(&mut self) {
// Some explanation is in order. Even though the tar we are reading from (if
// any) will have had its FileWithProgress download tracking
// completed before we hit drop, that is not true if we are unwinding due to a
// failure, where the logical ownership of the progress bar is
// ambiguous, and as the tracker itself is abstracted out behind
// notifications etc we cannot just query for that. So: we assume no
// more reads of the underlying tar will take place: either the
// error unwinding will stop reads, or we completed; either way, we
// notify finished to the tracker to force a reset to zero; we set
// the units to files, show our progress, and set our units back
// afterwards. The largest archives today - rust docs - have ~20k
// items, and the download tracker's progress is confounded with
// actual handling of data today, we synthesis a data buffer and
// pretend to have bytes to deliver.
self.notify_handler
.map(|handler| handler(Notification::DownloadFinished));
self.notify_handler
.map(|handler| handler(Notification::DownloadPushUnits("handles")));
let mut prev_files = self.n_files.load(Ordering::Relaxed);
self.notify_handler.map(|handler| {
handler(Notification::DownloadContentLengthReceived(
prev_files as u64,
))
});
if prev_files > 50 {
println!("Closing {} deferred file handles", prev_files);
}
let buf: Vec<u8> = vec![0; prev_files];
assert!(32767 > prev_files);
let mut current_files = prev_files;
while current_files != 0 {
use std::thread::sleep;
sleep(std::time::Duration::from_millis(100));
prev_files = current_files;
current_files = self.n_files.load(Ordering::Relaxed);
let step_count = prev_files - current_files;
self.notify_handler.map(|handler| {
handler(Notification::DownloadDataReceived(&buf[0..step_count]))
});
}
self.pool.join();
self.notify_handler
.map(|handler| handler(Notification::DownloadFinished));
self.notify_handler
.map(|handler| handler(Notification::DownloadPopUnits));
}
}
}
#[cfg(not(windows))]
mod unpacker {
use crate::utils::notifications::Notification;
pub struct Unpacker {}
impl Unpacker {
pub fn new<'a>(_notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Unpacker {
Unpacker {}
}
pub fn handle(&mut self, _unpacked: tar::Unpacked) {}
}
}
fn unpack_without_first_dir<'a, R: Read>(
| Unpacker | identifier_name |
package.rs | Package {
fn contains(&self, component: &str, short_name: Option<&str>) -> bool {
self.components.contains(component)
|| if let Some(n) = short_name {
self.components.contains(n)
} else {
false
}
}
fn install<'a>(
&self,
target: &Components,
name: &str,
short_name: Option<&str>,
tx: Transaction<'a>,
) -> Result<Transaction<'a>> {
let actual_name = if self.components.contains(name) {
name
} else if let Some(n) = short_name {
n
} else {
name
};
let root = self.path.join(actual_name);
let manifest = utils::read_file("package manifest", &root.join("manifest.in"))?;
let mut builder = target.add(name, tx);
for l in manifest.lines() {
let part = ComponentPart::decode(l)
.ok_or_else(|| ErrorKind::CorruptComponent(name.to_owned()))?;
let path = part.1;
let src_path = root.join(&path);
match &*part.0 {
"file" => {
if self.copy {
builder.copy_file(path.clone(), &src_path)?
} else {
builder.move_file(path.clone(), &src_path)?
}
}
"dir" => {
if self.copy {
builder.copy_dir(path.clone(), &src_path)?
} else {
builder.move_dir(path.clone(), &src_path)?
}
}
_ => return Err(ErrorKind::CorruptComponent(name.to_owned()).into()),
}
set_file_perms(&target.prefix().path().join(path), &src_path)?;
}
let tx = builder.finish()?;
Ok(tx)
}
fn components(&self) -> Vec<String> {
self.components.iter().cloned().collect()
}
}
// On Unix we need to set up the file permissions correctly so
// binaries are executable and directories readable. This shouldn't be
// necessary: the source files *should* have the right permissions,
// but due to rust-lang/rust#25479 they don't.
#[cfg(unix)]
fn set_file_perms(dest_path: &Path, src_path: &Path) -> Result<()> {
use std::fs::{self, Metadata};
use std::os::unix::fs::PermissionsExt;
use walkdir::WalkDir;
// Compute whether this entry needs the X bit
fn needs_x(meta: &Metadata) -> bool {
meta.is_dir() || // Directories need it
meta.permissions().mode() & 0o700 == 0o700 // If it is rwx for the user, it gets the X bit
}
// By convention, anything in the bin/ directory of the package is a binary
let is_bin = if let Some(p) = src_path.parent() {
p.ends_with("bin")
} else {
false
};
let is_dir = utils::is_directory(dest_path);
if is_dir {
// Walk the directory setting everything
for entry in WalkDir::new(dest_path) {
let entry = entry.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?;
let meta = entry
.metadata()
.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?;
let mut perm = meta.permissions();
perm.set_mode(if needs_x(&meta) { 0o755 } else { 0o644 });
fs::set_permissions(entry.path(), perm)
.chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
}
} else {
let meta =
fs::metadata(dest_path).chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
let mut perm = meta.permissions();
perm.set_mode(if is_bin || needs_x(&meta) {
0o755
} else {
0o644
});
fs::set_permissions(dest_path, perm)
.chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
}
Ok(())
}
#[cfg(windows)]
fn set_file_perms(_dest_path: &Path, _src_path: &Path) -> Result<()> {
Ok(())
}
#[derive(Debug)]
pub struct TarPackage<'a>(DirectoryPackage, temp::Dir<'a>);
impl<'a> TarPackage<'a> {
pub fn new<R: Read>(
stream: R,
temp_cfg: &'a temp::Cfg,
notify_handler: Option<&'a dyn Fn(Notification<'_>)>,
) -> Result<Self> {
let temp_dir = temp_cfg.new_directory()?;
let mut archive = tar::Archive::new(stream);
// The rust-installer packages unpack to a directory called
// $pkgname-$version-$target. Skip that directory when
// unpacking.
unpack_without_first_dir(&mut archive, &*temp_dir, notify_handler)?;
Ok(TarPackage(
DirectoryPackage::new(temp_dir.to_owned(), false)?,
temp_dir,
))
}
}
#[cfg(windows)]
mod unpacker {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use threadpool;
use crate::utils::notifications::Notification;
pub struct Unpacker<'a> {
n_files: Arc<AtomicUsize>,
pool: threadpool::ThreadPool,
notify_handler: Option<&'a dyn Fn(Notification<'_>)>,
}
impl<'a> Unpacker<'a> {
pub fn new(notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Self {
// Defaults to hardware thread count threads; this is suitable for
// our needs as IO bound operations tend to show up as write latencies
// rather than close latencies, so we don't need to look at
// more threads to get more IO dispatched at this stage in the process.
let pool = threadpool::Builder::new()
.thread_name("CloseHandle".into())
.build();
Unpacker {
n_files: Arc::new(AtomicUsize::new(0)),
pool: pool,
notify_handler: notify_handler,
}
}
pub fn handle(&mut self, unpacked: tar::Unpacked) {
if let tar::Unpacked::File(f) = unpacked {
self.n_files.fetch_add(1, Ordering::Relaxed);
let n_files = self.n_files.clone();
self.pool.execute(move || {
drop(f);
n_files.fetch_sub(1, Ordering::Relaxed);
});
}
}
}
impl<'a> Drop for Unpacker<'a> {
fn drop(&mut self) {
// Some explanation is in order. Even though the tar we are reading from (if
// any) will have had its FileWithProgress download tracking
// completed before we hit drop, that is not true if we are unwinding due to a
// failure, where the logical ownership of the progress bar is
// ambiguous, and as the tracker itself is abstracted out behind
// notifications etc we cannot just query for that. So: we assume no
// more reads of the underlying tar will take place: either the
// error unwinding will stop reads, or we completed; either way, we
// notify finished to the tracker to force a reset to zero; we set
// the units to files, show our progress, and set our units back
// afterwards. The largest archives today - rust docs - have ~20k
// items, and the download tracker's progress is confounded with
// actual handling of data today, we synthesis a data buffer and
// pretend to have bytes to deliver.
self.notify_handler
.map(|handler| handler(Notification::DownloadFinished));
self.notify_handler
.map(|handler| handler(Notification::DownloadPushUnits("handles"))); | handler(Notification::DownloadContentLengthReceived(
prev_files as u64,
))
});
if prev_files > 50 {
println!("Closing {} deferred file handles", prev_files);
}
let buf: Vec<u8> = vec![0; prev_files];
assert!(32767 > prev_files);
let mut current_files = prev_files;
while current_files != 0 {
use std::thread::sleep;
sleep(std::time::Duration::from_millis(100));
prev_files = current_files;
current_files = self.n_files.load(Ordering::Relaxed);
let step_count = prev_files - current_files;
self.notify_handler.map(|handler| {
handler(Notification::DownloadDataReceived(&buf[0..step_count]))
});
}
self.pool.join();
self.notify_handler
.map(|handler| handler(Notification::DownloadFinished));
self.notify_handler
.map(|handler| handler(Notification::DownloadPopUnits));
}
}
}
#[cfg(not(windows))]
mod unpacker {
use crate::utils::notifications::Notification;
pub struct Unpacker {}
impl Unpacker {
pub fn new<'a>(_notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Unpacker {
Unpacker {}
}
pub fn handle(&mut self, _unpacked: tar::Unpacked) {}
}
}
fn unpack_without_first_dir<'a, R: Read>(
archive | let mut prev_files = self.n_files.load(Ordering::Relaxed);
self.notify_handler.map(|handler| { | random_line_split |
package.rs | Package {
fn contains(&self, component: &str, short_name: Option<&str>) -> bool {
self.components.contains(component)
|| if let Some(n) = short_name {
self.components.contains(n)
} else {
false
}
}
fn install<'a>(
&self,
target: &Components,
name: &str,
short_name: Option<&str>,
tx: Transaction<'a>,
) -> Result<Transaction<'a>> {
let actual_name = if self.components.contains(name) {
name
} else if let Some(n) = short_name {
n
} else {
name
};
let root = self.path.join(actual_name);
let manifest = utils::read_file("package manifest", &root.join("manifest.in"))?;
let mut builder = target.add(name, tx);
for l in manifest.lines() {
let part = ComponentPart::decode(l)
.ok_or_else(|| ErrorKind::CorruptComponent(name.to_owned()))?;
let path = part.1;
let src_path = root.join(&path);
match &*part.0 {
"file" => {
if self.copy {
builder.copy_file(path.clone(), &src_path)?
} else {
builder.move_file(path.clone(), &src_path)?
}
}
"dir" => {
if self.copy {
builder.copy_dir(path.clone(), &src_path)?
} else {
builder.move_dir(path.clone(), &src_path)?
}
}
_ => return Err(ErrorKind::CorruptComponent(name.to_owned()).into()),
}
set_file_perms(&target.prefix().path().join(path), &src_path)?;
}
let tx = builder.finish()?;
Ok(tx)
}
fn components(&self) -> Vec<String> {
self.components.iter().cloned().collect()
}
}
// On Unix we need to set up the file permissions correctly so
// binaries are executable and directories readable. This shouldn't be
// necessary: the source files *should* have the right permissions,
// but due to rust-lang/rust#25479 they don't.
#[cfg(unix)]
fn set_file_perms(dest_path: &Path, src_path: &Path) -> Result<()> {
use std::fs::{self, Metadata};
use std::os::unix::fs::PermissionsExt;
use walkdir::WalkDir;
// Compute whether this entry needs the X bit
fn needs_x(meta: &Metadata) -> bool {
meta.is_dir() || // Directories need it
meta.permissions().mode() & 0o700 == 0o700 // If it is rwx for the user, it gets the X bit
}
// By convention, anything in the bin/ directory of the package is a binary
let is_bin = if let Some(p) = src_path.parent() {
p.ends_with("bin")
} else {
false
};
let is_dir = utils::is_directory(dest_path);
if is_dir {
// Walk the directory setting everything
for entry in WalkDir::new(dest_path) {
let entry = entry.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?;
let meta = entry
.metadata()
.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?;
let mut perm = meta.permissions();
perm.set_mode(if needs_x(&meta) { 0o755 } else { 0o644 });
fs::set_permissions(entry.path(), perm)
.chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
}
} else {
let meta =
fs::metadata(dest_path).chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
let mut perm = meta.permissions();
perm.set_mode(if is_bin || needs_x(&meta) | else {
0o644
});
fs::set_permissions(dest_path, perm)
.chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?;
}
Ok(())
}
#[cfg(windows)]
fn set_file_perms(_dest_path: &Path, _src_path: &Path) -> Result<()> {
Ok(())
}
#[derive(Debug)]
pub struct TarPackage<'a>(DirectoryPackage, temp::Dir<'a>);
impl<'a> TarPackage<'a> {
pub fn new<R: Read>(
stream: R,
temp_cfg: &'a temp::Cfg,
notify_handler: Option<&'a dyn Fn(Notification<'_>)>,
) -> Result<Self> {
let temp_dir = temp_cfg.new_directory()?;
let mut archive = tar::Archive::new(stream);
// The rust-installer packages unpack to a directory called
// $pkgname-$version-$target. Skip that directory when
// unpacking.
unpack_without_first_dir(&mut archive, &*temp_dir, notify_handler)?;
Ok(TarPackage(
DirectoryPackage::new(temp_dir.to_owned(), false)?,
temp_dir,
))
}
}
#[cfg(windows)]
mod unpacker {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use threadpool;
use crate::utils::notifications::Notification;
pub struct Unpacker<'a> {
n_files: Arc<AtomicUsize>,
pool: threadpool::ThreadPool,
notify_handler: Option<&'a dyn Fn(Notification<'_>)>,
}
impl<'a> Unpacker<'a> {
pub fn new(notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Self {
// Defaults to hardware thread count threads; this is suitable for
// our needs as IO bound operations tend to show up as write latencies
// rather than close latencies, so we don't need to look at
// more threads to get more IO dispatched at this stage in the process.
let pool = threadpool::Builder::new()
.thread_name("CloseHandle".into())
.build();
Unpacker {
n_files: Arc::new(AtomicUsize::new(0)),
pool: pool,
notify_handler: notify_handler,
}
}
pub fn handle(&mut self, unpacked: tar::Unpacked) {
if let tar::Unpacked::File(f) = unpacked {
self.n_files.fetch_add(1, Ordering::Relaxed);
let n_files = self.n_files.clone();
self.pool.execute(move || {
drop(f);
n_files.fetch_sub(1, Ordering::Relaxed);
});
}
}
}
impl<'a> Drop for Unpacker<'a> {
fn drop(&mut self) {
// Some explanation is in order. Even though the tar we are reading from (if
// any) will have had its FileWithProgress download tracking
// completed before we hit drop, that is not true if we are unwinding due to a
// failure, where the logical ownership of the progress bar is
// ambiguous, and as the tracker itself is abstracted out behind
// notifications etc we cannot just query for that. So: we assume no
// more reads of the underlying tar will take place: either the
// error unwinding will stop reads, or we completed; either way, we
// notify finished to the tracker to force a reset to zero; we set
// the units to files, show our progress, and set our units back
// afterwards. The largest archives today - rust docs - have ~20k
// items, and the download tracker's progress is confounded with
// actual handling of data today, we synthesis a data buffer and
// pretend to have bytes to deliver.
self.notify_handler
.map(|handler| handler(Notification::DownloadFinished));
self.notify_handler
.map(|handler| handler(Notification::DownloadPushUnits("handles")));
let mut prev_files = self.n_files.load(Ordering::Relaxed);
self.notify_handler.map(|handler| {
handler(Notification::DownloadContentLengthReceived(
prev_files as u64,
))
});
if prev_files > 50 {
println!("Closing {} deferred file handles", prev_files);
}
let buf: Vec<u8> = vec![0; prev_files];
assert!(32767 > prev_files);
let mut current_files = prev_files;
while current_files != 0 {
use std::thread::sleep;
sleep(std::time::Duration::from_millis(100));
prev_files = current_files;
current_files = self.n_files.load(Ordering::Relaxed);
let step_count = prev_files - current_files;
self.notify_handler.map(|handler| {
handler(Notification::DownloadDataReceived(&buf[0..step_count]))
});
}
self.pool.join();
self.notify_handler
.map(|handler| handler(Notification::DownloadFinished));
self.notify_handler
.map(|handler| handler(Notification::DownloadPopUnits));
}
}
}
#[cfg(not(windows))]
mod unpacker {
use crate::utils::notifications::Notification;
pub struct Unpacker {}
impl Unpacker {
pub fn new<'a>(_notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Unpacker {
Unpacker {}
}
pub fn handle(&mut self, _unpacked: tar::Unpacked) {}
}
}
fn unpack_without_first_dir<'a, R: Read>(
| {
0o755
} | conditional_block |
app.js | '],
computed: {
isUpgrade : function() {
return this.plan.price > this.active.price;
//this.plan.price
//this.active.price
}
},
methods: {
setActivePlan: function(){
this.active = this.plan;
}
}
}
}
});
new Vue({
el:"#app5",
data: {
tasks: [
{ body: 'Go to the store', completed:false,price:100},
{ body: 'Go to the bank', completed:false,price:100 },
{ body: 'Go to the doctor', completed:true,price:100 }
]
},
components: {
taskcomp: {
template:'#tasks-template',
props:['list'],
computed: {
remaining: function() {
// completedTasks =
// inProgress = !
var vm = this;
// return this.list.filter(function(task){
// return !vm.isCompleted(task);
// }).length;
return this.list.filter(this.isInProgress).length;
}
},
methods: {
setPrice: function(task) {
task.price = 50;
},
isCompleted:function(task) {
return task.completed;
},
isInProgress:function(task) {
return !this.isCompleted(task);
},
deleteTask: function(task) {
this.list.$remove(task);
},
clearCompleted: function(){
this.list = this.list.filter(this.isInProgress);
}
}
}
}
});
Vue.component('tasks',{
template: '#tasks-template',
data: function(){
return {
list: []
};
},
created: function() {
this.fetchTaskList();
// var vm = this;
// $.getJSON('api/tasks', function(tasks){
// console.log(tasks);
// // console.log(data);
// //assign these data to a list
// vm.list = tasks;
// }.bind(this));
},
//with api/tasks, no longer pass props through from our sever side.
// props:['list'],
//when this components is initially created, I want to go ahead and make a AJAX request.
// created() is shorthand
// created() {
// // this.list = JSON.parse(this.list);
// },
//
methods: {
fetchTaskList: function(){
var resource = this.$resource('api/tasks{/id}');
resource.get({}).then((response) => {
this.list = response.data;
});
// resource.update({id:5},{body:'Update task body'}).then((response) =>{
// this.list = response.data;
// });
},
// this.$http.get('api/tasks').then((response) => {
// console.log(response.);
// }, (response) => {
// // error callback
// });
// }
// $.getJSON('api/tasks', function(tasks){
// this.list = tasks;
// }.bind(this));
deleteTask: function(task){
this.list.$remove(task);
},
getTaskID:function(task) {
console.log(task.id);
}
}
});
new Vue({
el:'#app6'
});
Vue.filter('jsonIt',function(value){
return JSON.stringify(value);
});
Vue.filter('role',function(value,role){
return value.filter(function(item) {
return item.role == role;
});
});// people | role 'admin'
Vue.component('message',{
template:'#message-template',
data:function() {
return { message :''};
},
methods: {
storeMessage: function() {
// console.log('Storing ' + this.message);
// $dispatch, parent can listen for that event, which is handled
this.$dispatch('new-message',this.message);
// $broadcast
this.message = '';
}
}
});
// new Vue({
// el:'#app',
// data: {
// people: [
// {name:'Joe',role:'admin'},
// {name:'Susan',role:'admin'},
// {name:'Frank',role:'student'},
// {name:'Jeffrey',role:'admin'},
// ],
// messages: []
// },
// methods:{
// handleNewMessage: function(message) {
// this.messages.push(message);
// // console.log('Parent is handling ' + message);
// }
// },
// // events: {
// // "new-message": function(message) {
// // console.log('Parent is handling ' + message);
// // }
// // },
// components: { Alert },
// ready() {
// // alert('Ready to go!');
// }
// })
//Lesson
// Vue.directive('ajax',{
// //The way of this work is:
// //when you attach this ('ajax') directive to html tag,
// //vue will instantly call this bind() method
// bind: function() {
// },
// //Next, as soon as that finishs, the update() method will
// //instantly be called, and the way they work is : this will
// //repeatly be called, whenever the bind value(value) changes
// update: function(value) {
// },
// //And finally, as you might expect, when the directive is unbind,
// //will trigger this method, where you can remove any of their listeners or same stuffs like that
// unbind: function() {
// }
// });
// // keep it simple
// Vue.directive('ajax',function(value){});
// Vue.http.headers.common['X-CSRF-TOKEN'] = document.querySelector('input[name="_token"]').value;
Vue.directive('ajax',{
params: ['complete'],
bind: function() {
// console.log(this.params);
//First thing we need to do, directive object, which we can fetch.
//In our situation, we need to add a event listener to the form element
//if you using jQuery, you could say: $(this.el).on('---')
//Let's listen for when the form is submitted, and then will trigger a method called "onSubmit"
this.el.addEventListener('submit',this.onSubmit.bind(this));
},
update: function(value) {
// alert('update');
},
//when we trigger this,
onSubmit:function(e) {
//"this"-->will no longer refer to which is bund in the bind() method (this.el.---),
//"this" now refer to the form that will be submitted
//so if we wanna say: no, within the context of this method, we still wanna this to refer
//to it did before, JavaScript did it in a very weird stuffs: just make sure we bind the obeject:
//.bind(this) in the above method
//
//We prevent the default action, so we do not submit the form, and instead we use
//view resource to submit (this.vm in the following)
e.preventDefault();
// Vue.http.post
//
// this.vm refers to the viewmodel (new Vue({el:"#app7"}))
// Quick note on using the array syntax here:
// a lot people don't know this with javaScript, in this example, we could say:
// vm.$http.post or your can format it with vm.$http['post']
// this options is very good when you referencing a variable like [requestType],
// because obviously when you use the dot-synatax: vm.$http.requestType that's will not going to work
//this.el.action = 'tasks/1'
this.vm
.$http[this.getRequestType()](this.el.action)
.then(this.onComplete.bind(this))
//change response in app/Exceptions/Handler.php
//.success or .fail in jQuery
.catch(this.onError.bind(this));
},
onComplete:function () {
if(this.params.complete) {
alert(this.params.complete);
}
},
onError: function(response) {
alert(response.data.message); //flash message in real life
},
//We need to figure out what the RequestType should be for this form.
getRequestType: function() {
//see the console, and find out the name of the hidden input of DELETE
var method = this.el.querySelector('input[name="_method"]');
//if not, we just grab the method attribute of the form
//that will be a 'DELETE'->'delete' or 'POST'->'post'
return (method ? method.value : this.el.method).toLowerCase();
}
});
new Vue({
el:'#app7',
http: {
headers: {
'X-CSRF-TOKEN': document.querySelector('input[name="_token"]').value
}
}
});
var min = Vue.extend({
template: '#clock-min'
});
var hour = Vue.extend({
template: '#clock-hour'
});
var sec = Vue.extend({
template: '#clock-sec'
});
var time = Vue.extend({
template: '#clock-time',
props: ['myMessage']
});
Vue.component('clock', {
template: '#clock-template',
components: {
'min-component': min,
'hour-component': hour,
'sec-component': sec,
'time-component': time
},
data () {
return { time: "00:00:00" }
},
ready () | {
this.startTime()
} | identifier_body | |
app.js | plan-template',
props:['plan','active'],
computed: {
isUpgrade : function() {
return this.plan.price > this.active.price;
//this.plan.price
//this.active.price
}
},
methods: {
setActivePlan: function(){
this.active = this.plan;
}
}
}
}
});
new Vue({
el:"#app5",
data: {
tasks: [
{ body: 'Go to the store', completed:false,price:100},
{ body: 'Go to the bank', completed:false,price:100 },
{ body: 'Go to the doctor', completed:true,price:100 }
]
},
components: {
taskcomp: {
template:'#tasks-template',
props:['list'],
computed: {
remaining: function() {
// completedTasks =
// inProgress = !
var vm = this;
// return this.list.filter(function(task){
// return !vm.isCompleted(task);
// }).length;
return this.list.filter(this.isInProgress).length;
}
},
methods: {
setPrice: function(task) {
task.price = 50;
},
isCompleted:function(task) {
return task.completed;
},
isInProgress:function(task) {
return !this.isCompleted(task);
},
deleteTask: function(task) {
this.list.$remove(task);
},
clearCompleted: function(){
this.list = this.list.filter(this.isInProgress);
}
}
}
}
});
Vue.component('tasks',{
template: '#tasks-template',
data: function(){
return {
list: []
};
},
created: function() {
this.fetchTaskList();
// var vm = this;
// $.getJSON('api/tasks', function(tasks){
// console.log(tasks);
// // console.log(data);
// //assign these data to a list
// vm.list = tasks;
// }.bind(this));
},
//with api/tasks, no longer pass props through from our sever side.
// props:['list'],
//when this components is initially created, I want to go ahead and make a AJAX request.
// created() is shorthand
// created() {
// // this.list = JSON.parse(this.list);
// },
//
methods: {
fetchTaskList: function(){
var resource = this.$resource('api/tasks{/id}');
resource.get({}).then((response) => {
this.list = response.data;
});
// resource.update({id:5},{body:'Update task body'}).then((response) =>{
// this.list = response.data;
// });
},
// this.$http.get('api/tasks').then((response) => {
// console.log(response.);
// }, (response) => {
// // error callback
// });
// }
// $.getJSON('api/tasks', function(tasks){
// this.list = tasks;
// }.bind(this));
deleteTask: function(task){
this.list.$remove(task);
},
getTaskID:function(task) {
console.log(task.id);
}
}
});
new Vue({
el:'#app6'
});
Vue.filter('jsonIt',function(value){
return JSON.stringify(value);
});
Vue.filter('role',function(value,role){
return value.filter(function(item) {
return item.role == role;
});
});// people | role 'admin'
Vue.component('message',{
template:'#message-template',
data:function() {
return { message :''};
},
methods: {
storeMessage: function() {
// console.log('Storing ' + this.message);
// $dispatch, parent can listen for that event, which is handled
this.$dispatch('new-message',this.message);
// $broadcast
this.message = '';
}
}
});
// new Vue({
// el:'#app',
// data: {
// people: [
// {name:'Joe',role:'admin'},
// {name:'Susan',role:'admin'},
// {name:'Frank',role:'student'},
// {name:'Jeffrey',role:'admin'},
// ],
// messages: []
// },
// methods:{
// handleNewMessage: function(message) {
// this.messages.push(message);
// // console.log('Parent is handling ' + message);
// }
// },
// // events: {
// // "new-message": function(message) {
// // console.log('Parent is handling ' + message);
// // }
// // },
// components: { Alert },
// ready() {
// // alert('Ready to go!');
// }
// })
//Lesson
// Vue.directive('ajax',{
// //The way of this work is:
// //when you attach this ('ajax') directive to html tag,
// //vue will instantly call this bind() method
// bind: function() {
// },
// //Next, as soon as that finishs, the update() method will
// //instantly be called, and the way they work is : this will
// //repeatly be called, whenever the bind value(value) changes
// update: function(value) {
// },
// //And finally, as you might expect, when the directive is unbind,
// //will trigger this method, where you can remove any of their listeners or same stuffs like that
// unbind: function() {
// }
// });
// // keep it simple
// Vue.directive('ajax',function(value){});
// Vue.http.headers.common['X-CSRF-TOKEN'] = document.querySelector('input[name="_token"]').value;
Vue.directive('ajax',{
params: ['complete'],
bind: function() {
// console.log(this.params);
//First thing we need to do, directive object, which we can fetch.
//In our situation, we need to add a event listener to the form element
//if you using jQuery, you could say: $(this.el).on('---')
//Let's listen for when the form is submitted, and then will trigger a method called "onSubmit"
this.el.addEventListener('submit',this.onSubmit.bind(this));
},
update: function(value) {
// alert('update');
},
//when we trigger this,
onSubmit:function(e) {
//"this"-->will no longer refer to which is bund in the bind() method (this.el.---),
//"this" now refer to the form that will be submitted
//so if we wanna say: no, within the context of this method, we still wanna this to refer
//to it did before, JavaScript did it in a very weird stuffs: just make sure we bind the obeject:
//.bind(this) in the above method
//
//We prevent the default action, so we do not submit the form, and instead we use
//view resource to submit (this.vm in the following)
e.preventDefault();
// Vue.http.post
//
// this.vm refers to the viewmodel (new Vue({el:"#app7"}))
// Quick note on using the array syntax here:
// a lot people don't know this with javaScript, in this example, we could say:
// vm.$http.post or your can format it with vm.$http['post']
// this options is very good when you referencing a variable like [requestType],
// because obviously when you use the dot-synatax: vm.$http.requestType that's will not going to work
//this.el.action = 'tasks/1'
this.vm
.$http[this.getRequestType()](this.el.action)
.then(this.onComplete.bind(this))
//change response in app/Exceptions/Handler.php
//.success or .fail in jQuery
.catch(this.onError.bind(this));
},
onComplete:function () {
if(this.params.complete) {
alert(this.params.complete);
}
},
onError: function(response) {
alert(response.data.message); //flash message in real life
},
//We need to figure out what the RequestType should be for this form.
getRequestType: function() {
//see the console, and find out the name of the hidden input of DELETE
var method = this.el.querySelector('input[name="_method"]');
//if not, we just grab the method attribute of the form
//that will be a 'DELETE'->'delete' or 'POST'->'post'
return (method ? method.value : this.el.method).toLowerCase();
}
});
new Vue({
el:'#app7',
http: {
headers: {
'X-CSRF-TOKEN': document.querySelector('input[name="_token"]').value
}
}
});
var min = Vue.extend({
template: '#clock-min'
});
var hour = Vue.extend({
template: '#clock-hour'
});
var sec = Vue.extend({
template: '#clock-sec'
});
var time = Vue.extend({
template: '#clock-time',
props: ['myMessage']
});
Vue.component('clock', {
template: '#clock-template',
components: {
'min-component': min,
'hour-component': hour,
'sec-component': sec,
'time-component': time
},
data () {
return { time: "00:00:00" }
},
| ready | identifier_name | |
app.js | }
});
Vue.component('counter',{
template:'#counter-template',
props:['subject'],
data:function() {
return {
count:0
};
}
});
new Vue({
el: '#app3',
data: {
points: 50,
first: 'Xiajun',
last: 'Yan',
fullname: 'Xiajun Yan',
},
computed: {
skill: function() {
if (this.points <= 50) {
return 'B';
}
return 'A';
},
fullname: function() {
return this.first + ' ' + this.last;
},
},
watch:{
first:function(first){
this.fullname = first + ' ' + this.last;
},
last:function(last){
this.fullname = this.first + ' ' + last;
},
},
methods: {
handleIt: function() {
alert('Handled');
},
minusCount: function() {
this.count -= 1;
}
},
components: {
counter: {
template: '#counter-template',
props: ['subject'],
data: function() {
return {
count: 0
};
}
}
}
});
new Vue({
el: '#app',
data: {
newTodo: '',
todos: [{
text: 'Add some todos'
}]
},
methods: {
addTodo: function() {
var text = this.newTodo.trim()
if (text) {
this.todos.push({
text: text
})
this.newTodo = ''
}
},
removeTodo: function(index) {
this.todos.splice(index, 1)
}
}
});
new Vue({
el:'#app4',
data: {
// store a list of plans
plans:[
{ name:'Enterprise', price:100 },
{ name:'Pro', price:50 },
{ name:'Personal', price:10 },
{ name:'Free', price:0 },
],
active: {}
},
components: {
plan: {
// template: '#plan-template',
props:['plan','active'],
computed: {
isUpgrade : function() {
return this.plan.price > this.active.price;
//this.plan.price
//this.active.price
}
},
methods: {
setActivePlan: function(){
this.active = this.plan;
}
}
}
}
});
new Vue({
el:"#app5",
data: {
tasks: [
{ body: 'Go to the store', completed:false,price:100},
{ body: 'Go to the bank', completed:false,price:100 },
{ body: 'Go to the doctor', completed:true,price:100 }
]
},
components: {
taskcomp: {
template:'#tasks-template',
props:['list'],
computed: {
remaining: function() {
// completedTasks =
// inProgress = !
var vm = this;
// return this.list.filter(function(task){
// return !vm.isCompleted(task);
// }).length;
return this.list.filter(this.isInProgress).length;
}
},
methods: {
setPrice: function(task) {
task.price = 50;
},
isCompleted:function(task) {
return task.completed;
}, | deleteTask: function(task) {
this.list.$remove(task);
},
clearCompleted: function(){
this.list = this.list.filter(this.isInProgress);
}
}
}
}
});
Vue.component('tasks',{
template: '#tasks-template',
data: function(){
return {
list: []
};
},
created: function() {
this.fetchTaskList();
// var vm = this;
// $.getJSON('api/tasks', function(tasks){
// console.log(tasks);
// // console.log(data);
// //assign these data to a list
// vm.list = tasks;
// }.bind(this));
},
//with api/tasks, no longer pass props through from our sever side.
// props:['list'],
//when this components is initially created, I want to go ahead and make a AJAX request.
// created() is shorthand
// created() {
// // this.list = JSON.parse(this.list);
// },
//
methods: {
fetchTaskList: function(){
var resource = this.$resource('api/tasks{/id}');
resource.get({}).then((response) => {
this.list = response.data;
});
// resource.update({id:5},{body:'Update task body'}).then((response) =>{
// this.list = response.data;
// });
},
// this.$http.get('api/tasks').then((response) => {
// console.log(response.);
// }, (response) => {
// // error callback
// });
// }
// $.getJSON('api/tasks', function(tasks){
// this.list = tasks;
// }.bind(this));
deleteTask: function(task){
this.list.$remove(task);
},
getTaskID:function(task) {
console.log(task.id);
}
}
});
new Vue({
el:'#app6'
});
Vue.filter('jsonIt',function(value){
return JSON.stringify(value);
});
Vue.filter('role',function(value,role){
return value.filter(function(item) {
return item.role == role;
});
});// people | role 'admin'
Vue.component('message',{
template:'#message-template',
data:function() {
return { message :''};
},
methods: {
storeMessage: function() {
// console.log('Storing ' + this.message);
// $dispatch, parent can listen for that event, which is handled
this.$dispatch('new-message',this.message);
// $broadcast
this.message = '';
}
}
});
// new Vue({
// el:'#app',
// data: {
// people: [
// {name:'Joe',role:'admin'},
// {name:'Susan',role:'admin'},
// {name:'Frank',role:'student'},
// {name:'Jeffrey',role:'admin'},
// ],
// messages: []
// },
// methods:{
// handleNewMessage: function(message) {
// this.messages.push(message);
// // console.log('Parent is handling ' + message);
// }
// },
// // events: {
// // "new-message": function(message) {
// // console.log('Parent is handling ' + message);
// // }
// // },
// components: { Alert },
// ready() {
// // alert('Ready to go!');
// }
// })
//Lesson
// Vue.directive('ajax',{
// //The way of this work is:
// //when you attach this ('ajax') directive to html tag,
// //vue will instantly call this bind() method
// bind: function() {
// },
// //Next, as soon as that finishs, the update() method will
// //instantly be called, and the way they work is : this will
// //repeatly be called, whenever the bind value(value) changes
// update: function(value) {
// },
// //And finally, as you might expect, when the directive is unbind,
// //will trigger this method, where you can remove any of their listeners or same stuffs like that
// unbind: function() {
// }
// });
// // keep it simple
// Vue.directive('ajax',function(value){});
// Vue.http.headers.common['X-CSRF-TOKEN'] = document.querySelector('input[name="_token"]').value;
Vue.directive('ajax',{
params: ['complete'],
bind: function() {
// console.log(this.params);
//First thing we need to do, directive object, which we can fetch.
//In our situation, we need to add a event listener to the form element
//if you using jQuery, you could say: $(this.el).on('---')
//Let's listen for when the form is submitted, and then will trigger a method called "onSubmit"
this.el.addEventListener('submit',this.onSubmit.bind(this));
},
update: function(value) {
// alert('update');
},
//when we trigger this,
onSubmit:function(e) {
//"this"-->will no longer refer to which is bund in the bind() method (this.el.---),
//"this" now refer to the form that will be submitted
//so if we wanna say: no, within the context of this method, we still wanna this to refer
//to it did before, JavaScript did it in a very weird stuffs: just make sure we bind the obeject:
//.bind(this) in the above method
//
//We prevent the default action, so we do not submit the form, and instead we use
//view resource to submit (this.vm in the following)
e.preventDefault();
// Vue.http.post
//
// this.vm refers to the viewmodel (new Vue({el:"#app7"}))
// Quick note on using the array syntax here:
// a lot people don't know this with javaScript, in this example, we could | isInProgress:function(task) {
return !this.isCompleted(task);
},
| random_line_split |
app.js | }
});
Vue.component('counter',{
template:'#counter-template',
props:['subject'],
data:function() {
return {
count:0
};
}
});
new Vue({
el: '#app3',
data: {
points: 50,
first: 'Xiajun',
last: 'Yan',
fullname: 'Xiajun Yan',
},
computed: {
skill: function() {
if (this.points <= 50) |
return 'A';
},
fullname: function() {
return this.first + ' ' + this.last;
},
},
watch:{
first:function(first){
this.fullname = first + ' ' + this.last;
},
last:function(last){
this.fullname = this.first + ' ' + last;
},
},
methods: {
handleIt: function() {
alert('Handled');
},
minusCount: function() {
this.count -= 1;
}
},
components: {
counter: {
template: '#counter-template',
props: ['subject'],
data: function() {
return {
count: 0
};
}
}
}
});
new Vue({
el: '#app',
data: {
newTodo: '',
todos: [{
text: 'Add some todos'
}]
},
methods: {
addTodo: function() {
var text = this.newTodo.trim()
if (text) {
this.todos.push({
text: text
})
this.newTodo = ''
}
},
removeTodo: function(index) {
this.todos.splice(index, 1)
}
}
});
new Vue({
el:'#app4',
data: {
// store a list of plans
plans:[
{ name:'Enterprise', price:100 },
{ name:'Pro', price:50 },
{ name:'Personal', price:10 },
{ name:'Free', price:0 },
],
active: {}
},
components: {
plan: {
// template: '#plan-template',
props:['plan','active'],
computed: {
isUpgrade : function() {
return this.plan.price > this.active.price;
//this.plan.price
//this.active.price
}
},
methods: {
setActivePlan: function(){
this.active = this.plan;
}
}
}
}
});
new Vue({
el:"#app5",
data: {
tasks: [
{ body: 'Go to the store', completed:false,price:100},
{ body: 'Go to the bank', completed:false,price:100 },
{ body: 'Go to the doctor', completed:true,price:100 }
]
},
components: {
taskcomp: {
template:'#tasks-template',
props:['list'],
computed: {
remaining: function() {
// completedTasks =
// inProgress = !
var vm = this;
// return this.list.filter(function(task){
// return !vm.isCompleted(task);
// }).length;
return this.list.filter(this.isInProgress).length;
}
},
methods: {
setPrice: function(task) {
task.price = 50;
},
isCompleted:function(task) {
return task.completed;
},
isInProgress:function(task) {
return !this.isCompleted(task);
},
deleteTask: function(task) {
this.list.$remove(task);
},
clearCompleted: function(){
this.list = this.list.filter(this.isInProgress);
}
}
}
}
});
Vue.component('tasks',{
template: '#tasks-template',
data: function(){
return {
list: []
};
},
created: function() {
this.fetchTaskList();
// var vm = this;
// $.getJSON('api/tasks', function(tasks){
// console.log(tasks);
// // console.log(data);
// //assign these data to a list
// vm.list = tasks;
// }.bind(this));
},
//with api/tasks, no longer pass props through from our sever side.
// props:['list'],
//when this components is initially created, I want to go ahead and make a AJAX request.
// created() is shorthand
// created() {
// // this.list = JSON.parse(this.list);
// },
//
methods: {
fetchTaskList: function(){
var resource = this.$resource('api/tasks{/id}');
resource.get({}).then((response) => {
this.list = response.data;
});
// resource.update({id:5},{body:'Update task body'}).then((response) =>{
// this.list = response.data;
// });
},
// this.$http.get('api/tasks').then((response) => {
// console.log(response.);
// }, (response) => {
// // error callback
// });
// }
// $.getJSON('api/tasks', function(tasks){
// this.list = tasks;
// }.bind(this));
deleteTask: function(task){
this.list.$remove(task);
},
getTaskID:function(task) {
console.log(task.id);
}
}
});
new Vue({
el:'#app6'
});
Vue.filter('jsonIt',function(value){
return JSON.stringify(value);
});
Vue.filter('role',function(value,role){
return value.filter(function(item) {
return item.role == role;
});
});// people | role 'admin'
Vue.component('message',{
template:'#message-template',
data:function() {
return { message :''};
},
methods: {
storeMessage: function() {
// console.log('Storing ' + this.message);
// $dispatch, parent can listen for that event, which is handled
this.$dispatch('new-message',this.message);
// $broadcast
this.message = '';
}
}
});
// new Vue({
// el:'#app',
// data: {
// people: [
// {name:'Joe',role:'admin'},
// {name:'Susan',role:'admin'},
// {name:'Frank',role:'student'},
// {name:'Jeffrey',role:'admin'},
// ],
// messages: []
// },
// methods:{
// handleNewMessage: function(message) {
// this.messages.push(message);
// // console.log('Parent is handling ' + message);
// }
// },
// // events: {
// // "new-message": function(message) {
// // console.log('Parent is handling ' + message);
// // }
// // },
// components: { Alert },
// ready() {
// // alert('Ready to go!');
// }
// })
//Lesson
// Vue.directive('ajax',{
// //The way of this work is:
// //when you attach this ('ajax') directive to html tag,
// //vue will instantly call this bind() method
// bind: function() {
// },
// //Next, as soon as that finishs, the update() method will
// //instantly be called, and the way they work is : this will
// //repeatly be called, whenever the bind value(value) changes
// update: function(value) {
// },
// //And finally, as you might expect, when the directive is unbind,
// //will trigger this method, where you can remove any of their listeners or same stuffs like that
// unbind: function() {
// }
// });
// // keep it simple
// Vue.directive('ajax',function(value){});
// Vue.http.headers.common['X-CSRF-TOKEN'] = document.querySelector('input[name="_token"]').value;
Vue.directive('ajax',{
params: ['complete'],
bind: function() {
// console.log(this.params);
//First thing we need to do, directive object, which we can fetch.
//In our situation, we need to add a event listener to the form element
//if you using jQuery, you could say: $(this.el).on('---')
//Let's listen for when the form is submitted, and then will trigger a method called "onSubmit"
this.el.addEventListener('submit',this.onSubmit.bind(this));
},
update: function(value) {
// alert('update');
},
//when we trigger this,
onSubmit:function(e) {
//"this"-->will no longer refer to which is bund in the bind() method (this.el.---),
//"this" now refer to the form that will be submitted
//so if we wanna say: no, within the context of this method, we still wanna this to refer
//to it did before, JavaScript did it in a very weird stuffs: just make sure we bind the obeject:
//.bind(this) in the above method
//
//We prevent the default action, so we do not submit the form, and instead we use
//view resource to submit (this.vm in the following)
e.preventDefault();
// Vue.http.post
//
// this.vm refers to the viewmodel (new Vue({el:"#app7"}))
// Quick note on using the array syntax here:
// a lot people don't know this with javaScript, in this example, | {
return 'B';
} | conditional_block |
analyzeMessageLogsRev3.py | read_file(filename)
# add quick and dirty fix for new Issue Reports (Aug 2019)
tempRaw = commands[-1]['raw_value']
lastRaw = tempRaw.replace('\nstatus:','')
commands[-1]['raw_value'] = lastRaw
# add more stuff and return as a DataFrame
df = generate_table(commands, radio_on_time)
# set up a few reportable values here from df, time is in UTC
first_command = df.iloc[0]['time']
last_command = df.iloc[-1]['time']
send_receive_commands = df.groupby(['type']).size()
number_of_messages = len(df)
thisPerson, thisFinish, thisAntenna = parse_info_from_filename(thisFile)
thisFinish2 = 'Success' # default is 'Success'
if thisFinish == 'WIP':
thisFinish2 = 'WIP' # pod is still running
lastDate = last_command.date()
# Process df to generate the podState associated with every message
# Updates to states occur with pod message (mostly 1d) status
# (the state for extended_bolus_active is NOT included (always False))
# Includes values for requested bolus and TB
# Note that .iloc for df and podState are identical
podState, emptyMessageList, faultProcessedMsg = getPodState(df)
# From the podState, extract some values to use in reports
msgLogHrs = podState.iloc[-1]['timeCumSec']/3600
radioOnHrs = podState.iloc[-1]['radioOnCumSec']/3600
numberOfAssignID = len(podState[podState.message_type=='0x7'])
numberOfSetUpPod = len(podState[podState.message_type=='0x3'])
numberOfNonceResync = len(podState[podState.message_type=='06'])
insulinDelivered = podState.iloc[-1]['insulinDelivered']
sourceString = 'from last 0x1d'
# special handling if an 0x02 messages aka fault was received
if len(faultProcessedMsg):
hasFault = True
thisFault = faultProcessedMsg['logged_fault']
checkInsulin = faultProcessedMsg['insulinDelivered']
rawFault = faultProcessedMsg['raw_value']
if checkInsulin >= insulinDelivered:
insulinDelivered = checkInsulin
sourceString = 'from 0x02 msg'
else:
hasFault = False
rawFault = 'n/a'
thisFault = thisFinish
# checkAction returns actionFrame with indices and times for every action
# completed actions and incomplete requests are separate columns
# see also function getActionDict
# actionFrame dataframe of processed analysis from podState (by action)
# initIdx indices in podState to extract pod initilization
actionFrame, initIdx = checkAction(podState)
if outFile == 2:
# print a few things then returns
lot = podDict['lot']
tid = podDict['tid']
piv = podDict['piVersion']
print(f'{thisPerson},{thisAntenna},{thisFault},{first_command},{last_command},{msgLogHrs},{lot},{tid},{piv}')
actionSummary = []
return df, podState, actionFrame, actionSummary
if True:
# print out summary information to command window
# need this True to get the actionSummary used to fill csv file
print('\n First command in Log :', first_command)
print(' Last command in Log :', last_command)
print(' Lot and TID :', podDict['lot'], podDict['tid'])
print('__________________________________________\n')
print(' Summary for {:s} with {:s} ending'.format(thisFile, thisFinish))
print(' Pod Lot: {:s}, PI: {:s}, PM: {:s}'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion']))
print(' Total elapsed time in log (hrs) : {:6.1f}'.format(msgLogHrs))
print(' Radio on estimate : {:6.1f}, {:5.1f}%'.format(radioOnHrs, 100*radioOnHrs/msgLogHrs))
print(' Number of messages : {:6d}'.format(number_of_messages))
print(' Number of nonce resyncs : {:6d}'.format(numberOfNonceResync))
print(' Insulin delivered (u) : {:6.2f} ({:s})'.format(insulinDelivered, sourceString))
if hasFault:
thisFinish = thisFault
thisFinish2 = 'Fault'
if thisFault == '0x1C':
print(' An 0x0202 message of {:s} reported - 80 hour time limit'.format(thisFault))
thisFinish2 = 'Success'
elif thisFault == '0x18':
print(' An 0x0202 message of {:s} reported - out of insulin'.format(thisFault))
thisFinish2 = 'Success'
elif thisFault == '0x34':
print(' An 0x0202 message of {:s} reported - this wipes out registers'.format(thisFault))
else:
print(' An 0x0202 message of {:s} reported - details later'.format(thisFault))
print('\n Pod was initialized with {:d} messages, {:d} AssignID, {:d} SetUpPod required'.format(len(initIdx), \
numberOfAssignID, numberOfSetUpPod))
if emptyMessageList:
print(' *** Detected {:d} empty message(s) during life of the pod'.format(len(emptyMessageList)))
print(' *** indices:', emptyMessageList)
# process the action frame (returns a dictionary plus total completed message count)
actionSummary, totalCompletedMessages = processActionFrame(actionFrame, podState)
printActionSummary(actionSummary)
percentCompleted = 100*totalCompletedMessages/number_of_messages
print(' #Messages in completed actions : {:5d} : {:.1f}%'.format( \
totalCompletedMessages, percentCompleted))
if hasFault:
print('\nFault Details')
printDict(faultProcessedMsg)
# if an output filename is provided - write statistics to it (csv format)
if outFile:
# check if file exists
isItThere = os.path.isfile(outFile)
# now open the file
stream_out = open(outFile,mode='at')
# write the column headers if this is a new file
if not isItThere:
# set up a table format order
headerString = 'Who, finish State, Finish2, lastMsg Date, podOn (hrs), radioOn (hrs), radioOn (%), ' + \
'#Messages, #Completed, % Completed, #Send, #Recv, ' + \
'#Nonce Resync, #TB, #Bolus, ' \
'#Basal, #Status Check, ' + \
'#Schedule Before TempBasal, #TB Spaced <30s, ' + \
'#Repeat TB Value, #Repeat TB <30s, ' + \
' #RepTB 30s to 19min, #incomplete TB, ' + \
'insulin Delivered, # Initialize Cmds, # AssignID (0x07), ' + \
'# SetUpPod (0x03), Pod Lot, PI Version, PM Version, ' + \
'raw fault, filename'
stream_out.write(headerString)
stream_out.write('\n')
# Extract items from actionSummary
if actionSummary.get('TB'):
subDict = actionSummary.get('TB')
numberOfTB = subDict['countCompleted']
numberScheduleBeforeTempBasal = subDict['numSchBasalbeforeTB']
numberTBSepLessThan30sec = subDict['numShortTB']
numRepeatedTB = subDict['numRepeatedTB']
numRepeatedShortTB = subDict['numRepeatedShortTB']
numrepeated19MinTB = subDict['numrepeated19MinTB']
else:
numberOfTB = 0
numberScheduleBeforeTempBasal = 0
numberTBSepLessThan30sec = 0
numRepeatedTB = 0
if actionSummary.get('Bolus'):
subDict = actionSummary.get('Bolus') | if actionSummary.get('Basal'):
subDict = actionSummary.get('Basal')
numberOfBasal = subDict['countCompleted']
else:
numberOfBasal = 0
if actionSummary.get('StatusCheck'):
subDict = actionSummary.get('StatusCheck')
numberOfStatusRequests = subDict['countCompleted']
else:
numberOfStatusRequests = 0
if actionSummary.get('CancelTB'):
subDict = actionSummary.get('CancelTB')
numIncomplCancelTB = subDict['countIncomplete']
else:
numIncomplCancelTB = 0
# write out | numberOfBolus = subDict['countCompleted']
else:
numberOfBolus = 0
| random_line_split |
analyzeMessageLogsRev3.py | _file(filename)
# add quick and dirty fix for new Issue Reports (Aug 2019)
tempRaw = commands[-1]['raw_value']
lastRaw = tempRaw.replace('\nstatus:','')
commands[-1]['raw_value'] = lastRaw
# add more stuff and return as a DataFrame
df = generate_table(commands, radio_on_time)
# set up a few reportable values here from df, time is in UTC
first_command = df.iloc[0]['time']
last_command = df.iloc[-1]['time']
send_receive_commands = df.groupby(['type']).size()
number_of_messages = len(df)
thisPerson, thisFinish, thisAntenna = parse_info_from_filename(thisFile)
thisFinish2 = 'Success' # default is 'Success'
if thisFinish == 'WIP':
thisFinish2 = 'WIP' # pod is still running
lastDate = last_command.date()
# Process df to generate the podState associated with every message
# Updates to states occur with pod message (mostly 1d) status
# (the state for extended_bolus_active is NOT included (always False))
# Includes values for requested bolus and TB
# Note that .iloc for df and podState are identical
podState, emptyMessageList, faultProcessedMsg = getPodState(df)
# From the podState, extract some values to use in reports
msgLogHrs = podState.iloc[-1]['timeCumSec']/3600
radioOnHrs = podState.iloc[-1]['radioOnCumSec']/3600
numberOfAssignID = len(podState[podState.message_type=='0x7'])
numberOfSetUpPod = len(podState[podState.message_type=='0x3'])
numberOfNonceResync = len(podState[podState.message_type=='06'])
insulinDelivered = podState.iloc[-1]['insulinDelivered']
sourceString = 'from last 0x1d'
# special handling if an 0x02 messages aka fault was received
if len(faultProcessedMsg):
hasFault = True
thisFault = faultProcessedMsg['logged_fault']
checkInsulin = faultProcessedMsg['insulinDelivered']
rawFault = faultProcessedMsg['raw_value']
if checkInsulin >= insulinDelivered:
insulinDelivered = checkInsulin
sourceString = 'from 0x02 msg'
else:
hasFault = False
rawFault = 'n/a'
thisFault = thisFinish
# checkAction returns actionFrame with indices and times for every action
# completed actions and incomplete requests are separate columns
# see also function getActionDict
# actionFrame dataframe of processed analysis from podState (by action)
# initIdx indices in podState to extract pod initilization
actionFrame, initIdx = checkAction(podState)
if outFile == 2:
# print a few things then returns
lot = podDict['lot']
tid = podDict['tid']
piv = podDict['piVersion']
print(f'{thisPerson},{thisAntenna},{thisFault},{first_command},{last_command},{msgLogHrs},{lot},{tid},{piv}')
actionSummary = []
return df, podState, actionFrame, actionSummary
if True:
# print out summary information to command window
# need this True to get the actionSummary used to fill csv file
print('\n First command in Log :', first_command)
print(' Last command in Log :', last_command)
print(' Lot and TID :', podDict['lot'], podDict['tid'])
print('__________________________________________\n')
print(' Summary for {:s} with {:s} ending'.format(thisFile, thisFinish))
print(' Pod Lot: {:s}, PI: {:s}, PM: {:s}'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion']))
print(' Total elapsed time in log (hrs) : {:6.1f}'.format(msgLogHrs))
print(' Radio on estimate : {:6.1f}, {:5.1f}%'.format(radioOnHrs, 100*radioOnHrs/msgLogHrs))
print(' Number of messages : {:6d}'.format(number_of_messages))
print(' Number of nonce resyncs : {:6d}'.format(numberOfNonceResync))
print(' Insulin delivered (u) : {:6.2f} ({:s})'.format(insulinDelivered, sourceString))
if hasFault:
thisFinish = thisFault
thisFinish2 = 'Fault'
if thisFault == '0x1C':
print(' An 0x0202 message of {:s} reported - 80 hour time limit'.format(thisFault))
thisFinish2 = 'Success'
elif thisFault == '0x18':
print(' An 0x0202 message of {:s} reported - out of insulin'.format(thisFault))
thisFinish2 = 'Success'
elif thisFault == '0x34':
print(' An 0x0202 message of {:s} reported - this wipes out registers'.format(thisFault))
else:
print(' An 0x0202 message of {:s} reported - details later'.format(thisFault))
print('\n Pod was initialized with {:d} messages, {:d} AssignID, {:d} SetUpPod required'.format(len(initIdx), \
numberOfAssignID, numberOfSetUpPod))
if emptyMessageList:
print(' *** Detected {:d} empty message(s) during life of the pod'.format(len(emptyMessageList)))
print(' *** indices:', emptyMessageList)
# process the action frame (returns a dictionary plus total completed message count)
actionSummary, totalCompletedMessages = processActionFrame(actionFrame, podState)
printActionSummary(actionSummary)
percentCompleted = 100*totalCompletedMessages/number_of_messages
print(' #Messages in completed actions : {:5d} : {:.1f}%'.format( \
totalCompletedMessages, percentCompleted))
if hasFault:
print('\nFault Details')
printDict(faultProcessedMsg)
# if an output filename is provided - write statistics to it (csv format)
if outFile:
# check if file exists
isItThere = os.path.isfile(outFile)
# now open the file
stream_out = open(outFile,mode='at')
# write the column headers if this is a new file
if not isItThere:
# set up a table format order
|
# Extract items from actionSummary
if actionSummary.get('TB'):
subDict = actionSummary.get('TB')
numberOfTB = subDict['countCompleted']
numberScheduleBeforeTempBasal = subDict['numSchBasalbeforeTB']
numberTBSepLessThan30sec = subDict['numShortTB']
numRepeatedTB = subDict['numRepeatedTB']
numRepeatedShortTB = subDict['numRepeatedShortTB']
numrepeated19MinTB = subDict['numrepeated19MinTB']
else:
numberOfTB = 0
numberScheduleBeforeTempBasal = 0
numberTBSepLessThan30sec = 0
numRepeatedTB = 0
if actionSummary.get('Bolus'):
subDict = actionSummary.get('Bolus')
numberOfBolus = subDict['countCompleted']
else:
numberOfBolus = 0
if actionSummary.get('Basal'):
subDict = actionSummary.get('Basal')
numberOfBasal = subDict['countCompleted']
else:
numberOfBasal = 0
if actionSummary.get('StatusCheck'):
subDict = actionSummary.get('StatusCheck')
numberOfStatusRequests = subDict['countCompleted']
else:
numberOfStatusRequests = 0
if actionSummary.get('CancelTB'):
subDict = actionSummary.get('CancelTB')
numIncomplCancelTB = subDict['countIncomplete']
else:
numIncomplCancelTB = 0
# write out | headerString = 'Who, finish State, Finish2, lastMsg Date, podOn (hrs), radioOn (hrs), radioOn (%), ' + \
'#Messages, #Completed, % Completed, #Send, #Recv, ' + \
'#Nonce Resync, #TB, #Bolus, ' \
'#Basal, #Status Check, ' + \
'#Schedule Before TempBasal, #TB Spaced <30s, ' + \
'#Repeat TB Value, #Repeat TB <30s, ' + \
' #RepTB 30s to 19min, #incomplete TB, ' + \
'insulin Delivered, # Initialize Cmds, # AssignID (0x07), ' + \
'# SetUpPod (0x03), Pod Lot, PI Version, PM Version, ' + \
'raw fault, filename'
stream_out.write(headerString)
stream_out.write('\n') | conditional_block |
analyzeMessageLogsRev3.py | (thisPath, thisFile, outFile):
# Rev3 uses the new checkAction code
# this replaces code used by New (rev2)
# deprecated: getPodSuccessfulActions
# deprecated: basal_analysis code (assumed perfect message order)
# This is time (sec) radio on Pod stays awake once comm is initiated
radio_on_time = 30
filename = thisPath + '/' + thisFile
# read the MessageLogs from the file
commands, podDict = read_file(filename)
# add quick and dirty fix for new Issue Reports (Aug 2019)
tempRaw = commands[-1]['raw_value']
lastRaw = tempRaw.replace('\nstatus:','')
commands[-1]['raw_value'] = lastRaw
# add more stuff and return as a DataFrame
df = generate_table(commands, radio_on_time)
# set up a few reportable values here from df, time is in UTC
first_command = df.iloc[0]['time']
last_command = df.iloc[-1]['time']
send_receive_commands = df.groupby(['type']).size()
number_of_messages = len(df)
thisPerson, thisFinish, thisAntenna = parse_info_from_filename(thisFile)
thisFinish2 = 'Success' # default is 'Success'
if thisFinish == 'WIP':
thisFinish2 = 'WIP' # pod is still running
lastDate = last_command.date()
# Process df to generate the podState associated with every message
# Updates to states occur with pod message (mostly 1d) status
# (the state for extended_bolus_active is NOT included (always False))
# Includes values for requested bolus and TB
# Note that .iloc for df and podState are identical
podState, emptyMessageList, faultProcessedMsg = getPodState(df)
# From the podState, extract some values to use in reports
msgLogHrs = podState.iloc[-1]['timeCumSec']/3600
radioOnHrs = podState.iloc[-1]['radioOnCumSec']/3600
numberOfAssignID = len(podState[podState.message_type=='0x7'])
numberOfSetUpPod = len(podState[podState.message_type=='0x3'])
numberOfNonceResync = len(podState[podState.message_type=='06'])
insulinDelivered = podState.iloc[-1]['insulinDelivered']
sourceString = 'from last 0x1d'
# special handling if an 0x02 messages aka fault was received
if len(faultProcessedMsg):
hasFault = True
thisFault = faultProcessedMsg['logged_fault']
checkInsulin = faultProcessedMsg['insulinDelivered']
rawFault = faultProcessedMsg['raw_value']
if checkInsulin >= insulinDelivered:
insulinDelivered = checkInsulin
sourceString = 'from 0x02 msg'
else:
hasFault = False
rawFault = 'n/a'
thisFault = thisFinish
# checkAction returns actionFrame with indices and times for every action
# completed actions and incomplete requests are separate columns
# see also function getActionDict
# actionFrame dataframe of processed analysis from podState (by action)
# initIdx indices in podState to extract pod initilization
actionFrame, initIdx = checkAction(podState)
if outFile == 2:
# print a few things then returns
lot = podDict['lot']
tid = podDict['tid']
piv = podDict['piVersion']
print(f'{thisPerson},{thisAntenna},{thisFault},{first_command},{last_command},{msgLogHrs},{lot},{tid},{piv}')
actionSummary = []
return df, podState, actionFrame, actionSummary
if True:
# print out summary information to command window
# need this True to get the actionSummary used to fill csv file
print('\n First command in Log :', first_command)
print(' Last command in Log :', last_command)
print(' Lot and TID :', podDict['lot'], podDict['tid'])
print('__________________________________________\n')
print(' Summary for {:s} with {:s} ending'.format(thisFile, thisFinish))
print(' Pod Lot: {:s}, PI: {:s}, PM: {:s}'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion']))
print(' Total elapsed time in log (hrs) : {:6.1f}'.format(msgLogHrs))
print(' Radio on estimate : {:6.1f}, {:5.1f}%'.format(radioOnHrs, 100*radioOnHrs/msgLogHrs))
print(' Number of messages : {:6d}'.format(number_of_messages))
print(' Number of nonce resyncs : {:6d}'.format(numberOfNonceResync))
print(' Insulin delivered (u) : {:6.2f} ({:s})'.format(insulinDelivered, sourceString))
if hasFault:
thisFinish = thisFault
thisFinish2 = 'Fault'
if thisFault == '0x1C':
print(' An 0x0202 message of {:s} reported - 80 hour time limit'.format(thisFault))
thisFinish2 = 'Success'
elif thisFault == '0x18':
print(' An 0x0202 message of {:s} reported - out of insulin'.format(thisFault))
thisFinish2 = 'Success'
elif thisFault == '0x34':
print(' An 0x0202 message of {:s} reported - this wipes out registers'.format(thisFault))
else:
print(' An 0x0202 message of {:s} reported - details later'.format(thisFault))
print('\n Pod was initialized with {:d} messages, {:d} AssignID, {:d} SetUpPod required'.format(len(initIdx), \
numberOfAssignID, numberOfSetUpPod))
if emptyMessageList:
print(' *** Detected {:d} empty message(s) during life of the pod'.format(len(emptyMessageList)))
print(' *** indices:', emptyMessageList)
# process the action frame (returns a dictionary plus total completed message count)
actionSummary, totalCompletedMessages = processActionFrame(actionFrame, podState)
printActionSummary(actionSummary)
percentCompleted = 100*totalCompletedMessages/number_of_messages
print(' #Messages in completed actions : {:5d} : {:.1f}%'.format( \
totalCompletedMessages, percentCompleted))
if hasFault:
print('\nFault Details')
printDict(faultProcessedMsg)
# if an output filename is provided - write statistics to it (csv format)
if outFile:
# check if file exists
isItThere = os.path.isfile(outFile)
# now open the file
stream_out = open(outFile,mode='at')
# write the column headers if this is a new file
if not isItThere:
# set up a table format order
headerString = 'Who, finish State, Finish2, lastMsg Date, podOn (hrs), radioOn (hrs), radioOn (%), ' + \
'#Messages, #Completed, % Completed, #Send, #Recv, ' + \
'#Nonce Resync, #TB, #Bolus, ' \
'#Basal, #Status Check, ' + \
'#Schedule Before TempBasal, #TB Spaced <30s, ' + \
'#Repeat TB Value, #Repeat TB <30s, ' + \
' #RepTB 30s to 19min, #incomplete TB, ' + \
'insulin Delivered, # Initialize Cmds, # AssignID (0x07), ' + \
'# SetUpPod (0x03), Pod Lot, PI Version, PM Version, ' + \
'raw fault, filename'
stream_out.write(headerString)
stream_out.write('\n')
# Extract items from actionSummary
if actionSummary.get('TB'):
subDict = actionSummary.get('TB')
numberOfTB = subDict['countCompleted']
numberScheduleBeforeTempBasal = subDict['numSchBasalbeforeTB']
numberTBSepLessThan30sec = subDict['numShortTB']
numRepeatedTB = subDict['numRepeatedTB']
numRepeatedShortTB = subDict['numRepeatedShortTB']
numrepeated19MinTB = subDict['numrepeated19MinTB']
else:
numberOfTB = 0
numberScheduleBeforeTempBasal = 0
numberTBSepLessThan30sec = 0
numRepeatedTB = 0
if actionSummary.get('Bolus'):
subDict = actionSummary.get('Bolus')
numberOfBolus = subDict['countCompleted']
else:
numberOfBolus = 0
if actionSummary.get('Basal'):
subDict = actionSummary.get('Basal | analyzeMessageLogsRev3 | identifier_name | |
analyzeMessageLogsRev3.py | thisPerson, thisFinish, thisAntenna = parse_info_from_filename(thisFile)
thisFinish2 = 'Success' # default is 'Success'
if thisFinish == 'WIP':
thisFinish2 = 'WIP' # pod is still running
lastDate = last_command.date()
# Process df to generate the podState associated with every message
# Updates to states occur with pod message (mostly 1d) status
# (the state for extended_bolus_active is NOT included (always False))
# Includes values for requested bolus and TB
# Note that .iloc for df and podState are identical
podState, emptyMessageList, faultProcessedMsg = getPodState(df)
# From the podState, extract some values to use in reports
msgLogHrs = podState.iloc[-1]['timeCumSec']/3600
radioOnHrs = podState.iloc[-1]['radioOnCumSec']/3600
numberOfAssignID = len(podState[podState.message_type=='0x7'])
numberOfSetUpPod = len(podState[podState.message_type=='0x3'])
numberOfNonceResync = len(podState[podState.message_type=='06'])
insulinDelivered = podState.iloc[-1]['insulinDelivered']
sourceString = 'from last 0x1d'
# special handling if an 0x02 messages aka fault was received
if len(faultProcessedMsg):
hasFault = True
thisFault = faultProcessedMsg['logged_fault']
checkInsulin = faultProcessedMsg['insulinDelivered']
rawFault = faultProcessedMsg['raw_value']
if checkInsulin >= insulinDelivered:
insulinDelivered = checkInsulin
sourceString = 'from 0x02 msg'
else:
hasFault = False
rawFault = 'n/a'
thisFault = thisFinish
# checkAction returns actionFrame with indices and times for every action
# completed actions and incomplete requests are separate columns
# see also function getActionDict
# actionFrame dataframe of processed analysis from podState (by action)
# initIdx indices in podState to extract pod initilization
actionFrame, initIdx = checkAction(podState)
if outFile == 2:
# print a few things then returns
lot = podDict['lot']
tid = podDict['tid']
piv = podDict['piVersion']
print(f'{thisPerson},{thisAntenna},{thisFault},{first_command},{last_command},{msgLogHrs},{lot},{tid},{piv}')
actionSummary = []
return df, podState, actionFrame, actionSummary
if True:
# print out summary information to command window
# need this True to get the actionSummary used to fill csv file
print('\n First command in Log :', first_command)
print(' Last command in Log :', last_command)
print(' Lot and TID :', podDict['lot'], podDict['tid'])
print('__________________________________________\n')
print(' Summary for {:s} with {:s} ending'.format(thisFile, thisFinish))
print(' Pod Lot: {:s}, PI: {:s}, PM: {:s}'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion']))
print(' Total elapsed time in log (hrs) : {:6.1f}'.format(msgLogHrs))
print(' Radio on estimate : {:6.1f}, {:5.1f}%'.format(radioOnHrs, 100*radioOnHrs/msgLogHrs))
print(' Number of messages : {:6d}'.format(number_of_messages))
print(' Number of nonce resyncs : {:6d}'.format(numberOfNonceResync))
print(' Insulin delivered (u) : {:6.2f} ({:s})'.format(insulinDelivered, sourceString))
if hasFault:
thisFinish = thisFault
thisFinish2 = 'Fault'
if thisFault == '0x1C':
print(' An 0x0202 message of {:s} reported - 80 hour time limit'.format(thisFault))
thisFinish2 = 'Success'
elif thisFault == '0x18':
print(' An 0x0202 message of {:s} reported - out of insulin'.format(thisFault))
thisFinish2 = 'Success'
elif thisFault == '0x34':
print(' An 0x0202 message of {:s} reported - this wipes out registers'.format(thisFault))
else:
print(' An 0x0202 message of {:s} reported - details later'.format(thisFault))
print('\n Pod was initialized with {:d} messages, {:d} AssignID, {:d} SetUpPod required'.format(len(initIdx), \
numberOfAssignID, numberOfSetUpPod))
if emptyMessageList:
print(' *** Detected {:d} empty message(s) during life of the pod'.format(len(emptyMessageList)))
print(' *** indices:', emptyMessageList)
# process the action frame (returns a dictionary plus total completed message count)
actionSummary, totalCompletedMessages = processActionFrame(actionFrame, podState)
printActionSummary(actionSummary)
percentCompleted = 100*totalCompletedMessages/number_of_messages
print(' #Messages in completed actions : {:5d} : {:.1f}%'.format( \
totalCompletedMessages, percentCompleted))
if hasFault:
print('\nFault Details')
printDict(faultProcessedMsg)
# if an output filename is provided - write statistics to it (csv format)
if outFile:
# check if file exists
isItThere = os.path.isfile(outFile)
# now open the file
stream_out = open(outFile,mode='at')
# write the column headers if this is a new file
if not isItThere:
# set up a table format order
headerString = 'Who, finish State, Finish2, lastMsg Date, podOn (hrs), radioOn (hrs), radioOn (%), ' + \
'#Messages, #Completed, % Completed, #Send, #Recv, ' + \
'#Nonce Resync, #TB, #Bolus, ' \
'#Basal, #Status Check, ' + \
'#Schedule Before TempBasal, #TB Spaced <30s, ' + \
'#Repeat TB Value, #Repeat TB <30s, ' + \
' #RepTB 30s to 19min, #incomplete TB, ' + \
'insulin Delivered, # Initialize Cmds, # AssignID (0x07), ' + \
'# SetUpPod (0x03), Pod Lot, PI Version, PM Version, ' + \
'raw fault, filename'
stream_out.write(headerString)
stream_out.write('\n')
# Extract items from actionSummary
if actionSummary.get('TB'):
subDict = actionSummary.get('TB')
numberOfTB = subDict['countCompleted']
numberScheduleBeforeTempBasal = subDict['numSchBasalbeforeTB']
numberTBSepLessThan30sec = subDict['numShortTB']
numRepeatedTB = subDict['numRepeatedTB']
numRepeatedShortTB = subDict['numRepeatedShortTB']
numrepeated19MinTB = subDict['numrepeated19MinTB']
else:
numberOfTB = 0
numberScheduleBeforeTempBasal = 0
numberTBSepLessThan30sec = 0
numRepeatedTB = 0
if actionSummary.get('Bolus'):
subDict = actionSummary.get('Bolus')
numberOfBolus = subDict['countCompleted']
else:
numberOfBolus = 0
if actionSummary.get('Basal'):
subDict = actionSummary.get('Basal')
numberOfBasal = subDict['countCompleted']
else:
numberOfBasal = 0
if actionSummary.get('StatusCheck'):
subDict = actionSummary.get('StatusCheck')
numberOfStatusRequests = subDict['countCompleted']
else:
numberOfStatusRequests = 0
if actionSummary.get('CancelTB'):
subDict = actionSummary.get | radio_on_time = 30
filename = thisPath + '/' + thisFile
# read the MessageLogs from the file
commands, podDict = read_file(filename)
# add quick and dirty fix for new Issue Reports (Aug 2019)
tempRaw = commands[-1]['raw_value']
lastRaw = tempRaw.replace('\nstatus:','')
commands[-1]['raw_value'] = lastRaw
# add more stuff and return as a DataFrame
df = generate_table(commands, radio_on_time)
# set up a few reportable values here from df, time is in UTC
first_command = df.iloc[0]['time']
last_command = df.iloc[-1]['time']
send_receive_commands = df.groupby(['type']).size()
number_of_messages = len(df) | identifier_body | |
cockpit.js | .Pages = new Meteor.Pagination("Project",{
// perPage: 2,
// itemTemplate: "cockpit_table",
// //templateName: 'Project',
// //itemTemplate: 'cockpit'
// //sort: {
// // title: 1
// //},
// //filters: {
// // count: {
// // $gt: 10
// // }
// //},
// //availableSettings: {
// // perPage: true,
// // sort: true
// //}
//});
//let that_pro;
//Meteor.subscribe('cockpitTable');
//Meteor.subscribe('userTable');
Template.cockpit.onDestroyed(function () {
_changeProject2.stop();
pro_id = null;
//that_pro=null;
});
Template.cockpit.rendered = function () {
$('#data_1').datepicker({
todayBtn: "linked",
keyboardNavigation: false,
forceParse: false,
calendarWeeks: true,
autoclose: true
});
$('#data_2').datepicker({
todayBtn: "linked",
keyboardNavigation: false,
forceParse: false,
calendarWeeks: true,
autoclose: true
});
//初始化
//_changeProject2=Meteor.subscribe('project',loguser,function(){
// that_pro=Project.find({}).fetch();
//});
//日期
//$('.data_1 .input-group.date').datepicker({
// todayBtn: "linked",
// keyboardNavigation: false,
// forceParse: false,
// calendarWeeks: true,
// autoclose: true
//});
//$('.M-box1').pagination({
// totalData:100,
// showData:1,
// coping:true,
// callback:function(api){
// console.log(api.getCurrent());
// $('.now').text(api.getCurrent());
// }
//});
//分页
//this.Pages = new Meteor.Pagination("Project",{
// perPage: 2,
// itemTemplate: "cockpit_table",
// templateName: 'Project',
// //itemTemplate: 'cockpit'
// //sort: {
// // title: 1
// //},
// //filters: {
// // count: {
// // $gt: 10
// // }
// //},
// //availableSettings: {
// // perPage: true,
// // sort: true
// //}
//});
};
Template.cockpit.onCreated(function () {
/*
* find() 返回值是一个游标。游标是一种从动数据源
*输出内容,可以对游标使用 fetch() 来把游标转换成数组
* */
//
//var userPhone=$.cookie('user_phone');
//var userType=$.cookie('user_type');
//得到登录用户的id
//var _loguserId=FlowRouter.getParam('_id');
//console.log(_loguserId);
loguser = sessionStorage.getItem('loguser');
//Session.get('loguser2');
console.log(loguser);
if (!loguser) {
FlowRouter.go('/login');
}
//console.log(loguser);
templ=this;
//订阅数据
$('#mengban').show();
_changeProject2 = this.subscribe('project', loguser, function () {
$('#mengban').hide();
var totle = Project.find().count();
console.log(totle);
$('.M-box1').pagination({
totalData:totle,
showData:limit,
coping:true,
callback:function(api){
console.log(api.getCurrent());
pagenum=api.getCurrent();
console.log(pagenum);
templ.nowpageData.set(api.getCurrent());
}
});
});
//this.subscribe('cockpitTable_user',userPhone,userType);
this.subscribe('allusers');
//this.subscribe('dictionaries');
//单条项目
var _data = Project.find({}).fetch();
//ReactiveDict本地变量
this.editorData = new ReactiveVar(_data);
//当前页码
this.nowpageData = new ReactiveVar();
//Meteor.call('getnum',_data,function(){
//
//});
//页码本地变量
//this.pages = new ReactiveVar();
});
Template.cockpit.helpers({
//项目集合
cockpitTable: function () {
var page = Template.instance().nowpageData.get();
var bendiPT = Project.find({},{skip:(page-1)*limit,limit:limit}).fetch();
//Template.instance().searchData.set(that_pro);
//var bendiPT=Template.instance().searchData.get();
for (var i = 0; i < bendiPT.length; i++) {
bendiPT[i].ordinal = i + 1;
//Meteor.call('proProgress',bendiPT[i]._id,function(error,res){
// bendiPT[i].progress=res['result'];
//});
if (bendiPT[i].supervisionEngineer) {
bendiPT[i].supervisionEngineer = Users.find({ '_id': bendiPT[i].supervisionEngineer }).fetch()[0].username;
}
if (bendiPT[i].backup == 0) {
bendiPT[i].backup = '无';
} else {
bendiPT[i].backup = Dictionaries.find({ "ecode": "backUp", 'value': bendiPT[i].backup }).fetch()[0].name;
}
if (bendiPT[i].weekly) {
bendiPT[i].weekly ='开启';
}else{
bendiPT[i].weekly ='关闭';
}
if (bendiPT[i].monthly) {
bendiPT[i].monthly ='开启';
}else{
bendiPT[i].monthly ='关闭';
}
}
return bendiPT;
},
//cockpitTable: function() {
// //return CockpitTable.find();
// var bendiCT=Project.find().fetch();
// for(var i=0;i<bendiCT.length;i++){
// bendiCT[i].ordinal=i+1;
// }
// return bendiCT;
//},
//单条项目集合
editorTable: function () {
var _data = Template.instance().editorData.get();
return _data;
},
//周报是否勾选
isweekly: function (a) {
var pro=Project.find({_id:a}).fetch();
if(pro[0]){
if(pro[0].weekly==0)
{
return false;
}else{
return true;
}
}
},
//月报是否勾选
ismonthly: function (a) {
var pro=Project.find({_id:a}).fetch();
if(pro[0]){
if(pro[0].monthly==0)
{
return false;
}else{
return true;
}
}
},
//用户表中监理工程师集合
userTableJLG: function () {
return Users.find({ "type": 2, 'state': 1 });
},
//工程师选中判断
engSelect: function (a) {
//console.log(a);
var supervisionEngineer = Template.instance().editorData.get()[0].supervisionEngineer;
var engname = Users.find({ '_id': supervisionEngineer }).fetch()[0].username;
if (a == engname) {
return true;
} else {
return false;
}
},
//字典表中备份方案集合
backUp: function () {
return Dictionaries.find({ "ecode": "backUp" });
},
//备份方案选中方法
backUpSelect: function (a) {
var backup = Template.instance().editorData.get()[0].backup;
if (a == backup) {
return true;
} else {
return false;
}
},
//验收判断
accState: function (a) {
//0是已验收,1是未验收
if (a == 1) {
return false;
} else if (a == 0) {
return true;
}
},
//是否显示操作判断
isHandle: function () {
var loguser = sessionStorage.getItem('loguser');
//var _loguserId=FlowRouter.getParam('_id');
//var loguserType=Users.find({'_id':_loguserId}).fetch()[0].type;
var user = Users.findOne({ '_id': loguser });
if (user) {
var loguserType = user.type;
//if(loguserType==1 || loguserType==0){
if (loguserType == 1 || loguserType == 0) {
return true;
} else {
return false;
}
}
return false;
}
});
Template.cockpit.onRendered(function () {
//日期
//$('.data_1 .input-group.date').datepicker({
// todayBtn: "linked",
// | (!val) {
shObj.show();
} else {
shObj.hide();
}
}
//this | identifier_body | |
cockpit.js | var totle = Project.find().count();
console.log(totle);
$('.M-box1').pagination({
totalData:totle,
showData:limit,
coping:true,
callback:function(api){
console.log(api.getCurrent());
pagenum=api.getCurrent();
console.log(pagenum);
templ.nowpageData.set(api.getCurrent());
}
});
});
//this.subscribe('cockpitTable_user',userPhone,userType);
this.subscribe('allusers');
//this.subscribe('dictionaries');
//单条项目
var _data = Project.find({}).fetch();
//ReactiveDict本地变量
this.editorData = new ReactiveVar(_data);
//当前页码
this.nowpageData = new ReactiveVar();
//Meteor.call('getnum',_data,function(){
//
//});
//页码本地变量
//this.pages = new ReactiveVar();
});
Template.cockpit.helpers({
//项目集合
cockpitTable: function () {
var page = Template.instance().nowpageData.get();
var bendiPT = Project.find({},{skip:(page-1)*limit,limit:limit}).fetch();
//Template.instance().searchData.set(that_pro);
//var bendiPT=Template.instance().searchData.get();
for (var i = 0; i < bendiPT.length; i++) {
bendiPT[i].ordinal = i + 1;
//Meteor.call('proProgress',bendiPT[i]._id,function(error,res){
// bendiPT[i].progress=res['result'];
//});
if (bendiPT[i].supervisionEngineer) {
bendiPT[i].supervisionEngineer = Users.find({ '_id': bendiPT[i].supervisionEngineer }).fetch()[0].username;
}
if (bendiPT[i].backup == 0) {
bendiPT[i].backup = '无';
} else {
bendiPT[i].backup = Dictionaries.find({ "ecode": "backUp", 'value': bendiPT[i].backup }).fetch()[0].name;
}
if (bendiPT[i].weekly) {
bendiPT[i].weekly ='开启';
}else{
bendiPT[i].weekly ='关闭';
}
if (bendiPT[i].monthly) {
bendiPT[i].monthly ='开启';
}else{
bendiPT[i].monthly ='关闭';
}
}
return bendiPT;
},
//cockpitTable: function() {
// //return CockpitTable.find();
// var bendiCT=Project.find().fetch();
// for(var i=0;i<bendiCT.length;i++){
// bendiCT[i].ordinal=i+1; | // return bendiCT;
//},
//单条项目集合
editorTable: function () {
var _data = Template.instance().editorData.get();
return _data;
},
//周报是否勾选
isweekly: function (a) {
var pro=Project.find({_id:a}).fetch();
if(pro[0]){
if(pro[0].weekly==0)
{
return false;
}else{
return true;
}
}
},
//月报是否勾选
ismonthly: function (a) {
var pro=Project.find({_id:a}).fetch();
if(pro[0]){
if(pro[0].monthly==0)
{
return false;
}else{
return true;
}
}
},
//用户表中监理工程师集合
userTableJLG: function () {
return Users.find({ "type": 2, 'state': 1 });
},
//工程师选中判断
engSelect: function (a) {
//console.log(a);
var supervisionEngineer = Template.instance().editorData.get()[0].supervisionEngineer;
var engname = Users.find({ '_id': supervisionEngineer }).fetch()[0].username;
if (a == engname) {
return true;
} else {
return false;
}
},
//字典表中备份方案集合
backUp: function () {
return Dictionaries.find({ "ecode": "backUp" });
},
//备份方案选中方法
backUpSelect: function (a) {
var backup = Template.instance().editorData.get()[0].backup;
if (a == backup) {
return true;
} else {
return false;
}
},
//验收判断
accState: function (a) {
//0是已验收,1是未验收
if (a == 1) {
return false;
} else if (a == 0) {
return true;
}
},
//是否显示操作判断
isHandle: function () {
var loguser = sessionStorage.getItem('loguser');
//var _loguserId=FlowRouter.getParam('_id');
//var loguserType=Users.find({'_id':_loguserId}).fetch()[0].type;
var user = Users.findOne({ '_id': loguser });
if (user) {
var loguserType = user.type;
//if(loguserType==1 || loguserType==0){
if (loguserType == 1 || loguserType == 0) {
return true;
} else {
return false;
}
}
return false;
}
});
Template.cockpit.onRendered(function () {
//日期
//$('.data_1 .input-group.date').datepicker({
// todayBtn: "linked",
// keyboardNavigation: false,
// forceParse: false,
// calendarWeeks: true,
// autoclose: true
//});
//$('.modal').appendTo("body");
//var proSum=CockpitTable.find().count();
//console.log(1111);
});
Template.cockpit.events({
//'click .add': function(e) {
// //alert(1);
// e.preventDefault();
// var addPro=$('#myModalAddpro');
// //var proname=addpro.find('.modal-body').find('input:first-child').val();
// //const target = e.target;
// //const text = target.text.value;
// //console.log(text);
// console.log(pro_id);
// CockpitTable.insert({'number':10,'proName':'11'});
// //var proNumber=CockpitTable.find({}).sort({number:-1}).limit(1);
// //CockpitTable.find({}, { sort: { number: -1 } }).limit(1);
//},
//验收项目
"click .acc": function (e) {
e.preventDefault();
pro_id = this._id;
var state = this.state;
if (state == 0) {
state = 1;
//Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : null} });
} else if (state == 1) {
state = 0;
//var timestamp = ((Date.parse(new Date()))/ 1000).toString();
//Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : timestamp} });
}
$('#mengban').show();
Meteor.call('accPro', pro_id, state, function (error, res) {
$('#mengban').hide();
});
//function base64_decode(base64str, file) {
// // create buffer object from base64 encoded string, it is important to tell the constructor that the string is base64 encoded
// var bitmap = new Buffer(base64str, 'base64');
// // write buffer to file
// fs.writeFileSync(file, bitmap);
// console.log('******** File created from base64 encoded string ********');
//}
//
//base64_decode('iVBORw0KGgoAAAANSUhEUgAAADQAAAAlCAYAAAAN8srVAAACTUlEQVR42u3Wv2sTcRiA8VPBxUKwEAxU3NxPIoFAl1bIkkmwYKAKRbqbRSWQCGJ+rMUibjo4FARBl0AgUIh/QXFxFIpKJHAQKA56r0/hDbyEK5VrDH2hBx+ud+Ga9+G+uSQQkVOv0+lMZNBFHoFRwABZb0F9CCITVdRjQd9b0CoOTNSGiRkidBWkljGGINb9CCECd0FqE7GJqkxeMxccK8UbJzppUPGIO5SfR9DCjINsTIR1RDbKXvAakuB9yqAsvuLaDIN6Jqag5/IaIxjYCxaxDzFGyKUMegdBb4ZBGfQmMUaIXeSmLyhDjHspl9wdiPHgJEGlUumf2UGml96HlJ+hRQwhRoSleQfZ | // } | random_line_split |
cockpit.js | totle = Project.find().count();
console.log(totle);
$('.M-box1').pagination({
totalData:totle,
showData:limit,
coping:true,
callback:function(api){
console.log(api.getCurrent());
pagenum=api.getCurrent();
console.log(pagenum);
templ.nowpageData.set(api.getCurrent());
}
});
});
//this.subscribe('cockpitTable_user',userPhone,userType);
this.subscribe('allusers');
//this.subscribe('dictionaries');
//单条项目
var _data = Project.find({}).fetch();
//ReactiveDict本地变量
this.editorData = new ReactiveVar(_data);
//当前页码
this.nowpageData = new ReactiveVar();
//Meteor.call('getnum',_data,function(){
//
//});
//页码本地变量
//this.pages = new ReactiveVar();
});
Template.cockpit.helpers({
//项目集合
cockpitTable: function () {
var page = Template.instance().nowpageData.get();
var bendiPT = Project.find({},{skip:(page-1)*limit,limit:limit}).fetch();
//Template.instance().searchData.set(that_pro);
//var bendiPT=Template.instance().searchData.get();
for (var i = 0; i < bendiPT.length; i++) {
bendiPT[i].ordinal = i + 1;
//Meteor.call('proProgress',bendiPT[i]._id,function(error,res){
// bendiPT[i].progress=res['result'];
//});
if (bendiPT[i].supervisionEngineer) {
bendiPT[i].supervisionEngineer = Users.find({ '_id': bendiPT[i].supervisionEngineer }).fetch()[0].username;
}
if (bendiPT[i].backup == 0) {
bendiPT[i].backup = '无';
} else {
bendiPT[i].backup = Dictionaries.find({ "ecode": "backUp", 'value': bendiPT[i].backup }).fetch()[0].name;
}
if (bendiPT[i].weekly) {
bendiPT[i].weekly ='开启';
}else{
bendiPT[i].weekly ='关闭';
}
if (bendiPT[i].monthly) {
bendiPT[i].monthly ='开启';
}else{
bendiPT[i].monthly ='关闭';
}
}
return bendiPT;
},
//cockpitTable: function() {
// //return CockpitTable.find();
// var bendiCT=Project.find().fetch();
// for(var i=0;i<bendiCT.length;i++){
// bendiCT[i].ordinal=i+1;
// }
// return bendiCT;
//},
//单条项目集合
editorTable: function () {
var _data = Template.instance().editorData.get();
return _data;
},
//周报是否勾选
isweekly: function (a) {
var pro=Project.find({_id:a}).fetch();
if(pro[0]){
if(pro[0].weekly==0)
{
return false;
}else{
return true;
}
}
},
//月报是否勾选
ismonthly: function (a) {
var pro=Project.find({_id:a}).fetch();
if(pro[0]){
if(pro[0].monthly==0)
{
return false;
}else{
return true;
}
}
},
//用户表中监理工程师集合
userTableJLG: function () {
return Users.find({ "type": 2, 'state': 1 });
},
//工程师选中判断
engSelect: function (a) {
//console.log(a);
var supervisionEngineer = Template.instance().editorData.get()[0].supervisionEngineer;
var engname = Users.find({ '_id': supervisionEngineer }).fetch()[0].username;
if (a == engname) {
return true;
} else {
return false;
}
},
//字典表中备份方案集合
backUp: function () {
return Dictionaries.find({ "ecode": "backUp" });
},
//备份方案选中方法
backUpSelect: function (a) {
var backup = Template.instance().editorData.get()[0].backup;
if (a == backup) {
return true;
} else {
return false;
}
},
//验收判断
accState: function (a) {
//0是已验收,1是未验收
if (a == 1) {
return false;
} else if (a == 0) {
return true;
}
},
//是否显示操作判断
isHandle: function () {
var loguser = sessionStorage.getItem('loguser');
//var _loguserId=FlowRouter.getParam('_id');
//var loguserType=Users.find({'_id':_loguserId}).fetch()[0].type;
var user = Users.findOne({ '_id': loguser });
if (user) {
var loguserType = user.type;
//if(loguserType==1 || loguserType==0){
if (loguserType == 1 || loguserType == 0) {
return true;
} else {
return false;
}
}
return false;
}
});
Template.cockpit.onRendered(function () {
//日期
//$('.data_1 .input-group.date').datepicker({
// todayBtn: "linked",
// keyboardNavigation: false,
// forceParse: false,
// calendarWeeks: true,
// autoclose: true
//});
//$('.modal').appendTo("body");
//var proSum=CockpitTable.find().count();
//console.log(1111);
});
Template.cockpit.events({
//'click .add': function(e) {
// //alert(1);
// e.preventDefault();
// var addPro=$('#myModalAddpro');
// //var proname=addpro.find('.modal-body').find('input:first-child').val();
// //const target = e.target;
// //const text = target.text.value;
// //console.log(text);
// console.log(pro_id);
// CockpitTable.insert({'number':10,'proName':'11'});
// //var proNumber=CockpitTable.find({}).sort({number:-1}).limit(1);
// //CockpitTable.find({}, { sort: { number: -1 } }).limit(1);
//},
//验收项目
"click .acc": function (e) {
e.preventDefault();
pro_id = this._id;
var state = this.state;
if (state == 0) {
state = 1;
//Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : null} });
} else if (state == 1) {
state = 0;
//var timestamp = ((Date.parse(new Date()))/ 1000).toString();
//Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : timestamp} });
| an').hide();
});
//function base64_decode(base64str, file) {
// // create buffer object from base64 encoded string, it is important to tell the constructor that the string is base64 encoded
// var bitmap = new Buffer(base64str, 'base64');
// // write buffer to file
// fs.writeFileSync(file, bitmap);
// console.log('******** File created from base64 encoded string ********');
//}
//
//base64_decode('iVBORw0KGgoAAAANSUhEUgAAADQAAAAlCAYAAAAN8srVAAACTUlEQVR42u3Wv2sTcRiA8VPBxUKwEAxU3NxPIoFAl1bIkkmwYKAKRbqbRSWQCGJ+rMUibjo4FARBl0AgUIh/QXFxFIpKJHAQKA56r0/hDbyEK5VrDH2hBx+ud+Ga9+G+uSQQkVOv0+lMZNBFHoFRwABZb0F9CCITVdRjQd9b0CoOTNSGiRkidBWkljGGINb9CCECd0FqE7GJqkxeMxccK8UbJzppUPGIO5SfR9DCjINsTIR1RDbKXvAakuB9yqAsvuLaDIN6Jqag5/IaIxjYCxaxDzFGyKUMegdBb4ZBGfQmMUaIXeSmLyhDjHspl9wdiPHgJEGlUumf2UGml96HlJ+hRQwhRoSleQf | }
$('#mengban').show();
Meteor.call('accPro', pro_id, state, function (error, res) {
$('#mengb | conditional_block |
cockpit.js | Obj) {
if (!val) {
shObj.show();
} else {
shObj.hide();
}
}
//this.Pages = new Meteor.Pagination("Project",{
// perPage: 2,
// itemTemplate: "cockpit_table",
// //templateName: 'Project',
// //itemTemplate: 'cockpit'
// //sort: {
// // title: 1
// //},
// //filters: {
// // count: {
// // $gt: 10
// // }
// //},
// //availableSettings: {
// // perPage: true,
// // sort: true
// //}
//});
//let that_pro;
//Meteor.subscribe('cockpitTable');
//Meteor.subscribe('userTable');
Template.cockpit.onDestroyed(function () {
_changeProject2.stop();
pro_id = null;
//that_pro=null;
});
Template.cockpit.rendered = function () {
$('#data_1').datepicker({
todayBtn: "linked",
keyboardNavigation: false,
forceParse: false,
calendarWeeks: true,
autoclose: true
});
$('#data_2').datepicker({
todayBtn: "linked",
keyboardNavigation: false,
forceParse: false,
calendarWeeks: true,
autoclose: true
});
//初始化
//_changeProject2=Meteor.subscribe('project',loguser,function(){
// that_pro=Project.find({}).fetch();
//});
//日期
//$('.data_1 .input-group.date').datepicker({
// todayBtn: "linked",
// keyboardNavigation: false,
// forceParse: false,
// calendarWeeks: true,
// autoclose: true
//});
//$('.M-box1').pagination({
// totalData:100,
// showData:1,
// coping:true,
// callback:function(api){
// console.log(api.getCurrent());
// $('.now').text(api.getCurrent());
// }
//});
//分页
//this.Pages = new Meteor.Pagination("Project",{
// perPage: 2,
// itemTemplate: "cockpit_table",
// templateName: 'Project',
// //itemTemplate: 'cockpit'
// //sort: {
// // title: 1
// //},
// //filters: {
// // count: {
// // $gt: 10
// // }
// //},
// //availableSettings: {
// // perPage: true,
// // sort: true
// //}
//});
};
Template.cockpit.onCreated(function () {
/*
* find() 返回值是一个游标。游标是一种从动数据源
*输出内容,可以对游标使用 fetch() 来把游标转换成数组
* */
//
//var userPhone=$.cookie('user_phone');
//var userType=$.cookie('user_type');
//得到登录用户的id
//var _loguserId=FlowRouter.getParam('_id');
//console.log(_loguserId);
loguser = sessionStorage.getItem('loguser');
//Session.get('loguser2');
console.log(loguser);
if (!loguser) {
FlowRouter.go('/login');
}
//console.log(loguser);
templ=this;
//订阅数据
$('#mengban').show();
_changeProject2 = this.subscribe('project', loguser, function () {
$('#mengban').hide();
var totle = Project.find().count();
console.log(totle);
$('.M-box1').pagination({
totalData:totle,
showData:limit,
coping:true,
callback:function(api){
console.log(api.getCurrent());
pagenum=api.getCurrent();
console.log(pagenum);
templ.nowpageData.set(api.getCurrent());
}
});
});
//this.subscribe('cockpitTable_user',userPhone,userType);
this.subscribe('allusers');
//this.subscribe('dictionaries');
//单条项目
var _data = Project.find({}).fetch();
//ReactiveDict本地变量
this.editorData = new ReactiveVar(_data);
//当前页码
this.nowpageData = new ReactiveVar();
//Meteor.call('getnum',_data,function(){
//
//});
//页码本地变量
//this.pages = new ReactiveVar();
});
Template.cockpit.helpers({
//项目集合
cockpitTable: function () {
var page = Template.instance().nowpageData.get();
var bendiPT = Project.find({},{skip:(page-1)*limit,limit:limit}).fetch();
//Template.instance().searchData.set(that_pro);
//var bendiPT=Template.instance().searchData.get();
for (var i = 0; i < bendiPT.length; i++) {
bendiPT[i].ordinal = i + 1;
//Meteor.call('proProgress',bendiPT[i]._id,function(error,res){
// bendiPT[i].progress=res['result'];
//});
if (bendiPT[i].supervisionEngineer) {
bendiPT[i].supervisionEngineer = Users.find({ '_id': bendiPT[i].supervisionEngineer }).fetch()[0].username;
}
if (bendiPT[i].backup == 0) {
bendiPT[i].backup = '无';
} else {
bendiPT[i].backup = Dictionaries.find({ "ecode": "backUp", 'value': bendiPT[i].backup }).fetch()[0].name;
}
if (bendiPT[i].weekly) {
bendiPT[i].weekly ='开启';
}else{
bendiPT[i].weekly ='关闭';
}
if (bendiPT[i].monthly) {
bendiPT[i].monthly ='开启';
}else{
bendiPT[i].monthly ='关闭';
}
}
return bendiPT;
},
//cockpitTable: function() {
// //return CockpitTable.find();
// var bendiCT=Project.find().fetch();
// for(var i=0;i<bendiCT.length;i++){
// bendiCT[i].ordinal=i+1;
// }
// return bendiCT;
//},
//单条项目集合
editorTable: function () {
var _data = Template.instance().editorData.get();
return _data;
},
//周报是否勾选
isweekly: function (a) {
var pro=Project.find({_id:a}).fetch();
if(pro[0]){
if(pro[0].weekly==0)
{
return false;
}else{
return true;
}
}
},
//月报是否勾选
ismonthly: function (a) {
var pro=Project.find({_id:a}).fetch();
if(pro[0]){
if(pro[0].monthly==0)
{
return false;
}else{
return true;
}
}
},
//用户表中监理工程师集合
userTableJLG: function () {
return Users.find({ "type": 2, 'state': 1 });
},
//工程师选中判断
engSelect: function (a) {
//console.log(a);
var supervisionEngineer = Template.instance().editorData.get()[0].supervisionEngineer;
var engname = Users.find({ '_id': supervisionEngineer }).fetch()[0].username;
if (a == engname) {
return true;
} else {
return false;
}
},
//字典表中备份方案集合
backUp: function () {
return Dictionaries.find({ "ecode": "backUp" });
},
//备份方案选中方法
backUpSelect: function (a) {
var backup = Template.instance().editorData.get()[0].backup;
if (a == backup) {
return true;
} else {
return false;
}
},
//验收判断
accState: function (a) {
//0是已验收,1是未验收
if (a == 1) {
return false;
} else if (a == 0) {
return true;
}
},
//是否显示操作判断
isHandle: function () {
var loguser = sessionStorage.getItem('loguser');
//var _loguserId=FlowRouter.getParam('_id');
//var loguserType=Users.find({'_id':_loguserId}).fetch()[0].type;
var user = Users.findOne({ '_id': loguser });
if (user) {
var loguserType = user.type;
//if(loguserType==1 || loguserType==0){
if (loguserType == 1 || loguserType == 0) {
return true;
} else {
return false;
}
}
return false;
}
});
Template.cockpit.onRendered(function () {
//日期
//$('.data_1 .input-group.date').datepicker({
// today | al, sh | identifier_name | |
config.rs | ::path::{Path, PathBuf};
use crate::om::glob::glob;
use std::process::exit;
use super::target;
const PUBLISHER_OPTIONS: &[&str] = &["system", "package_app", "upload_app"];
const BYPASS_APP_CONFIG_ENV_VAR: &str = "origen_app_bypass_config_lookup";
const APP_CONFIG_PATHS: &str = "origen_app_config_paths";
macro_rules! use_app_config {
() => {{
!std::env::var_os($crate::core::application::config::BYPASS_APP_CONFIG_ENV_VAR).is_some()
}}
}
#[derive(Debug, Deserialize)]
pub struct CurrentState {
pub target: Option<Vec<String>>
}
impl CurrentState {
pub fn build(root: &PathBuf) -> Self {
let file = root.join(".origen").join("application.toml");
let mut s = config::Config::builder().set_default("target", None::<Vec<String>>).unwrap();
if file.exists() {
s = s.add_source(File::with_name(&format!("{}", file.display())));
}
let cb = exit_on_bad_config!(s.build());
let slf: Self = exit_on_bad_config!(cb.try_deserialize());
slf
}
pub fn apply_to(&mut self, config: &mut Config) {
if let Some(t) = self.target.as_ref() {
config.target = Some(t.to_owned())
} else {
if let Some(t) = &config.target {
let clean_defaults = target::set_at_root(t.iter().map( |s| s.as_str() ).collect(), config.root.as_ref().unwrap());
self.target = Some(clean_defaults);
}
}
}
pub fn build_and_apply(config: &mut Config) {
if use_app_config!() {
let mut slf = Self::build(config.root.as_ref().unwrap());
slf.apply_to(config);
}
}
}
#[derive(Debug, Deserialize)]
// If you add an attribute to this you must also update:
// * pyapi/src/lib.rs to convert it to Python
// * default function below to define the default value
// * add an example of it to src/app_generators/templates/app/config/application.toml
pub struct Config {
pub name: String,
pub target: Option<Vec<String>>,
pub mode: String,
/// Don't use this unless you know what you're doing, use origen::STATUS::output_dir() instead, since
/// that accounts for the output directory being overridden by the current command
pub output_directory: Option<String>,
/// Don't use this unless you know what you're doing, use origen::STATUS::reference_dir() instead, since
/// that accounts for the reference directory being overridden by the current command
pub reference_directory: Option<String>,
pub website_output_directory: Option<String>,
pub website_source_directory: Option<String>,
pub website_release_location: Option<Location>,
pub website_release_name: Option<String>,
pub root: Option<PathBuf>,
pub revision_control: Option<HashMap<String, String>>,
pub unit_tester: Option<HashMap<String, String>>,
pub publisher: Option<HashMap<String, String>>,
pub linter: Option<HashMap<String, String>>,
pub release_scribe: Option<HashMap<String, String>>,
pub app_session_root: Option<String>,
pub commands: Option<Vec<String>>,
}
impl Config {
pub fn refresh(&mut self) {
let latest = Self::build(self.root.as_ref().unwrap(), false);
self.name = latest.name;
self.target = latest.target;
self.mode = latest.mode;
self.reference_directory = latest.reference_directory;
self.website_output_directory = latest.website_output_directory;
self.website_source_directory = latest.website_source_directory;
self.website_release_location = latest.website_release_location;
self.website_release_name = latest.website_release_name;
self.revision_control = latest.revision_control;
self.unit_tester = latest.unit_tester;
self.publisher = latest.publisher;
self.linter = latest.linter;
self.release_scribe = latest.release_scribe;
self.app_session_root = latest.app_session_root;
self.commands = latest.commands;
}
/// Builds a new config from all application.toml files found at the given app root
pub fn build(root: &Path, default_only: bool) -> Config {
log_trace!("Building app config");
let mut s = config::Config::builder()
.set_default("target", None::<Vec<String>>)
.unwrap()
.set_default("mode", "development".to_string())
.unwrap()
.set_default("revision_control", None::<HashMap<String, String>>)
.unwrap()
.set_default("unit_tester", None::<HashMap<String, String>>)
.unwrap()
.set_default("publisher", None::<HashMap<String, String>>)
.unwrap()
.set_default("linter", None::<HashMap<String, String>>)
.unwrap()
.set_default("release_scribe", None::<HashMap<String, String>>)
.unwrap()
.set_default("app_session_root", None::<String>)
.unwrap()
.set_default("commands", None::<Vec<String>>)
.unwrap();
let mut files: Vec<PathBuf> = Vec::new();
if let Some(paths) = std::env::var_os(APP_CONFIG_PATHS) {
log_trace!("Found custom config paths: {:?}", paths);
for path in std::env::split_paths(&paths) {
log_trace!("Looking for Origen app config file at '{}'", path.display());
if path.is_file() {
if let Some(ext) = path.extension() {
if ext == "toml" {
files.push(path);
} else {
log_error!(
"Expected file {} to have extension '.toml'. Found '{}'",
path.display(),
ext.to_string_lossy()
)
}
} else {
// accept a file without an extension. will be interpreted as a .toml
files.push(path);
}
} else if path.is_dir() {
let f = path.join("application.toml");
if f.exists() {
files.push(f);
}
} else {
log_error!(
"Config path {} either does not exists or is not accessible",
path.display()
);
exit(1);
}
}
}
if use_app_config!() {
let file = root.join("config").join("application.toml");
if file.exists() {
files.push(file);
}
} else {
// Bypass Origen's default configuration lookup - use only the enumerated configs
log_trace!("Bypassing Origen's App Config Lookup");
}
for f in files.iter().rev() {
log_trace!("Loading Origen config file from '{}'", f.display());
s = s.add_source(File::with_name(&format!("{}", f.display())));
}
s = s.add_source(Environment::with_prefix("origen_app").list_separator(",").with_list_parse_key("target").with_list_parse_key("commands").try_parsing(true));
let cb = exit_on_bad_config!(s.build());
let mut c: Self = exit_on_bad_config!(cb.try_deserialize());
c.root = Some(root.to_path_buf());
// TODO
// if let Some(l) = loc {
// c.website_release_location = Some(Location::new(&l));
// }
log_trace!("Completed building app config");
c.validate_options();
if !default_only {
CurrentState::build_and_apply(&mut c);
}
c
}
pub fn validate_options(&self) {
log_trace!("Validating available options...");
if let Some(targets) = self.target.as_ref() {
log_trace!("\tValidating default target...");
for t in targets {
target::clean_name(t, "targets", true, self.root.as_ref().unwrap());
}
log_trace!("\tValidating default target!");
}
log_trace!("\tValidating publisher options...");
for unknown in self.validate_publisher_options() {
log_warning!("Unknown Publisher Option '{}'", unknown);
}
log_trace!("\tFinished validating publisher options");
log_trace!("Finished checking configs!");
}
pub fn validate_publisher_options(&self) -> Vec<String> |
pub fn cmd_paths(&self) -> Vec<PathBuf> {
let mut retn = vec!();
if let Some(cmds) = self.commands.as_ref() {
// Load in only the commands explicitly given
for cmds_toml in cmds {
let ct = self.root.as_ref().unwrap().join("config").join(cmds_toml);
if ct.exists() {
retn.push(ct.to_owned());
} else {
log_error!("Can not locate app commands file '{}'", scrub_path!(ct).display())
}
}
} else {
// Load in any commands from:
// 1) app_root/commands.toml
// 2) app_root/commands/*/**.toml
let commands_toml = self.root.as_ref().unwrap().join("config").join("commands.toml");
// println!("commands toml: {}", commands_toml.display());
if commands_toml.exists | {
let mut unknowns: Vec<String> = vec![];
if let Some(p) = &self.publisher {
for (opt, _) in p.iter() {
if !PUBLISHER_OPTIONS.contains(&opt.as_str()) {
unknowns.push(opt.clone());
}
}
}
unknowns
} | identifier_body |
config.rs | ::path::{Path, PathBuf};
use crate::om::glob::glob;
use std::process::exit;
use super::target;
const PUBLISHER_OPTIONS: &[&str] = &["system", "package_app", "upload_app"];
const BYPASS_APP_CONFIG_ENV_VAR: &str = "origen_app_bypass_config_lookup";
const APP_CONFIG_PATHS: &str = "origen_app_config_paths";
macro_rules! use_app_config {
() => {{
!std::env::var_os($crate::core::application::config::BYPASS_APP_CONFIG_ENV_VAR).is_some()
}}
}
#[derive(Debug, Deserialize)]
pub struct | {
pub target: Option<Vec<String>>
}
impl CurrentState {
pub fn build(root: &PathBuf) -> Self {
let file = root.join(".origen").join("application.toml");
let mut s = config::Config::builder().set_default("target", None::<Vec<String>>).unwrap();
if file.exists() {
s = s.add_source(File::with_name(&format!("{}", file.display())));
}
let cb = exit_on_bad_config!(s.build());
let slf: Self = exit_on_bad_config!(cb.try_deserialize());
slf
}
pub fn apply_to(&mut self, config: &mut Config) {
if let Some(t) = self.target.as_ref() {
config.target = Some(t.to_owned())
} else {
if let Some(t) = &config.target {
let clean_defaults = target::set_at_root(t.iter().map( |s| s.as_str() ).collect(), config.root.as_ref().unwrap());
self.target = Some(clean_defaults);
}
}
}
pub fn build_and_apply(config: &mut Config) {
if use_app_config!() {
let mut slf = Self::build(config.root.as_ref().unwrap());
slf.apply_to(config);
}
}
}
#[derive(Debug, Deserialize)]
// If you add an attribute to this you must also update:
// * pyapi/src/lib.rs to convert it to Python
// * default function below to define the default value
// * add an example of it to src/app_generators/templates/app/config/application.toml
pub struct Config {
pub name: String,
pub target: Option<Vec<String>>,
pub mode: String,
/// Don't use this unless you know what you're doing, use origen::STATUS::output_dir() instead, since
/// that accounts for the output directory being overridden by the current command
pub output_directory: Option<String>,
/// Don't use this unless you know what you're doing, use origen::STATUS::reference_dir() instead, since
/// that accounts for the reference directory being overridden by the current command
pub reference_directory: Option<String>,
pub website_output_directory: Option<String>,
pub website_source_directory: Option<String>,
pub website_release_location: Option<Location>,
pub website_release_name: Option<String>,
pub root: Option<PathBuf>,
pub revision_control: Option<HashMap<String, String>>,
pub unit_tester: Option<HashMap<String, String>>,
pub publisher: Option<HashMap<String, String>>,
pub linter: Option<HashMap<String, String>>,
pub release_scribe: Option<HashMap<String, String>>,
pub app_session_root: Option<String>,
pub commands: Option<Vec<String>>,
}
impl Config {
pub fn refresh(&mut self) {
let latest = Self::build(self.root.as_ref().unwrap(), false);
self.name = latest.name;
self.target = latest.target;
self.mode = latest.mode;
self.reference_directory = latest.reference_directory;
self.website_output_directory = latest.website_output_directory;
self.website_source_directory = latest.website_source_directory;
self.website_release_location = latest.website_release_location;
self.website_release_name = latest.website_release_name;
self.revision_control = latest.revision_control;
self.unit_tester = latest.unit_tester;
self.publisher = latest.publisher;
self.linter = latest.linter;
self.release_scribe = latest.release_scribe;
self.app_session_root = latest.app_session_root;
self.commands = latest.commands;
}
/// Builds a new config from all application.toml files found at the given app root
pub fn build(root: &Path, default_only: bool) -> Config {
log_trace!("Building app config");
let mut s = config::Config::builder()
.set_default("target", None::<Vec<String>>)
.unwrap()
.set_default("mode", "development".to_string())
.unwrap()
.set_default("revision_control", None::<HashMap<String, String>>)
.unwrap()
.set_default("unit_tester", None::<HashMap<String, String>>)
.unwrap()
.set_default("publisher", None::<HashMap<String, String>>)
.unwrap()
.set_default("linter", None::<HashMap<String, String>>)
.unwrap()
.set_default("release_scribe", None::<HashMap<String, String>>)
.unwrap()
.set_default("app_session_root", None::<String>)
.unwrap()
.set_default("commands", None::<Vec<String>>)
.unwrap();
let mut files: Vec<PathBuf> = Vec::new();
if let Some(paths) = std::env::var_os(APP_CONFIG_PATHS) {
log_trace!("Found custom config paths: {:?}", paths);
for path in std::env::split_paths(&paths) {
log_trace!("Looking for Origen app config file at '{}'", path.display());
if path.is_file() {
if let Some(ext) = path.extension() {
if ext == "toml" {
files.push(path);
} else {
log_error!(
"Expected file {} to have extension '.toml'. Found '{}'",
path.display(),
ext.to_string_lossy()
)
}
} else {
// accept a file without an extension. will be interpreted as a .toml
files.push(path);
}
} else if path.is_dir() {
let f = path.join("application.toml");
if f.exists() {
files.push(f);
}
} else {
log_error!(
"Config path {} either does not exists or is not accessible",
path.display()
);
exit(1);
}
}
}
if use_app_config!() {
let file = root.join("config").join("application.toml");
if file.exists() {
files.push(file);
}
} else {
// Bypass Origen's default configuration lookup - use only the enumerated configs
log_trace!("Bypassing Origen's App Config Lookup");
}
for f in files.iter().rev() {
log_trace!("Loading Origen config file from '{}'", f.display());
s = s.add_source(File::with_name(&format!("{}", f.display())));
}
s = s.add_source(Environment::with_prefix("origen_app").list_separator(",").with_list_parse_key("target").with_list_parse_key("commands").try_parsing(true));
let cb = exit_on_bad_config!(s.build());
let mut c: Self = exit_on_bad_config!(cb.try_deserialize());
c.root = Some(root.to_path_buf());
// TODO
// if let Some(l) = loc {
// c.website_release_location = Some(Location::new(&l));
// }
log_trace!("Completed building app config");
c.validate_options();
if !default_only {
CurrentState::build_and_apply(&mut c);
}
c
}
pub fn validate_options(&self) {
log_trace!("Validating available options...");
if let Some(targets) = self.target.as_ref() {
log_trace!("\tValidating default target...");
for t in targets {
target::clean_name(t, "targets", true, self.root.as_ref().unwrap());
}
log_trace!("\tValidating default target!");
}
log_trace!("\tValidating publisher options...");
for unknown in self.validate_publisher_options() {
log_warning!("Unknown Publisher Option '{}'", unknown);
}
log_trace!("\tFinished validating publisher options");
log_trace!("Finished checking configs!");
}
pub fn validate_publisher_options(&self) -> Vec<String> {
let mut unknowns: Vec<String> = vec![];
if let Some(p) = &self.publisher {
for (opt, _) in p.iter() {
if !PUBLISHER_OPTIONS.contains(&opt.as_str()) {
unknowns.push(opt.clone());
}
}
}
unknowns
}
pub fn cmd_paths(&self) -> Vec<PathBuf> {
let mut retn = vec!();
if let Some(cmds) = self.commands.as_ref() {
// Load in only the commands explicitly given
for cmds_toml in cmds {
let ct = self.root.as_ref().unwrap().join("config").join(cmds_toml);
if ct.exists() {
retn.push(ct.to_owned());
} else {
log_error!("Can not locate app commands file '{}'", scrub_path!(ct).display())
}
}
} else {
// Load in any commands from:
// 1) app_root/commands.toml
// 2) app_root/commands/*/**.toml
let commands_toml = self.root.as_ref().unwrap().join("config").join("commands.toml");
// println!("commands toml: {}", commands_toml.display());
if commands_toml.exists() | CurrentState | identifier_name |
config.rs | ::path::{Path, PathBuf};
use crate::om::glob::glob;
use std::process::exit;
use super::target;
const PUBLISHER_OPTIONS: &[&str] = &["system", "package_app", "upload_app"];
const BYPASS_APP_CONFIG_ENV_VAR: &str = "origen_app_bypass_config_lookup";
const APP_CONFIG_PATHS: &str = "origen_app_config_paths";
macro_rules! use_app_config {
() => {{
!std::env::var_os($crate::core::application::config::BYPASS_APP_CONFIG_ENV_VAR).is_some()
}}
}
#[derive(Debug, Deserialize)]
pub struct CurrentState {
pub target: Option<Vec<String>>
}
impl CurrentState {
pub fn build(root: &PathBuf) -> Self {
let file = root.join(".origen").join("application.toml");
let mut s = config::Config::builder().set_default("target", None::<Vec<String>>).unwrap();
if file.exists() {
s = s.add_source(File::with_name(&format!("{}", file.display())));
}
let cb = exit_on_bad_config!(s.build());
let slf: Self = exit_on_bad_config!(cb.try_deserialize());
slf
}
pub fn apply_to(&mut self, config: &mut Config) {
if let Some(t) = self.target.as_ref() {
config.target = Some(t.to_owned())
} else {
if let Some(t) = &config.target {
let clean_defaults = target::set_at_root(t.iter().map( |s| s.as_str() ).collect(), config.root.as_ref().unwrap());
self.target = Some(clean_defaults);
}
}
}
pub fn build_and_apply(config: &mut Config) {
if use_app_config!() {
let mut slf = Self::build(config.root.as_ref().unwrap());
slf.apply_to(config);
}
}
}
#[derive(Debug, Deserialize)]
// If you add an attribute to this you must also update:
// * pyapi/src/lib.rs to convert it to Python
// * default function below to define the default value
// * add an example of it to src/app_generators/templates/app/config/application.toml
pub struct Config {
pub name: String,
pub target: Option<Vec<String>>,
pub mode: String,
/// Don't use this unless you know what you're doing, use origen::STATUS::output_dir() instead, since
/// that accounts for the output directory being overridden by the current command
pub output_directory: Option<String>,
/// Don't use this unless you know what you're doing, use origen::STATUS::reference_dir() instead, since
/// that accounts for the reference directory being overridden by the current command
pub reference_directory: Option<String>,
pub website_output_directory: Option<String>,
pub website_source_directory: Option<String>,
pub website_release_location: Option<Location>,
pub website_release_name: Option<String>,
pub root: Option<PathBuf>,
pub revision_control: Option<HashMap<String, String>>,
pub unit_tester: Option<HashMap<String, String>>,
pub publisher: Option<HashMap<String, String>>,
pub linter: Option<HashMap<String, String>>,
pub release_scribe: Option<HashMap<String, String>>,
pub app_session_root: Option<String>,
pub commands: Option<Vec<String>>,
}
impl Config {
pub fn refresh(&mut self) {
let latest = Self::build(self.root.as_ref().unwrap(), false);
self.name = latest.name;
self.target = latest.target;
self.mode = latest.mode;
self.reference_directory = latest.reference_directory;
self.website_output_directory = latest.website_output_directory;
self.website_source_directory = latest.website_source_directory;
self.website_release_location = latest.website_release_location;
self.website_release_name = latest.website_release_name;
self.revision_control = latest.revision_control;
self.unit_tester = latest.unit_tester;
self.publisher = latest.publisher;
self.linter = latest.linter;
self.release_scribe = latest.release_scribe;
self.app_session_root = latest.app_session_root;
self.commands = latest.commands;
}
/// Builds a new config from all application.toml files found at the given app root
pub fn build(root: &Path, default_only: bool) -> Config {
log_trace!("Building app config");
let mut s = config::Config::builder()
.set_default("target", None::<Vec<String>>)
.unwrap()
.set_default("mode", "development".to_string())
.unwrap()
.set_default("revision_control", None::<HashMap<String, String>>)
.unwrap()
.set_default("unit_tester", None::<HashMap<String, String>>)
.unwrap()
.set_default("publisher", None::<HashMap<String, String>>)
.unwrap()
.set_default("linter", None::<HashMap<String, String>>)
.unwrap()
.set_default("release_scribe", None::<HashMap<String, String>>)
.unwrap()
.set_default("app_session_root", None::<String>)
.unwrap()
.set_default("commands", None::<Vec<String>>)
.unwrap();
let mut files: Vec<PathBuf> = Vec::new();
if let Some(paths) = std::env::var_os(APP_CONFIG_PATHS) {
log_trace!("Found custom config paths: {:?}", paths);
for path in std::env::split_paths(&paths) {
log_trace!("Looking for Origen app config file at '{}'", path.display());
if path.is_file() {
if let Some(ext) = path.extension() {
if ext == "toml" {
files.push(path);
} else {
log_error!(
"Expected file {} to have extension '.toml'. Found '{}'",
path.display(),
ext.to_string_lossy()
)
}
} else {
// accept a file without an extension. will be interpreted as a .toml
files.push(path);
}
} else if path.is_dir() {
let f = path.join("application.toml");
if f.exists() {
files.push(f);
}
} else |
}
}
if use_app_config!() {
let file = root.join("config").join("application.toml");
if file.exists() {
files.push(file);
}
} else {
// Bypass Origen's default configuration lookup - use only the enumerated configs
log_trace!("Bypassing Origen's App Config Lookup");
}
for f in files.iter().rev() {
log_trace!("Loading Origen config file from '{}'", f.display());
s = s.add_source(File::with_name(&format!("{}", f.display())));
}
s = s.add_source(Environment::with_prefix("origen_app").list_separator(",").with_list_parse_key("target").with_list_parse_key("commands").try_parsing(true));
let cb = exit_on_bad_config!(s.build());
let mut c: Self = exit_on_bad_config!(cb.try_deserialize());
c.root = Some(root.to_path_buf());
// TODO
// if let Some(l) = loc {
// c.website_release_location = Some(Location::new(&l));
// }
log_trace!("Completed building app config");
c.validate_options();
if !default_only {
CurrentState::build_and_apply(&mut c);
}
c
}
pub fn validate_options(&self) {
log_trace!("Validating available options...");
if let Some(targets) = self.target.as_ref() {
log_trace!("\tValidating default target...");
for t in targets {
target::clean_name(t, "targets", true, self.root.as_ref().unwrap());
}
log_trace!("\tValidating default target!");
}
log_trace!("\tValidating publisher options...");
for unknown in self.validate_publisher_options() {
log_warning!("Unknown Publisher Option '{}'", unknown);
}
log_trace!("\tFinished validating publisher options");
log_trace!("Finished checking configs!");
}
pub fn validate_publisher_options(&self) -> Vec<String> {
let mut unknowns: Vec<String> = vec![];
if let Some(p) = &self.publisher {
for (opt, _) in p.iter() {
if !PUBLISHER_OPTIONS.contains(&opt.as_str()) {
unknowns.push(opt.clone());
}
}
}
unknowns
}
pub fn cmd_paths(&self) -> Vec<PathBuf> {
let mut retn = vec!();
if let Some(cmds) = self.commands.as_ref() {
// Load in only the commands explicitly given
for cmds_toml in cmds {
let ct = self.root.as_ref().unwrap().join("config").join(cmds_toml);
if ct.exists() {
retn.push(ct.to_owned());
} else {
log_error!("Can not locate app commands file '{}'", scrub_path!(ct).display())
}
}
} else {
// Load in any commands from:
// 1) app_root/commands.toml
// 2) app_root/commands/*/**.toml
let commands_toml = self.root.as_ref().unwrap().join("config").join("commands.toml");
// println!("commands toml: {}", commands_toml.display());
if commands_toml.exists | {
log_error!(
"Config path {} either does not exists or is not accessible",
path.display()
);
exit(1);
} | conditional_block |
config.rs | ::path::{Path, PathBuf};
use crate::om::glob::glob;
use std::process::exit;
use super::target;
const PUBLISHER_OPTIONS: &[&str] = &["system", "package_app", "upload_app"];
const BYPASS_APP_CONFIG_ENV_VAR: &str = "origen_app_bypass_config_lookup";
const APP_CONFIG_PATHS: &str = "origen_app_config_paths";
macro_rules! use_app_config {
() => {{
!std::env::var_os($crate::core::application::config::BYPASS_APP_CONFIG_ENV_VAR).is_some()
}}
}
#[derive(Debug, Deserialize)]
pub struct CurrentState {
pub target: Option<Vec<String>>
}
impl CurrentState {
pub fn build(root: &PathBuf) -> Self {
let file = root.join(".origen").join("application.toml");
let mut s = config::Config::builder().set_default("target", None::<Vec<String>>).unwrap();
if file.exists() {
s = s.add_source(File::with_name(&format!("{}", file.display())));
}
let cb = exit_on_bad_config!(s.build());
let slf: Self = exit_on_bad_config!(cb.try_deserialize());
slf
}
pub fn apply_to(&mut self, config: &mut Config) {
if let Some(t) = self.target.as_ref() {
config.target = Some(t.to_owned())
} else {
if let Some(t) = &config.target {
let clean_defaults = target::set_at_root(t.iter().map( |s| s.as_str() ).collect(), config.root.as_ref().unwrap());
self.target = Some(clean_defaults);
} | let mut slf = Self::build(config.root.as_ref().unwrap());
slf.apply_to(config);
}
}
}
#[derive(Debug, Deserialize)]
// If you add an attribute to this you must also update:
// * pyapi/src/lib.rs to convert it to Python
// * default function below to define the default value
// * add an example of it to src/app_generators/templates/app/config/application.toml
pub struct Config {
pub name: String,
pub target: Option<Vec<String>>,
pub mode: String,
/// Don't use this unless you know what you're doing, use origen::STATUS::output_dir() instead, since
/// that accounts for the output directory being overridden by the current command
pub output_directory: Option<String>,
/// Don't use this unless you know what you're doing, use origen::STATUS::reference_dir() instead, since
/// that accounts for the reference directory being overridden by the current command
pub reference_directory: Option<String>,
pub website_output_directory: Option<String>,
pub website_source_directory: Option<String>,
pub website_release_location: Option<Location>,
pub website_release_name: Option<String>,
pub root: Option<PathBuf>,
pub revision_control: Option<HashMap<String, String>>,
pub unit_tester: Option<HashMap<String, String>>,
pub publisher: Option<HashMap<String, String>>,
pub linter: Option<HashMap<String, String>>,
pub release_scribe: Option<HashMap<String, String>>,
pub app_session_root: Option<String>,
pub commands: Option<Vec<String>>,
}
impl Config {
pub fn refresh(&mut self) {
let latest = Self::build(self.root.as_ref().unwrap(), false);
self.name = latest.name;
self.target = latest.target;
self.mode = latest.mode;
self.reference_directory = latest.reference_directory;
self.website_output_directory = latest.website_output_directory;
self.website_source_directory = latest.website_source_directory;
self.website_release_location = latest.website_release_location;
self.website_release_name = latest.website_release_name;
self.revision_control = latest.revision_control;
self.unit_tester = latest.unit_tester;
self.publisher = latest.publisher;
self.linter = latest.linter;
self.release_scribe = latest.release_scribe;
self.app_session_root = latest.app_session_root;
self.commands = latest.commands;
}
/// Builds a new config from all application.toml files found at the given app root
pub fn build(root: &Path, default_only: bool) -> Config {
log_trace!("Building app config");
let mut s = config::Config::builder()
.set_default("target", None::<Vec<String>>)
.unwrap()
.set_default("mode", "development".to_string())
.unwrap()
.set_default("revision_control", None::<HashMap<String, String>>)
.unwrap()
.set_default("unit_tester", None::<HashMap<String, String>>)
.unwrap()
.set_default("publisher", None::<HashMap<String, String>>)
.unwrap()
.set_default("linter", None::<HashMap<String, String>>)
.unwrap()
.set_default("release_scribe", None::<HashMap<String, String>>)
.unwrap()
.set_default("app_session_root", None::<String>)
.unwrap()
.set_default("commands", None::<Vec<String>>)
.unwrap();
let mut files: Vec<PathBuf> = Vec::new();
if let Some(paths) = std::env::var_os(APP_CONFIG_PATHS) {
log_trace!("Found custom config paths: {:?}", paths);
for path in std::env::split_paths(&paths) {
log_trace!("Looking for Origen app config file at '{}'", path.display());
if path.is_file() {
if let Some(ext) = path.extension() {
if ext == "toml" {
files.push(path);
} else {
log_error!(
"Expected file {} to have extension '.toml'. Found '{}'",
path.display(),
ext.to_string_lossy()
)
}
} else {
// accept a file without an extension. will be interpreted as a .toml
files.push(path);
}
} else if path.is_dir() {
let f = path.join("application.toml");
if f.exists() {
files.push(f);
}
} else {
log_error!(
"Config path {} either does not exists or is not accessible",
path.display()
);
exit(1);
}
}
}
if use_app_config!() {
let file = root.join("config").join("application.toml");
if file.exists() {
files.push(file);
}
} else {
// Bypass Origen's default configuration lookup - use only the enumerated configs
log_trace!("Bypassing Origen's App Config Lookup");
}
for f in files.iter().rev() {
log_trace!("Loading Origen config file from '{}'", f.display());
s = s.add_source(File::with_name(&format!("{}", f.display())));
}
s = s.add_source(Environment::with_prefix("origen_app").list_separator(",").with_list_parse_key("target").with_list_parse_key("commands").try_parsing(true));
let cb = exit_on_bad_config!(s.build());
let mut c: Self = exit_on_bad_config!(cb.try_deserialize());
c.root = Some(root.to_path_buf());
// TODO
// if let Some(l) = loc {
// c.website_release_location = Some(Location::new(&l));
// }
log_trace!("Completed building app config");
c.validate_options();
if !default_only {
CurrentState::build_and_apply(&mut c);
}
c
}
pub fn validate_options(&self) {
log_trace!("Validating available options...");
if let Some(targets) = self.target.as_ref() {
log_trace!("\tValidating default target...");
for t in targets {
target::clean_name(t, "targets", true, self.root.as_ref().unwrap());
}
log_trace!("\tValidating default target!");
}
log_trace!("\tValidating publisher options...");
for unknown in self.validate_publisher_options() {
log_warning!("Unknown Publisher Option '{}'", unknown);
}
log_trace!("\tFinished validating publisher options");
log_trace!("Finished checking configs!");
}
pub fn validate_publisher_options(&self) -> Vec<String> {
let mut unknowns: Vec<String> = vec![];
if let Some(p) = &self.publisher {
for (opt, _) in p.iter() {
if !PUBLISHER_OPTIONS.contains(&opt.as_str()) {
unknowns.push(opt.clone());
}
}
}
unknowns
}
pub fn cmd_paths(&self) -> Vec<PathBuf> {
let mut retn = vec!();
if let Some(cmds) = self.commands.as_ref() {
// Load in only the commands explicitly given
for cmds_toml in cmds {
let ct = self.root.as_ref().unwrap().join("config").join(cmds_toml);
if ct.exists() {
retn.push(ct.to_owned());
} else {
log_error!("Can not locate app commands file '{}'", scrub_path!(ct).display())
}
}
} else {
// Load in any commands from:
// 1) app_root/commands.toml
// 2) app_root/commands/*/**.toml
let commands_toml = self.root.as_ref().unwrap().join("config").join("commands.toml");
// println!("commands toml: {}", commands_toml.display());
if commands_toml.exists() {
| }
}
pub fn build_and_apply(config: &mut Config) {
if use_app_config!() { | random_line_split |
client_handler.go | , backend *LightEthereum) *clientHandler {
handler := &clientHandler{
checkpoint: checkpoint,
backend: backend,
closeCh: make(chan struct{}),
}
if ulcServers != nil {
ulc, err := newULC(ulcServers, ulcFraction)
if err != nil {
log.Error("Failed to initialize ultra light client")
}
handler.ulc = ulc
log.Info("Enable ultra light client mode")
}
var height uint64
if checkpoint != nil {
height = (checkpoint.SectionIndex+1)*params.CHTFrequency - 1
}
handler.fetcher = newLightFetcher(handler)
handler.downloader = downloader.New(height, backend.chainDb, nil, backend.eventMux, nil, backend.blockchain, handler.removePeer)
handler.backend.peers.notify((*downloaderPeerNotify)(handler))
return handler
}
func (h *clientHandler) stop() {
close(h.closeCh)
h.downloader.Terminate()
h.fetcher.close()
h.wg.Wait()
}
// runPeer is the p2p protocol run function for the given version.
func (h *clientHandler) runPeer(version uint, p *p2p.Peer, rw p2p.MsgReadWriter) error {
trusted := false
if h.ulc != nil {
trusted = h.ulc.trusted(p.ID())
}
peer := newPeer(int(version), h.backend.config.NetworkId, trusted, p, newMeteredMsgWriter(rw, int(version)))
peer.poolEntry = h.backend.serverPool.connect(peer, peer.Node())
if peer.poolEntry == nil {
return p2p.DiscRequested
}
h.wg.Add(1)
defer h.wg.Done()
err := h.handle(peer)
h.backend.serverPool.disconnect(peer.poolEntry)
return err
}
func (h *clientHandler) handle(p *peer) error {
if h.backend.peers.Len() >= h.backend.config.LightPeers && !p.Peer.Info().Network.Trusted {
return p2p.DiscTooManyPeers
}
p.Log().Debug("Light Ethereum peer connected", "name", p.Name())
// Execute the LES handshake
var (
head = h.backend.blockchain.CurrentHeader()
hash = head.Hash()
number = head.Number.Uint64()
td = h.backend.blockchain.GetTd(hash, number)
)
if err := p.Handshake(td, hash, number, h.backend.blockchain.Genesis().Hash(), nil); err != nil {
p.Log().Debug("Light Ethereum handshake failed", "err", err)
return err
}
// Register the peer locally
if err := h.backend.peers.Register(p); err != nil {
p.Log().Error("Light Ethereum peer registration failed", "err", err)
return err
}
serverConnectionGauge.Update(int64(h.backend.peers.Len()))
connectedAt := mclock.Now()
defer func() {
h.backend.peers.Unregister(p.id)
connectionTimer.Update(time.Duration(mclock.Now() - connectedAt))
serverConnectionGauge.Update(int64(h.backend.peers.Len()))
}()
h.fetcher.announce(p, p.headInfo)
// pool entry can be nil during the unit test.
if p.poolEntry != nil {
h.backend.serverPool.registered(p.poolEntry)
}
// Spawn a main loop to handle all incoming messages.
for {
if err := h.handleMsg(p); err != nil {
p.Log().Debug("Light Ethereum message handling failed", "err", err)
p.fcServer.DumpLogs()
return err
}
}
}
// handleMsg is invoked whenever an inbound message is received from a remote
// peer. The remote connection is torn down upon returning any error.
func (h *clientHandler) handleMsg(p *peer) error {
// Read the next message from the remote peer, and ensure it's fully consumed
msg, err := p.rw.ReadMsg()
if err != nil {
return err
}
p.Log().Trace("Light Ethereum message arrived", "code", msg.Code, "bytes", msg.Size)
if msg.Size > ProtocolMaxMsgSize {
return errResp(ErrMsgTooLarge, "%v > %v", msg.Size, ProtocolMaxMsgSize)
}
defer msg.Discard()
var deliverMsg *Msg
// Handle the message depending on its contents
switch msg.Code {
case AnnounceMsg:
p.Log().Trace("Received announce message")
var req announceData
if err := msg.Decode(&req); err != nil {
return errResp(ErrDecode, "%v: %v", msg, err)
}
if err := req.sanityCheck(); err != nil {
return err
}
update, size := req.Update.decode()
if p.rejectUpdate(size) {
return errResp(ErrRequestRejected, "")
}
p.updateFlowControl(update)
if req.Hash != (common.Hash{}) {
if p.announceType == announceTypeNone {
return errResp(ErrUnexpectedResponse, "")
}
if p.announceType == announceTypeSigned {
if err := req.checkSignature(p.ID(), update); err != nil {
p.Log().Trace("Invalid announcement signature", "err", err)
return err
}
p.Log().Trace("Valid announcement signature")
}
p.Log().Trace("Announce message content", "number", req.Number, "hash", req.Hash, "td", req.Td, "reorg", req.ReorgDepth)
h.fetcher.announce(p, &req)
}
case BlockHeadersMsg:
p.Log().Trace("Received block header response message")
var resp struct {
ReqID, BV uint64
Headers []*types.Header
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
if h.fetcher.requestedID(resp.ReqID) {
h.fetcher.deliverHeaders(p, resp.ReqID, resp.Headers)
} else {
if err := h.downloader.DeliverHeaders(p.id, resp.Headers); err != nil {
log.Debug("Failed to deliver headers", "err", err)
}
}
case BlockBodiesMsg:
p.Log().Trace("Received block bodies response")
var resp struct {
ReqID, BV uint64
Data []*types.Body
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgBlockBodies,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case CodeMsg:
p.Log().Trace("Received code response")
var resp struct {
ReqID, BV uint64
Data [][]byte
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgCode,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case ReceiptsMsg:
p.Log().Trace("Received receipts response")
var resp struct {
ReqID, BV uint64
Receipts []types.Receipts | }
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgReceipts,
ReqID: resp.ReqID,
Obj: resp.Receipts,
}
case ProofsV2Msg:
p.Log().Trace("Received les/2 proofs response")
var resp struct {
ReqID, BV uint64
Data light.NodeList
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgProofsV2,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case HelperTrieProofsMsg:
p.Log().Trace("Received helper trie proof response")
var resp struct {
ReqID, BV uint64
Data HelperTrieResps
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgHelperTrieProofs,
ReqID: resp.ReqID,
Obj: | }
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err) | random_line_split |
client_handler.go | backend *LightEthereum) *clientHandler {
handler := &clientHandler{
checkpoint: checkpoint,
backend: backend,
closeCh: make(chan struct{}),
}
if ulcServers != nil {
ulc, err := newULC(ulcServers, ulcFraction)
if err != nil {
log.Error("Failed to initialize ultra light client")
}
handler.ulc = ulc
log.Info("Enable ultra light client mode")
}
var height uint64
if checkpoint != nil {
height = (checkpoint.SectionIndex+1)*params.CHTFrequency - 1
}
handler.fetcher = newLightFetcher(handler)
handler.downloader = downloader.New(height, backend.chainDb, nil, backend.eventMux, nil, backend.blockchain, handler.removePeer)
handler.backend.peers.notify((*downloaderPeerNotify)(handler))
return handler
}
func (h *clientHandler) stop() |
// runPeer is the p2p protocol run function for the given version.
func (h *clientHandler) runPeer(version uint, p *p2p.Peer, rw p2p.MsgReadWriter) error {
trusted := false
if h.ulc != nil {
trusted = h.ulc.trusted(p.ID())
}
peer := newPeer(int(version), h.backend.config.NetworkId, trusted, p, newMeteredMsgWriter(rw, int(version)))
peer.poolEntry = h.backend.serverPool.connect(peer, peer.Node())
if peer.poolEntry == nil {
return p2p.DiscRequested
}
h.wg.Add(1)
defer h.wg.Done()
err := h.handle(peer)
h.backend.serverPool.disconnect(peer.poolEntry)
return err
}
func (h *clientHandler) handle(p *peer) error {
if h.backend.peers.Len() >= h.backend.config.LightPeers && !p.Peer.Info().Network.Trusted {
return p2p.DiscTooManyPeers
}
p.Log().Debug("Light Ethereum peer connected", "name", p.Name())
// Execute the LES handshake
var (
head = h.backend.blockchain.CurrentHeader()
hash = head.Hash()
number = head.Number.Uint64()
td = h.backend.blockchain.GetTd(hash, number)
)
if err := p.Handshake(td, hash, number, h.backend.blockchain.Genesis().Hash(), nil); err != nil {
p.Log().Debug("Light Ethereum handshake failed", "err", err)
return err
}
// Register the peer locally
if err := h.backend.peers.Register(p); err != nil {
p.Log().Error("Light Ethereum peer registration failed", "err", err)
return err
}
serverConnectionGauge.Update(int64(h.backend.peers.Len()))
connectedAt := mclock.Now()
defer func() {
h.backend.peers.Unregister(p.id)
connectionTimer.Update(time.Duration(mclock.Now() - connectedAt))
serverConnectionGauge.Update(int64(h.backend.peers.Len()))
}()
h.fetcher.announce(p, p.headInfo)
// pool entry can be nil during the unit test.
if p.poolEntry != nil {
h.backend.serverPool.registered(p.poolEntry)
}
// Spawn a main loop to handle all incoming messages.
for {
if err := h.handleMsg(p); err != nil {
p.Log().Debug("Light Ethereum message handling failed", "err", err)
p.fcServer.DumpLogs()
return err
}
}
}
// handleMsg is invoked whenever an inbound message is received from a remote
// peer. The remote connection is torn down upon returning any error.
func (h *clientHandler) handleMsg(p *peer) error {
// Read the next message from the remote peer, and ensure it's fully consumed
msg, err := p.rw.ReadMsg()
if err != nil {
return err
}
p.Log().Trace("Light Ethereum message arrived", "code", msg.Code, "bytes", msg.Size)
if msg.Size > ProtocolMaxMsgSize {
return errResp(ErrMsgTooLarge, "%v > %v", msg.Size, ProtocolMaxMsgSize)
}
defer msg.Discard()
var deliverMsg *Msg
// Handle the message depending on its contents
switch msg.Code {
case AnnounceMsg:
p.Log().Trace("Received announce message")
var req announceData
if err := msg.Decode(&req); err != nil {
return errResp(ErrDecode, "%v: %v", msg, err)
}
if err := req.sanityCheck(); err != nil {
return err
}
update, size := req.Update.decode()
if p.rejectUpdate(size) {
return errResp(ErrRequestRejected, "")
}
p.updateFlowControl(update)
if req.Hash != (common.Hash{}) {
if p.announceType == announceTypeNone {
return errResp(ErrUnexpectedResponse, "")
}
if p.announceType == announceTypeSigned {
if err := req.checkSignature(p.ID(), update); err != nil {
p.Log().Trace("Invalid announcement signature", "err", err)
return err
}
p.Log().Trace("Valid announcement signature")
}
p.Log().Trace("Announce message content", "number", req.Number, "hash", req.Hash, "td", req.Td, "reorg", req.ReorgDepth)
h.fetcher.announce(p, &req)
}
case BlockHeadersMsg:
p.Log().Trace("Received block header response message")
var resp struct {
ReqID, BV uint64
Headers []*types.Header
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
if h.fetcher.requestedID(resp.ReqID) {
h.fetcher.deliverHeaders(p, resp.ReqID, resp.Headers)
} else {
if err := h.downloader.DeliverHeaders(p.id, resp.Headers); err != nil {
log.Debug("Failed to deliver headers", "err", err)
}
}
case BlockBodiesMsg:
p.Log().Trace("Received block bodies response")
var resp struct {
ReqID, BV uint64
Data []*types.Body
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgBlockBodies,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case CodeMsg:
p.Log().Trace("Received code response")
var resp struct {
ReqID, BV uint64
Data [][]byte
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgCode,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case ReceiptsMsg:
p.Log().Trace("Received receipts response")
var resp struct {
ReqID, BV uint64
Receipts []types.Receipts
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgReceipts,
ReqID: resp.ReqID,
Obj: resp.Receipts,
}
case ProofsV2Msg:
p.Log().Trace("Received les/2 proofs response")
var resp struct {
ReqID, BV uint64
Data light.NodeList
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgProofsV2,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case HelperTrieProofsMsg:
p.Log().Trace("Received helper trie proof response")
var resp struct {
ReqID, BV uint64
Data HelperTrieResps
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgHelperTrieProofs,
ReqID: resp.ReqID,
Obj | {
close(h.closeCh)
h.downloader.Terminate()
h.fetcher.close()
h.wg.Wait()
} | identifier_body |
client_handler.go | ers && !p.Peer.Info().Network.Trusted {
return p2p.DiscTooManyPeers
}
p.Log().Debug("Light Ethereum peer connected", "name", p.Name())
// Execute the LES handshake
var (
head = h.backend.blockchain.CurrentHeader()
hash = head.Hash()
number = head.Number.Uint64()
td = h.backend.blockchain.GetTd(hash, number)
)
if err := p.Handshake(td, hash, number, h.backend.blockchain.Genesis().Hash(), nil); err != nil {
p.Log().Debug("Light Ethereum handshake failed", "err", err)
return err
}
// Register the peer locally
if err := h.backend.peers.Register(p); err != nil {
p.Log().Error("Light Ethereum peer registration failed", "err", err)
return err
}
serverConnectionGauge.Update(int64(h.backend.peers.Len()))
connectedAt := mclock.Now()
defer func() {
h.backend.peers.Unregister(p.id)
connectionTimer.Update(time.Duration(mclock.Now() - connectedAt))
serverConnectionGauge.Update(int64(h.backend.peers.Len()))
}()
h.fetcher.announce(p, p.headInfo)
// pool entry can be nil during the unit test.
if p.poolEntry != nil {
h.backend.serverPool.registered(p.poolEntry)
}
// Spawn a main loop to handle all incoming messages.
for {
if err := h.handleMsg(p); err != nil {
p.Log().Debug("Light Ethereum message handling failed", "err", err)
p.fcServer.DumpLogs()
return err
}
}
}
// handleMsg is invoked whenever an inbound message is received from a remote
// peer. The remote connection is torn down upon returning any error.
func (h *clientHandler) handleMsg(p *peer) error {
// Read the next message from the remote peer, and ensure it's fully consumed
msg, err := p.rw.ReadMsg()
if err != nil {
return err
}
p.Log().Trace("Light Ethereum message arrived", "code", msg.Code, "bytes", msg.Size)
if msg.Size > ProtocolMaxMsgSize {
return errResp(ErrMsgTooLarge, "%v > %v", msg.Size, ProtocolMaxMsgSize)
}
defer msg.Discard()
var deliverMsg *Msg
// Handle the message depending on its contents
switch msg.Code {
case AnnounceMsg:
p.Log().Trace("Received announce message")
var req announceData
if err := msg.Decode(&req); err != nil {
return errResp(ErrDecode, "%v: %v", msg, err)
}
if err := req.sanityCheck(); err != nil {
return err
}
update, size := req.Update.decode()
if p.rejectUpdate(size) {
return errResp(ErrRequestRejected, "")
}
p.updateFlowControl(update)
if req.Hash != (common.Hash{}) {
if p.announceType == announceTypeNone {
return errResp(ErrUnexpectedResponse, "")
}
if p.announceType == announceTypeSigned {
if err := req.checkSignature(p.ID(), update); err != nil {
p.Log().Trace("Invalid announcement signature", "err", err)
return err
}
p.Log().Trace("Valid announcement signature")
}
p.Log().Trace("Announce message content", "number", req.Number, "hash", req.Hash, "td", req.Td, "reorg", req.ReorgDepth)
h.fetcher.announce(p, &req)
}
case BlockHeadersMsg:
p.Log().Trace("Received block header response message")
var resp struct {
ReqID, BV uint64
Headers []*types.Header
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
if h.fetcher.requestedID(resp.ReqID) {
h.fetcher.deliverHeaders(p, resp.ReqID, resp.Headers)
} else {
if err := h.downloader.DeliverHeaders(p.id, resp.Headers); err != nil {
log.Debug("Failed to deliver headers", "err", err)
}
}
case BlockBodiesMsg:
p.Log().Trace("Received block bodies response")
var resp struct {
ReqID, BV uint64
Data []*types.Body
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgBlockBodies,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case CodeMsg:
p.Log().Trace("Received code response")
var resp struct {
ReqID, BV uint64
Data [][]byte
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgCode,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case ReceiptsMsg:
p.Log().Trace("Received receipts response")
var resp struct {
ReqID, BV uint64
Receipts []types.Receipts
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgReceipts,
ReqID: resp.ReqID,
Obj: resp.Receipts,
}
case ProofsV2Msg:
p.Log().Trace("Received les/2 proofs response")
var resp struct {
ReqID, BV uint64
Data light.NodeList
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgProofsV2,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case HelperTrieProofsMsg:
p.Log().Trace("Received helper trie proof response")
var resp struct {
ReqID, BV uint64
Data HelperTrieResps
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgHelperTrieProofs,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case TxStatusMsg:
p.Log().Trace("Received tx status response")
var resp struct {
ReqID, BV uint64
Status []light.TxStatus
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgTxStatus,
ReqID: resp.ReqID,
Obj: resp.Status,
}
case StopMsg:
p.freezeServer(true)
h.backend.retriever.frozen(p)
p.Log().Debug("Service stopped")
case ResumeMsg:
var bv uint64
if err := msg.Decode(&bv); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ResumeFreeze(bv)
p.freezeServer(false)
p.Log().Debug("Service resumed")
default:
p.Log().Trace("Received invalid message", "code", msg.Code)
return errResp(ErrInvalidMsgCode, "%v", msg.Code)
}
// Deliver the received response to retriever.
if deliverMsg != nil {
if err := h.backend.retriever.deliver(p, deliverMsg); err != nil {
p.responseErrors++
if p.responseErrors > maxResponseErrors {
return err
}
}
}
return nil
}
func (h *clientHandler) removePeer(id string) {
h.backend.peers.Unregister(id)
}
type peerConnection struct {
handler *clientHandler
peer *peer
}
func (pc *peerConnection) Head() (common.Hash, *big.Int) {
return pc.peer.HeadAndTd()
}
func (pc *peerConnection) | RequestHeadersByHash | identifier_name | |
client_handler.go | backend *LightEthereum) *clientHandler {
handler := &clientHandler{
checkpoint: checkpoint,
backend: backend,
closeCh: make(chan struct{}),
}
if ulcServers != nil {
ulc, err := newULC(ulcServers, ulcFraction)
if err != nil |
handler.ulc = ulc
log.Info("Enable ultra light client mode")
}
var height uint64
if checkpoint != nil {
height = (checkpoint.SectionIndex+1)*params.CHTFrequency - 1
}
handler.fetcher = newLightFetcher(handler)
handler.downloader = downloader.New(height, backend.chainDb, nil, backend.eventMux, nil, backend.blockchain, handler.removePeer)
handler.backend.peers.notify((*downloaderPeerNotify)(handler))
return handler
}
func (h *clientHandler) stop() {
close(h.closeCh)
h.downloader.Terminate()
h.fetcher.close()
h.wg.Wait()
}
// runPeer is the p2p protocol run function for the given version.
func (h *clientHandler) runPeer(version uint, p *p2p.Peer, rw p2p.MsgReadWriter) error {
trusted := false
if h.ulc != nil {
trusted = h.ulc.trusted(p.ID())
}
peer := newPeer(int(version), h.backend.config.NetworkId, trusted, p, newMeteredMsgWriter(rw, int(version)))
peer.poolEntry = h.backend.serverPool.connect(peer, peer.Node())
if peer.poolEntry == nil {
return p2p.DiscRequested
}
h.wg.Add(1)
defer h.wg.Done()
err := h.handle(peer)
h.backend.serverPool.disconnect(peer.poolEntry)
return err
}
func (h *clientHandler) handle(p *peer) error {
if h.backend.peers.Len() >= h.backend.config.LightPeers && !p.Peer.Info().Network.Trusted {
return p2p.DiscTooManyPeers
}
p.Log().Debug("Light Ethereum peer connected", "name", p.Name())
// Execute the LES handshake
var (
head = h.backend.blockchain.CurrentHeader()
hash = head.Hash()
number = head.Number.Uint64()
td = h.backend.blockchain.GetTd(hash, number)
)
if err := p.Handshake(td, hash, number, h.backend.blockchain.Genesis().Hash(), nil); err != nil {
p.Log().Debug("Light Ethereum handshake failed", "err", err)
return err
}
// Register the peer locally
if err := h.backend.peers.Register(p); err != nil {
p.Log().Error("Light Ethereum peer registration failed", "err", err)
return err
}
serverConnectionGauge.Update(int64(h.backend.peers.Len()))
connectedAt := mclock.Now()
defer func() {
h.backend.peers.Unregister(p.id)
connectionTimer.Update(time.Duration(mclock.Now() - connectedAt))
serverConnectionGauge.Update(int64(h.backend.peers.Len()))
}()
h.fetcher.announce(p, p.headInfo)
// pool entry can be nil during the unit test.
if p.poolEntry != nil {
h.backend.serverPool.registered(p.poolEntry)
}
// Spawn a main loop to handle all incoming messages.
for {
if err := h.handleMsg(p); err != nil {
p.Log().Debug("Light Ethereum message handling failed", "err", err)
p.fcServer.DumpLogs()
return err
}
}
}
// handleMsg is invoked whenever an inbound message is received from a remote
// peer. The remote connection is torn down upon returning any error.
func (h *clientHandler) handleMsg(p *peer) error {
// Read the next message from the remote peer, and ensure it's fully consumed
msg, err := p.rw.ReadMsg()
if err != nil {
return err
}
p.Log().Trace("Light Ethereum message arrived", "code", msg.Code, "bytes", msg.Size)
if msg.Size > ProtocolMaxMsgSize {
return errResp(ErrMsgTooLarge, "%v > %v", msg.Size, ProtocolMaxMsgSize)
}
defer msg.Discard()
var deliverMsg *Msg
// Handle the message depending on its contents
switch msg.Code {
case AnnounceMsg:
p.Log().Trace("Received announce message")
var req announceData
if err := msg.Decode(&req); err != nil {
return errResp(ErrDecode, "%v: %v", msg, err)
}
if err := req.sanityCheck(); err != nil {
return err
}
update, size := req.Update.decode()
if p.rejectUpdate(size) {
return errResp(ErrRequestRejected, "")
}
p.updateFlowControl(update)
if req.Hash != (common.Hash{}) {
if p.announceType == announceTypeNone {
return errResp(ErrUnexpectedResponse, "")
}
if p.announceType == announceTypeSigned {
if err := req.checkSignature(p.ID(), update); err != nil {
p.Log().Trace("Invalid announcement signature", "err", err)
return err
}
p.Log().Trace("Valid announcement signature")
}
p.Log().Trace("Announce message content", "number", req.Number, "hash", req.Hash, "td", req.Td, "reorg", req.ReorgDepth)
h.fetcher.announce(p, &req)
}
case BlockHeadersMsg:
p.Log().Trace("Received block header response message")
var resp struct {
ReqID, BV uint64
Headers []*types.Header
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
if h.fetcher.requestedID(resp.ReqID) {
h.fetcher.deliverHeaders(p, resp.ReqID, resp.Headers)
} else {
if err := h.downloader.DeliverHeaders(p.id, resp.Headers); err != nil {
log.Debug("Failed to deliver headers", "err", err)
}
}
case BlockBodiesMsg:
p.Log().Trace("Received block bodies response")
var resp struct {
ReqID, BV uint64
Data []*types.Body
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgBlockBodies,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case CodeMsg:
p.Log().Trace("Received code response")
var resp struct {
ReqID, BV uint64
Data [][]byte
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgCode,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case ReceiptsMsg:
p.Log().Trace("Received receipts response")
var resp struct {
ReqID, BV uint64
Receipts []types.Receipts
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgReceipts,
ReqID: resp.ReqID,
Obj: resp.Receipts,
}
case ProofsV2Msg:
p.Log().Trace("Received les/2 proofs response")
var resp struct {
ReqID, BV uint64
Data light.NodeList
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgProofsV2,
ReqID: resp.ReqID,
Obj: resp.Data,
}
case HelperTrieProofsMsg:
p.Log().Trace("Received helper trie proof response")
var resp struct {
ReqID, BV uint64
Data HelperTrieResps
}
if err := msg.Decode(&resp); err != nil {
return errResp(ErrDecode, "msg %v: %v", msg, err)
}
p.fcServer.ReceivedReply(resp.ReqID, resp.BV)
deliverMsg = &Msg{
MsgType: MsgHelperTrieProofs,
ReqID: resp.ReqID,
Obj | {
log.Error("Failed to initialize ultra light client")
} | conditional_block |
files_test.go | No != expected.Pages[1].PageNo {
t.Errorf("Page two PageNo got %v wanted %v", rmf.Pages[1].PageNo, expected.Pages[1].PageNo)
}
if rmf.Pages[1].Identifier != expected.Pages[1].Identifier {
t.Errorf("Page two Identifier got %v wanted %v", rmf.Pages[1].Identifier, expected.Pages[1].Identifier)
}
if rmf.Pages[1].rmPath != expected.Pages[1].rmPath {
t.Errorf("Page two rmPath got %v wanted %v", rmf.Pages[1].rmPath, expected.Pages[1].rmPath)
}
if rmf.Pages[1].LayerNames[1] != expected.Pages[1].LayerNames[1] {
t.Error("Page two second layer names not the same")
}
// https://stackoverflow.com/a/29339052
redirStdOut := func(log string) string {
oldStdout := os.Stdout
r, w, _ := os.Pipe()
os.Stdout = w
// debug!
rmf.Debug(log)
w.Close()
s, _ := ioutil.ReadAll(r)
r.Close()
os.Stdout = oldStdout
return string(s)
}
rmf.Debugging = false
s := redirStdOut("hi")
if s != "" {
t.Error("debug should be nil")
}
rmf.Debugging = true
s = redirStdOut("hi")
if s != "hi\n" {
t.Errorf("debug got %s not %s", s, "hi")
}
}
// TestFilesXochitlWithoutPDF tests xochitl format files without a pdf
func TestFilesXochitlWithoutPDF(t *testing.T) {
template := "../templates/A4.pdf"
rmf, err := RMFiler("../testfiles/d34df12d-e72b-4939-a791-5b34b3a810e7", template)
if err != nil {
t.Fatalf("Could not open file %v", err)
}
expected := RMFileInfo{
RmFS: &RmFS{
pdfPath: "", // no pdf
identifier: "d34df12d-e72b-4939-a791-5b34b3a810e7",
},
Version: 0,
VisibleName: "toolbox",
LastModified: ptime("2020-01-05 13:03:52 +0000 GMT"),
PageCount: 1,
Pages: []RMPage{
{
PageNo: 0,
Identifier: "2c277cdb-79a5-4f69-b583-4901d944e77e",
rmFileDesc: &rmFileDesc{rmPath: "d34df12d-e72b-4939-a791-5b34b3a810e7/2c277cdb-79a5-4f69-b583-4901d944e77e.rm"},
LayerNames: []string{"Layer 1"},
},
},
}
if rmf.pdfPath != expected.pdfPath {
t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath)
}
if rmf.identifier != expected.identifier {
t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier)
}
if rmf.Version != expected.Version {
t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version)
}
if rmf.VisibleName != expected.VisibleName {
t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName)
}
if rmf.PageCount != expected.PageCount {
t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount)
}
if rmf.Pages[0].PageNo != expected.Pages[0].PageNo {
t.Errorf("Page one PageNo got %v wanted %v", rmf.Pages[0].PageNo, expected.Pages[0].PageNo)
}
if rmf.Pages[0].Identifier != expected.Pages[0].Identifier {
t.Errorf("Page one Identifier got %v wanted %v", rmf.Pages[0].Identifier, expected.Pages[0].Identifier)
}
if rmf.Pages[0].rmPath != expected.Pages[0].rmPath {
t.Errorf("Page one rmPath got %v wanted %v", rmf.Pages[0].rmPath, expected.Pages[0].rmPath)
}
if rmf.Pages[0].LayerNames[0] != expected.Pages[0].LayerNames[0] {
t.Error("Page one second layer names not the same")
}
}
// TestInsertedPage checks if an inserted page is detected correctly
func TestInsertedPage(t *testing.T) {
testUUID := "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c"
template := "../templates/A4.pdf"
rmf, err := RMFiler("../testfiles/"+testUUID, template)
if err != nil {
t.Fatalf("Could not open file %v", err)
} | expected := RMFileInfo{
RmFS: &RmFS{
pdfPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c.pdf",
identifier: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c",
},
Version: 0,
VisibleName: "insert-pages",
LastModified: ptime("2022-09-09 14:13:39 +0100 BST"),
Orientation: "portrait",
OriginalPageCount: 2,
PageCount: 3,
Pages: []RMPage{
{
PageNo: 0,
Identifier: "fa678373-8530-465d-a988-a0b158d957e4",
rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/fa678373-8530-465d-a988-a0b158d957e4.rm"},
LayerNames: []string{"Layer 1"},
},
{
PageNo: 1,
Identifier: "0b8b6e65-926c-4269-9109-36fca8718c94",
rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/0b8b6e65-926c-4269-9109-36fca8718c94.rm"},
LayerNames: []string{"Layer 1"},
},
{
PageNo: 2,
Identifier: "e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf",
rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf.rm"},
LayerNames: []string{"Layer 1"},
},
},
RedirectionPageMap: []int{0, -1, 1},
Debugging: false,
}
opt := cmp.Comparer(func(x, y RMFileInfo) bool {
if x.pdfPath != y.pdfPath {
t.Errorf("path %s != %s", x.pdfPath, y.pdfPath)
return false
}
if x.identifier != y.identifier {
t.Errorf("identifier %s != %s", x.pdfPath, y.pdfPath)
return false
}
if x.Version != y.Version ||
x.VisibleName != y.VisibleName ||
x.Orientation != y.Orientation ||
x.OriginalPageCount != y.OriginalPageCount ||
x.PageCount != y.PageCount {
t.Error("version, visiblename, orientation, originalpagecount or pagecount differ")
return false
}
if len(x.RedirectionPageMap) != len(y.RedirectionPageMap) {
t.Errorf("redirection length %d != %d", len(x.RedirectionPage | random_line_split | |
files_test.go | (t *testing.T) {
template := ""
rmf, err := RMFiler("../testfiles/cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", template)
if err != nil {
t.Fatalf("Could not open file %v", err)
}
expected := RMFileInfo{
RmFS: &RmFS{
pdfPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf",
identifier: "cc8313bb-5fab-4ab5-af39-46e6d4160df3",
},
Version: 17,
VisibleName: "tpl",
LastModified: ptime("2019-12-28 23:17:19 +0000 GMT"),
PageCount: 2,
Pages: []RMPage{
{
PageNo: 0,
Identifier: "da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224",
rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224.rm"},
LayerNames: []string{"Layer 1", "Layer 2 is empty"},
},
{
PageNo: 1,
Identifier: "7794dbce-2506-4fb0-99fd-9ec031426d57",
rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/7794dbce-2506-4fb0-99fd-9ec031426d57.rm"},
LayerNames: []string{"Layer 1", "Layer 2"},
},
},
}
if rmf.pdfPath != expected.pdfPath {
t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath)
}
if rmf.identifier != expected.identifier {
t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier)
}
if rmf.Version != expected.Version {
t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version)
}
if rmf.VisibleName != expected.VisibleName {
t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName)
}
if rmf.PageCount != expected.PageCount {
t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount)
}
if rmf.Pages[1].PageNo != expected.Pages[1].PageNo {
t.Errorf("Page two PageNo got %v wanted %v", rmf.Pages[1].PageNo, expected.Pages[1].PageNo)
}
if rmf.Pages[1].Identifier != expected.Pages[1].Identifier {
t.Errorf("Page two Identifier got %v wanted %v", rmf.Pages[1].Identifier, expected.Pages[1].Identifier)
}
if rmf.Pages[1].rmPath != expected.Pages[1].rmPath {
t.Errorf("Page two rmPath got %v wanted %v", rmf.Pages[1].rmPath, expected.Pages[1].rmPath)
}
if rmf.Pages[1].LayerNames[1] != expected.Pages[1].LayerNames[1] {
t.Error("Page two second layer names not the same")
}
// https://stackoverflow.com/a/29339052
redirStdOut := func(log string) string {
oldStdout := os.Stdout
r, w, _ := os.Pipe()
os.Stdout = w
// debug!
rmf.Debug(log)
w.Close()
s, _ := ioutil.ReadAll(r)
r.Close()
os.Stdout = oldStdout
return string(s)
}
rmf.Debugging = false
s := redirStdOut("hi")
if s != "" {
t.Error("debug should be nil")
}
rmf.Debugging = true
s = redirStdOut("hi")
if s != "hi\n" {
t.Errorf("debug got %s not %s", s, "hi")
}
}
// TestFilesXochitlWithoutPDF tests xochitl format files without a pdf
func TestFilesXochitlWithoutPDF(t *testing.T) {
template := "../templates/A4.pdf"
rmf, err := RMFiler("../testfiles/d34df12d-e72b-4939-a791-5b34b3a810e7", template)
if err != nil {
t.Fatalf("Could not open file %v", err)
}
expected := RMFileInfo{
RmFS: &RmFS{
pdfPath: "", // no pdf
identifier: "d34df12d-e72b-4939-a791-5b34b3a810e7",
},
Version: 0,
VisibleName: "toolbox",
LastModified: ptime("2020-01-05 13:03:52 +0000 GMT"),
PageCount: 1,
Pages: []RMPage{
{
PageNo: 0,
Identifier: "2c277cdb-79a5-4f69-b583-4901d944e77e",
rmFileDesc: &rmFileDesc{rmPath: "d34df12d-e72b-4939-a791-5b34b3a810e7/2c277cdb-79a5-4f69-b583-4901d944e77e.rm"},
LayerNames: []string{"Layer 1"},
},
},
}
if rmf.pdfPath != expected.pdfPath {
t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath)
}
if rmf.identifier != expected.identifier {
t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier)
}
if rmf.Version != expected.Version {
t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version)
}
if rmf.VisibleName != expected.VisibleName {
t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName)
}
if rmf.PageCount != expected.PageCount {
t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount)
}
if rmf.Pages[0].PageNo != expected.Pages[0].PageNo {
t.Errorf("Page one PageNo got %v wanted %v", rmf.Pages[0].PageNo, expected.Pages[0].PageNo)
}
if rmf.Pages[0].Identifier != expected.Pages[0].Identifier {
t.Errorf("Page one Identifier got %v wanted %v", rmf.Pages[0].Identifier, expected.Pages[0].Identifier)
}
if rmf.Pages[0].rmPath != expected.Pages[0].rmPath {
t.Errorf("Page one rmPath got %v wanted %v", rmf.Pages[0].rmPath, expected.Pages[0].rmPath)
}
if rmf.Pages[0].LayerNames[0] != expected.Pages[0].LayerNames[0] {
t.Error("Page one second layer names not the same")
}
}
// TestInsertedPage checks if an inserted page is detected correctly
func TestInsertedPage(t *testing.T) {
testUUID := "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c"
template := "../templates/A4.pdf"
rmf, err := RMFiler("../testfiles/"+testUUID, template)
if err != nil {
t.Fatalf("Could not open file %v", err)
}
expected := RMFileInfo{
RmFS: &RmFS{
pdfPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c.pdf",
identifier: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c",
},
Version: 0,
VisibleName: "insert-pages",
LastModified: ptime("2022-09-09 14: | TestFilesXochitlWithPDF | identifier_name | |
files_test.go | != expected.Pages[1].PageNo {
t.Errorf("Page two PageNo got %v wanted %v", rmf.Pages[1].PageNo, expected.Pages[1].PageNo)
}
if rmf.Pages[1].Identifier != expected.Pages[1].Identifier {
t.Errorf("Page two Identifier got %v wanted %v", rmf.Pages[1].Identifier, expected.Pages[1].Identifier)
}
if rmf.Pages[1].rmPath != expected.Pages[1].rmPath {
t.Errorf("Page two rmPath got %v wanted %v", rmf.Pages[1].rmPath, expected.Pages[1].rmPath)
}
if rmf.Pages[1].LayerNames[1] != expected.Pages[1].LayerNames[1] |
// https://stackoverflow.com/a/29339052
redirStdOut := func(log string) string {
oldStdout := os.Stdout
r, w, _ := os.Pipe()
os.Stdout = w
// debug!
rmf.Debug(log)
w.Close()
s, _ := ioutil.ReadAll(r)
r.Close()
os.Stdout = oldStdout
return string(s)
}
rmf.Debugging = false
s := redirStdOut("hi")
if s != "" {
t.Error("debug should be nil")
}
rmf.Debugging = true
s = redirStdOut("hi")
if s != "hi\n" {
t.Errorf("debug got %s not %s", s, "hi")
}
}
// TestFilesXochitlWithoutPDF tests xochitl format files without a pdf
func TestFilesXochitlWithoutPDF(t *testing.T) {
template := "../templates/A4.pdf"
rmf, err := RMFiler("../testfiles/d34df12d-e72b-4939-a791-5b34b3a810e7", template)
if err != nil {
t.Fatalf("Could not open file %v", err)
}
expected := RMFileInfo{
RmFS: &RmFS{
pdfPath: "", // no pdf
identifier: "d34df12d-e72b-4939-a791-5b34b3a810e7",
},
Version: 0,
VisibleName: "toolbox",
LastModified: ptime("2020-01-05 13:03:52 +0000 GMT"),
PageCount: 1,
Pages: []RMPage{
{
PageNo: 0,
Identifier: "2c277cdb-79a5-4f69-b583-4901d944e77e",
rmFileDesc: &rmFileDesc{rmPath: "d34df12d-e72b-4939-a791-5b34b3a810e7/2c277cdb-79a5-4f69-b583-4901d944e77e.rm"},
LayerNames: []string{"Layer 1"},
},
},
}
if rmf.pdfPath != expected.pdfPath {
t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath)
}
if rmf.identifier != expected.identifier {
t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier)
}
if rmf.Version != expected.Version {
t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version)
}
if rmf.VisibleName != expected.VisibleName {
t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName)
}
if rmf.PageCount != expected.PageCount {
t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount)
}
if rmf.Pages[0].PageNo != expected.Pages[0].PageNo {
t.Errorf("Page one PageNo got %v wanted %v", rmf.Pages[0].PageNo, expected.Pages[0].PageNo)
}
if rmf.Pages[0].Identifier != expected.Pages[0].Identifier {
t.Errorf("Page one Identifier got %v wanted %v", rmf.Pages[0].Identifier, expected.Pages[0].Identifier)
}
if rmf.Pages[0].rmPath != expected.Pages[0].rmPath {
t.Errorf("Page one rmPath got %v wanted %v", rmf.Pages[0].rmPath, expected.Pages[0].rmPath)
}
if rmf.Pages[0].LayerNames[0] != expected.Pages[0].LayerNames[0] {
t.Error("Page one second layer names not the same")
}
}
// TestInsertedPage checks if an inserted page is detected correctly
func TestInsertedPage(t *testing.T) {
testUUID := "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c"
template := "../templates/A4.pdf"
rmf, err := RMFiler("../testfiles/"+testUUID, template)
if err != nil {
t.Fatalf("Could not open file %v", err)
}
expected := RMFileInfo{
RmFS: &RmFS{
pdfPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c.pdf",
identifier: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c",
},
Version: 0,
VisibleName: "insert-pages",
LastModified: ptime("2022-09-09 14:13:39 +0100 BST"),
Orientation: "portrait",
OriginalPageCount: 2,
PageCount: 3,
Pages: []RMPage{
{
PageNo: 0,
Identifier: "fa678373-8530-465d-a988-a0b158d957e4",
rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/fa678373-8530-465d-a988-a0b158d957e4.rm"},
LayerNames: []string{"Layer 1"},
},
{
PageNo: 1,
Identifier: "0b8b6e65-926c-4269-9109-36fca8718c94",
rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/0b8b6e65-926c-4269-9109-36fca8718c94.rm"},
LayerNames: []string{"Layer 1"},
},
{
PageNo: 2,
Identifier: "e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf",
rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf.rm"},
LayerNames: []string{"Layer 1"},
},
},
RedirectionPageMap: []int{0, -1, 1},
Debugging: false,
}
opt := cmp.Comparer(func(x, y RMFileInfo) bool {
if x.pdfPath != y.pdfPath {
t.Errorf("path %s != %s", x.pdfPath, y.pdfPath)
return false
}
if x.identifier != y.identifier {
t.Errorf("identifier %s != %s", x.pdfPath, y.pdfPath)
return false
}
if x.Version != y.Version ||
x.VisibleName != y.VisibleName ||
x.Orientation != y.Orientation ||
x.OriginalPageCount != y.OriginalPageCount ||
x.PageCount != y.PageCount {
t.Error("version, visiblename, orientation, originalpagecount or pagecount differ")
return false
}
if len(x.RedirectionPageMap) != len(y.RedirectionPageMap) {
t.Errorf("redirection length %d != %d", len(x.Redirection | {
t.Error("Page two second layer names not the same")
} | conditional_block |
files_test.go | y.OriginalPageCount ||
x.PageCount != y.PageCount {
t.Error("version, visiblename, orientation, originalpagecount or pagecount differ")
return false
}
if len(x.RedirectionPageMap) != len(y.RedirectionPageMap) {
t.Errorf("redirection length %d != %d", len(x.RedirectionPageMap), len(y.RedirectionPageMap))
return false
}
for i, rpm := range x.RedirectionPageMap {
if rpm != y.RedirectionPageMap[i] {
t.Errorf("redirection page map %d %d != %d", i, rpm, y.RedirectionPageMap[i])
return false
}
}
if len(x.Pages) != len(y.Pages) {
t.Errorf("page lengths different %d != %d", len(x.Pages), len(y.Pages))
return false
}
for i, xPage := range x.Pages {
yPage := y.Pages[i]
if xPage.PageNo != yPage.PageNo {
t.Errorf("page %d != %d", xPage.PageNo, yPage.PageNo)
return false
}
if xPage.Identifier != yPage.Identifier {
t.Errorf("identifier %s != %s", xPage.Identifier, yPage.Identifier)
return false
}
if xPage.rmPath != yPage.rmPath {
t.Errorf("rmpath %x != %s", xPage.rmPath, yPage.rmPath)
return false
}
if len(xPage.LayerNames) != len(yPage.LayerNames) {
t.Errorf("layer len %d != %d", len(xPage.LayerNames), len(yPage.LayerNames))
return false
}
}
return true
})
// if !cmp.Equal(rmf, expected, cmpopts.IgnoreUnexported(rmf), cmpopts.IgnoreInterfaces(struct{ io.Reader }{})) {
if !cmp.Equal(rmf, expected, opt) {
t.Errorf("rmf != expected for insert page test")
}
if len(expected.Pages) != rmf.PageCount {
t.Errorf("expected pages %d != rmf pages %d", len(expected.Pages), rmf.PageCount)
}
if len(rmf.insertedPages) != 1 || rmf.insertedPages[0] != 1 {
t.Errorf(
"inserted pages %v should equal [1]",
rmf.insertedPages,
)
}
if !cmp.Equal(rmf.insertedPages.insertedPageNos(), []int{2}) {
t.Errorf(
"human inserted pages %v should equal {2}",
rmf.insertedPages.insertedPageNos(),
)
}
if rmf.insertedPages.insertedPageNumbers() != "2" {
t.Errorf(
"human inserted pages as string %v should equal '2'",
rmf.insertedPages.insertedPageNumbers(),
)
}
type iterExpected struct {
pageNo int
pdfPageNo int
inserted bool
isTemplate bool
}
iExpectArray := []iterExpected{
{0, 0, false, false},
{1, 0, true, true},
{2, 1, false, false},
}
for i := 0; i < rmf.PageCount; i++ {
// ignore filehandle in last assignment
pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate()
j := iterExpected{pageNo, pdfPageNo, inserted, isTemplate}
e := iExpectArray[i]
if j.pageNo != e.pageNo ||
j.pdfPageNo != e.pdfPageNo ||
j.inserted != e.inserted ||
j.isTemplate != e.isTemplate {
t.Errorf("iter i %d expected %+v got %+v", i, e, j)
}
}
}
// TestHorizontal checks if a horizontal PDF is detected correctly
func TestHorizontal(t *testing.T) {
testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee"
template := ""
rmf, err := RMFiler("../testfiles/"+testUUID, template)
if err != nil {
t.Errorf("Could not open file %v", err)
}
if rmf.Orientation != "landscape" {
t.Errorf("Expected landscape orientation, got %s", rmf.Orientation)
}
}
// TestExtensionIgnored checks that when providing an input with an extension
// the extension is ignored
func TestExtensionIgnored(t *testing.T) {
testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee.arbitrary"
template := ""
_, err := RMFiler("../testfiles/"+testUUID, template)
if err != nil {
t.Errorf("Could not open file %v", err)
}
}
// TestZipWithNoMetadata tests a zip file with no metadata
// note that this older rmapi zip format uses 0-indexed page numbers
//
// ../testfiles/no-metadata.zip
// ddae88d1-7514-43b6-b7de-dcdd18eeb69a.content
// ddae88d1-7514-43b6-b7de-dcdd18eeb69a.pagedata
// ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0-metadata.json
// ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0.rm
//
// in comparison, see ../testfiles/horizontal_rmapi.zip
// e724bba2-266f-434d-aaf2-935d2b405aee.content
// e724bba2-266f-434d-aaf2-935d2b405aee.metadata
// e724bba2-266f-434d-aaf2-935d2b405aee.pagedata
// e724bba2-266f-434d-aaf2-935d2b405aee.pdf
// e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6-metadata.json
// e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6.rm
func TestZipWithNoMetadata(t *testing.T) {
template := ""
rmf, err := RMFiler("../testfiles/no-metadata.zip", template)
if err != nil {
t.Errorf("Could not open file %v", err)
}
for i := 0; i < rmf.PageCount; i++ {
pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate()
t.Logf(
"pageno %d pdfpageno %d inserted %t istpl %t\n",
pageNo, pdfPageNo, inserted, isTemplate,
)
}
}
// TestZipVersion3 tests a remarkable v3.0.4 zip file made by rmapi
//
// ../testfiles/version3.zip
// 701cdc43-04aa-410c-bc6f-3c773105a74d
// 701cdc43-04aa-410c-bc6f-3c773105a74d.content
// 701cdc43-04aa-410c-bc6f-3c773105a74d.metadata
// 701cdc43-04aa-410c-bc6f-3c773105a74d.pdf
func TestZipV3(t *testing.T) | {
template := ""
rmf, err := RMFiler("../testfiles/version3.zip", template)
expected := "software version 3 not supported -- no rm metadata file found"
if !strings.Contains(err.Error(), expected) {
t.Errorf("v3 file should error with %s, not %v", expected, err)
}
pages := 0
for i := 0; i < rmf.PageCount; i++ {
pages++
pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate()
t.Logf(
"pageno %d pdfpageno %d inserted %t istpl %t\n",
pageNo, pdfPageNo, inserted, isTemplate,
)
}
if pages != 2 { | identifier_body | |
pars.py | :972,
'Урал' :52,
'Челябинская область' :11225,
'Челябинск' :56,
'Магнитогорск' :235,
'Снежинск' :11218,
'Курганская область' :11158,
'Курган' :53,
'Свердловская область' :11162,
'Екатеринбург' :54,
'Каменск-Уральский' :11164,
'Нижний Тагил' :11168,
'Новоуральск' :11170,
'Первоуральск' :11171,
'Тюменская область' :11176,
'Тюмень' :55,
'Тобольск' :11175,
'Ханты-Мансийский автономный округ' :11193,
'Ханты-Мансийск' :57,
'Сургут' :973,
'Нижневартовск' :1091,
'Сибирь' :59,
'Омская область' :11318,
'Омск' :66,
'Новосибирская область' :11316,
'Новосибирск' :65,
'Бердск' :11314,
'Томская область' :11353,
'Томск' :67,
'Ямало-Ненецкий автономный округ' :11232,
'Салехард' :58,
'Алтайский край' :11235,
'Барнаул' :197,
'Бийск' :975,
'Рубцовск' :11251,
'Республика Алтай' :10231,
'Горно-Алтайск' :11319,
'Кемеровская область' :11282,
'Кемерово' :64,
'Междуреченск' :11287,
'Новокузнецк' :237,
'Прокопьевск' :11291,
'Республика Хакасия' :11340,
'Абакан' :1095,
'Республика Тыва' :10233,
'Кызыл' :11333,
'Красноярский край' :11309,
'Красноярск' :62,
'Ачинск' :11302,
'Норильск' :11311,
'Железногорск' :20086,
'Иркутская область' :11266,
'Иркутск' :63,
'Братск' :976,
'Республика Бурятия' :11330,
'Улан-Удэ' :198,
'Забайкальский край' :21949,
'Чита' :68,
'Дальневосточный федеральный округ' :73,
'Республика Саха (Якутия)' :11443,
'Якутск' :74,
'Амурская область' :11375,
'Благовещенск' :77,
'Еврейская автономная область' :10243,
'Биробиджан' :11393,
'Приморский край' :11409,
'Владивосток' :75,
'Находка' :974,
'Уссурийск' :11426,
'Чукотский автономный округ' :10251,
'Анадырь' :11458,
'Камчатский край' :11398,
'Петропавловск-Камчатский' :78,
'Магаданская область' :11403,
'Магадан' :79,
'Сахалинская область' :11450,
'Южно-Сахалинск' :80,
'Хабаровский край' :11457,
'Хабаровск' :76,
'Комсомольск-на-Амуре' :11453
}
exceptions = [
'2gis.ru',
'yandex.ru',
'wikipedia',
'pulscen',
'blizko.ru',
'Avito.ru',
'avito.ru',
'edadeal.ru'
]
def stop_pars(event):
print('Stop Pars')
def output(event):
button_1.config(state="disabled")
# button_2.config(state="normal")
root.update()
# получаю список запросов
inquiries_text = text.get(1.0, END)
inquiries_text = inquiries_text.split("\n")
inquiries = []
for val in inquiries_text:
if len(val) != 0:
inquiries.append(val.strip())
# получаю регион
region = dict_regions_russia[combo.get()]
# получаю глубину парсинга
deep_pars = spin.get()
try:
deep_pars = int(deep_pars)
# отрабатывает исклчюение на то что ввели не цифру
except ValueError:
deep_pars = 1
# максимальная глубина парсинга 10 страниц
if deep_pars > 10:
deep_pars = 10
# если ввели ноль
if deep_pars == 0:
deep_pars = 1
progress = 0
main_simple_progress = ( 100 / len(inquiries) ) / int(deep_pars)
# Запускаю selenium
options = Options()
# Запускаем драйвер без графической оболочки браузера
options.headless = True
# Убираем логирование в консоль
options.add_argument('--log-level=3')
# Инициализируем драйвер хром
driver = webdriver.Chrome(chrome_options=options, executable_path='drivers/chromedriver.exe')
for inquirie in inquiries:
title_list = []
description_list = []
keywords_list = []
h1_list = []
h2_list = []
h3_list = []
for i in range(1, deep_pars + 1):
# получаю страницу яндекс поиска
q = 'https://yandex.ru/search/?text='+str(inquirie)+'&lr='+str(region)+'&p='+str(i)
driver.get(q)
soup = BeautifulSoup (driver.page_source, features="html.parser")
links = []
# обрабатываю полученную страницу
for link in soup.select('.serp-item .organic__url'):
# делаю сравнение со списком исключений для ссылок
check_link = True
for exception_val in exceptions:
result = re.search(exception_val, link.attrs["href"])
if result :
check_link = False
break
# заполняю список собранными ссылками
if check_link:
links.append(link.attrs["href"])
one_part_progress = round( main_simple_progress / len(links), 1 )
for link in links:
driver.get(link)
soup_site = BeautifulSoup (driver.page_source, features="html.parser")
if soup_site.title != None:
title_list.append(soup_site.title.string)
h3 = soup_site.find_all('h3')
if h3 != None:
for tag in h3:
h3_list.append(tag.text)
h2 = soup_site.find_all('h2')
if h2 != None:
for tag in h2:
h2_list.append(tag.text)
h1 = soup_site.find_all('h1')
if h1 != None:
for tag in h1:
h1_list.append(tag.text)
description = soup_site.find('meta', {'name':'description'})
if description != None:
description_list.append(description.get('content'))
keywords = soup_site.find('meta', {'name':'keywords'})
if keywords != None:
keywords_list.append(keywords.get('content'))
# создаю новую книгу
workboo | k = openp | identifier_name | |
pars.py | 2,
'Урал' :52,
'Челябинская область' :11225,
'Челябинск' :56,
'Магнитогорск' :235,
'Снежинск' :11218,
'Курганская область' :11158,
'Курган' :53,
'Свердловская область' :11162,
'Екатеринбург' :54,
'Каменск-Уральский' :11164,
'Нижний Тагил' :11168,
'Новоуральск' :11170,
'Первоуральск' :11171,
'Тюменская область' :11176,
'Тюмень' :55,
'Тобольск' :11175,
'Ханты-Мансийский автономный округ' :11193,
'Ханты-Мансийск' :57,
'Сургут' :973,
'Нижневартовск' :1091,
'Сибирь' :59,
'Омская область' :11318,
'Омск' :66,
'Новосибирская область' :11316,
'Новосибирск' :65,
'Бердск' :11314,
'Томская область' :11353,
'Томск' :67,
'Ямало-Ненецкий автономный округ' :11232,
'Салехард' :58,
'Алтайский край' :11235,
'Барнаул' :197,
'Бийск' :975,
'Рубцовск' :11251,
'Республика Алтай' :10231,
'Горно-Алтайск' :11319,
'Кемеровская область' :11282,
'Кемерово' :64,
'Междуреченск' :11287,
'Новокузнецк' :237,
'Прокопьевск' :11291,
'Республика Хакасия' :11340,
'Абакан' :1095,
'Республика Тыва' :10233,
'Кызыл' :11333,
'Красноярский край' :11309,
'Красноярск' :62,
'Ачинск' :11302,
'Норильск' :11311,
'Железногорск' :20086,
'Иркутская область' :11266,
'Иркутск' :63,
'Братск' :976,
'Республика Бурятия' :11330,
'Улан-Удэ' :198,
'Забайкальский край' :21949,
'Чита' :68,
'Дальневосточный федеральный округ' :73,
'Республика Саха (Якутия)' :11443,
'Якутск' :74,
'Амурская область' :11375,
'Благовещенск' :77,
'Еврейская автономная область' :10243,
'Биробиджан' :11393,
'Приморский край' :11409,
'Владивосток' :75,
'Находка' :974,
'Уссурийск' :11426,
'Чукотский автономный округ' :10251,
'Анадырь' :11458,
'Камчатский край' :11398,
'Петропавловск-Камчатский' :78,
'Магаданская область' :11403,
'Магадан' :79,
'Сахалинская область' :11450,
'Южно-Сахалинск' :80,
'Хабаровский край' :11457,
'Хабаровск' :76,
'Комсомольск-на-Амуре' :11453
}
exceptions = [
'2gis.ru',
'yandex.ru',
'wikipedia',
'pulscen',
'blizko.ru',
'Avito.ru',
'avito.ru',
'edadeal.ru'
]
def stop_pars(event):
print('Stop Pars')
def output(event):
button_1.config(state="disabled")
# button_2.config(state="normal")
root.update()
# получаю список запросов
inquiries_text = text.get(1.0, END)
inquiries_text = inquiries_text.split("\n")
inquiries = []
for val in inquiries_text:
if len(val) != 0:
inquiries.append(val.strip())
# получаю регион
region = dict_regions_russia[combo.get()]
# получаю глубину парсинга
deep_pars = spin.get()
try:
deep_pars = int(deep_pars)
# отрабатывает исклчюение на то что ввели не цифру
except ValueError:
deep_pars = 1
# максимальная глубина парсинга 10 страниц
if deep_pars > 10:
deep_pars = 10
# если ввели ноль
if deep_pars == 0:
deep_pars = 1
progress = 0
main_simple_progress = ( 100 / len(inquiries) ) / int(deep_pars)
# Запускаю selenium
options = Options()
# Запускаем драйвер без графической оболочки браузера
options.headless = True
# Убираем логирование в консоль
options.add_argument('--log-level=3')
# Инициализируем драйвер хром
driver = webdriver.Chrome(chrome_options=options, executable_path='drivers/chromedriver.exe')
for inquirie in inquiries:
title_list = []
description_list = []
keywords_list = []
h1_list = []
h2_list = []
h3_list = []
for i in range(1, deep_pars + 1):
# получаю страницу яндекс поиска
q = 'https://yandex.ru/search/?text='+str(inquirie)+'&lr='+str(region)+'&p='+str(i)
driver.get(q)
soup = BeautifulSoup (driver.page_source, features="html.parser")
links = []
# обрабатываю полученную страницу
for link in soup.select('.serp-item .organic__url'):
# делаю сравнение со списком исключений для ссылок
check_link = True
for exception_val in exceptions:
result = re.search(exception_val, link.attrs["href"])
if result :
check_link = False
break
# заполняю список собранными ссылками
if check_link:
links.append(link.attrs["href"])
one_part_progress = round( main_simple_progress / len(links), 1 )
for link in links:
driver.get(link)
soup_site = BeautifulSoup (driver.page_source, features="html.parser")
if soup_site.title != None:
title_list.append(soup_site.title.string)
h3 = soup_site.find_all('h3')
if h3 != None:
for tag in h3:
h3_list.append(tag.text)
h2 = soup_site.find_all('h2')
if h2 != None:
for tag in h2:
h2_list.append(tag.text)
h1 = soup_site.find_all('h1')
if h1 != None:
for tag in h1:
h1_list.append(tag.text)
description = soup_site.find('meta', {'name':'description'})
if description != None:
description_list.append(description.get('content'))
keywords = soup_site.find('meta', {'name':'keywords'})
if keywords != None:
keywords_list.append(keywords.get('content'))
|
# создаю новую книгу
workbook = openpyxl.Workbook() | random_line_split | |
pars.py | рал' :52,
'Челябинская область' :11225,
'Челябинск' :56,
'Магнитогорск' :235,
'Снежинск' :11218,
'Курганская область' :11158,
'Курган' :53,
'Свердловская область' :11162,
'Екатеринбург' :54,
'Каменск-Уральский' :11164,
'Нижний Тагил' :11168,
'Новоуральск' :11170,
'Первоуральск' :11171,
'Тюменская область' :11176,
'Тюмень' :55,
'Тобольск' :11175,
'Ханты-Мансийский автономный округ' :11193,
'Ханты-Мансийск' :57,
'Сургут' :973,
'Нижневартовск' :1091,
'Сибирь' :59,
'Омская область' :11318,
'Омск' :66,
'Новосибирская область' :11316,
'Новосибирск' :65,
'Бердск' :11314,
'Томская область' :11353,
'Томск' :67,
'Ямало-Ненецкий автономный округ' :11232,
'Салехард' :58,
'Алтайский край' :11235,
'Барнаул' :197,
'Бийск' :975,
'Рубцовск' :11251,
'Республика Алтай' :10231,
'Горно-Алтайск' :11319,
'Кемеровская область' :11282,
'Кемерово' :64,
'Междуреченск' :11287,
'Новокузнецк' :237,
'Прокопьевск' :11291,
'Республика Хакасия' :11340,
'Абакан' :1095,
'Республика Тыва' :10233,
'Кызыл' :11333,
'Красноярский край' :11309,
'Красноярск' :62,
'Ачинск' :11302,
'Норильск' :11311,
'Железногорск' :20086,
'Иркутская область' :11266,
'Иркутск' :63,
'Братск' :976,
'Республика Бурятия' :11330,
'Улан-Удэ' :198,
'Забайкальский край' :21949,
'Чита' :68,
'Дальневосточный федеральный округ' :73,
'Республика Саха (Якутия)' :11443,
'Якутск' :74,
'Амурская область' :11375,
'Благовещенск' :77,
'Еврейская автономная область' :10243,
'Биробиджан' :11393,
'Приморский край' :11409,
'Владивосток' :75,
'Находка' :974,
'Уссурийск' :11426,
'Чукотский автономный округ' :10251,
'Анадырь' :11458,
'Камчатский край' :11398,
'Петропавловск-Камчатский' :78,
'Магаданская область' :11403,
'Магадан' :79,
'Сахалинская область' :11450,
'Южно-Сахалинск' :80,
'Хабаровский край' :11457,
'Хабаровск' :76,
'Комсомольск-на-Амуре' :11453
}
exceptions = [
'2gis.ru',
'yandex.ru',
'wikipedia',
'pulscen',
'blizko.ru',
'Avito.ru',
'avito.ru',
'edadeal.ru'
]
def stop_pars(event):
print('Stop Pars')
def output(event):
button_1.config(state="disabled")
# button_2.config(state="normal")
root.update()
# получаю список запросов
inquiries_text = text.get(1.0, END)
inquiries_text = inquiries_text.split("\n")
inquiries = []
for val in inquiries_text:
if len(val) != 0:
inquiries.append(val.strip())
# получаю регион
region = dict_regions_russia[combo.get()]
# получаю глубину парсинга
deep_pars = spin.get()
try:
deep_pars = int(deep_pars)
# отрабатывает исклчюение на то что ввели не цифру
except ValueError:
deep_pars = 1
# максимальная глубина парсинга 10 страниц
if deep_pars > 10:
deep_pars = 10
# если ввели ноль
if deep_pars == 0:
deep_pars = 1
progress = 0
main_simple_progress = ( 100 / len(inquiries) ) / int(deep_pars)
# Запускаю selenium
options = Options()
# Запускаем драйвер без графической оболочки браузера
options.headless = True
# Убираем логирование в консоль
options.add_argument('--log-level=3')
# Инициализируем драйвер хром
driver = webdriver.Chrome(chrome_options=options, executable_path='drivers/chromedriver.exe')
for inquirie in inquiries:
title_list = []
description_list = []
keywords_list = []
h1_list = []
h2_list = []
h3_list = []
for i in range(1, deep_pars + 1):
# получаю страницу яндекс поиска
q = 'https://yandex.ru/search/?text='+str(inquirie)+'&lr='+str(region)+'&p='+str(i)
driver.get(q)
soup = BeautifulSoup (driver.page_source, features="html.parser")
links = []
# обрабатываю полученную страницу
for link in soup.select('.serp-item .organic__url'):
# делаю сравнение со списком исключений для ссылок
check_link = True
for exception_val in exceptions:
result = re.search(exception_val, link.attrs["href"])
if result :
check_link = False
break
# заполняю список собранными ссылками
if check_link:
links.append(link.attrs["href"])
one_part_progress = round( main_simple_progress / len(links), 1 )
for link in links:
driver.get(link)
soup_site = BeautifulSoup (driver.page_source, features="html.parser")
if soup_site.title != None:
title_list.append(soup_site.title.string)
h3 = soup_site.find_all('h3')
if h3 != None:
for tag in h3:
h3_list.append(tag.text)
h2 = soup_site.find_all('h2')
if h2 != None:
for tag in h2:
h2_list.append(tag.text)
h1 = soup_site.find_all('h1')
if h1 != None:
for tag in h1:
h1_list.append(tag.text)
description = soup_site.find('meta', {'name':'description'})
if description != None:
description_list.append(description.get('content'))
keywords = soup_site.find('meta', {'name':'keywords'})
if keywords != None:
keywords_list.append(keywords.get('content'))
# создаю новую книгу
workbook = openpyxl.Workbo | ok()
title_she | identifier_body | |
pars.py | 74,
'Уссурийск' :11426,
'Чукотский автономный округ' :10251,
'Анадырь' :11458,
'Камчатский край' :11398,
'Петропавловск-Камчатский' :78,
'Магаданская область' :11403,
'Магадан' :79,
'Сахалинская область' :11450,
'Южно-Сахалинск' :80,
'Хабаровский край' :11457,
'Хабаровск' :76,
'Комсомольск-на-Амуре' :11453
}
exceptions = [
'2gis.ru',
'yandex.ru',
'wikipedia',
'pulscen',
'blizko.ru',
'Avito.ru',
'avito.ru',
'edadeal.ru'
]
def stop_pars(event):
print('Stop Pars')
def output(event):
button_1.config(state="disabled")
# button_2.config(state="normal")
root.update()
# получаю список запросов
inquiries_text = text.get(1.0, END)
inquiries_text = inquiries_text.split("\n")
inquiries = []
for val in inquiries_text:
if len(val) != 0:
inquiries.append(val.strip())
# получаю регион
region = dict_regions_russia[combo.get()]
# получаю глубину парсинга
deep_pars = spin.get()
try:
deep_pars = int(deep_pars)
# отрабатывает исклчюение на то что ввели не цифру
except ValueError:
deep_pars = 1
# максимальная глубина парсинга 10 страниц
if deep_pars > 10:
deep_pars = 10
# если ввели ноль
if deep_pars == 0:
deep_pars = 1
progress = 0
main_simple_progress = ( 100 / len(inquiries) ) / int(deep_pars)
# Запускаю selenium
options = Options()
# Запускаем драйвер без графической оболочки браузера
options.headless = True
# Убираем логирование в консоль
options.add_argument('--log-level=3')
# Инициализируем драйвер хром
driver = webdriver.Chrome(chrome_options=options, executable_path='drivers/chromedriver.exe')
for inquirie in inquiries:
title_list = []
description_list = []
keywords_list = []
h1_list = []
h2_list = []
h3_list = []
for i in range(1, deep_pars + 1):
# получаю страницу яндекс поиска
q = 'https://yandex.ru/search/?text='+str(inquirie)+'&lr='+str(region)+'&p='+str(i)
driver.get(q)
soup = BeautifulSoup (driver.page_source, features="html.parser")
links = []
# обрабатываю полученную страницу
for link in soup.select('.serp-item .organic__url'):
# делаю сравнение со списком исключений для ссылок
check_link = True
for exception_val in exceptions:
result = re.search(exception_val, link.attrs["href"])
if result :
check_link = False
break
# заполняю список собранными ссылками
if check_link:
links.append(link.attrs["href"])
one_part_progress = round( main_simple_progress / len(links), 1 )
for link in links:
driver.get(link)
soup_site = BeautifulSoup (driver.page_source, features="html.parser")
if soup_site.title != None:
title_list.append(soup_site.title.string)
h3 = soup_site.find_all('h3')
if h3 != None:
for tag in h3:
h3_list.append(tag.text)
h2 = soup_site.find_all('h2')
if h2 != None:
for tag in h2:
h2_list.append(tag.text)
h1 = soup_site.find_all('h1')
if h1 != None:
for tag in h1:
h1_list.append(tag.text)
description = soup_site.find('meta', {'name':'description'})
if description != None:
description_list.append(description.get('content'))
keywords = soup_site.find('meta', {'name':'keywords'})
if keywords != None:
keywords_list.append(keywords.get('content'))
# создаю новую книгу
workbook = openpyxl.Workbook()
title_sheet = workbook.active
# filename = datetime.datetime.today().strftime("%Y-%m-%d-%H-%M-%S")
# выбираем активный лист и меняем ему название
title_sheet.title = "title"
if title_check.get() == True:
i = 1
for word in title_list:
cellref = title_sheet.cell(row=i, column=1)
cellref.value = word
i = i + 1
if h3_check.get() == True:
# добавляю новую страницу
h3_sheet = workbook.create_sheet('H3', 1)
i = 1
for word in h3_list:
cellref = h3_sheet.cell(row=i, column=1)
cellref.value = word
i = i + 1
if h2_check.get() == True:
# добавляю новую страницу
h2_sheet = workbook.create_sheet('H2', 1)
i = 1
for word in h2_list:
cellref = h2_sheet.cell(row=i, column=1)
cellref.value = word
i = i + 1
# добавляю новую страницу
if h1_check.get() == True:
h1_sheet = workbook.create_sheet('H1', 1)
i = 1
for word in h1_list:
cellref = h1_sheet.cell(row=i, column=1)
cellref.value = word
i = i + 1
if keywords_check.get() == True:
# добавляю новую страницу
keywords_sheet = workbook.create_sheet('Keywords', 1)
i = 1
for word in keywords_list:
cellref = keywords_sheet.cell(row=i, column=1)
cellref.value = word
i = i + 1
if desc_check.get() == True:
# добавляю новую страницу
description_sheet = workbook.create_sheet('Description', 1)
i = 1
for word in description_list:
cellref = description_sheet.cell(row=i, column=1)
cellref.value = word
i = i + 1
# сохраняю данные в exel
workbook.save(filename = inquirie+'.xlsx')
progress = progress + one_part_progress
print('Прогресс '+ str(progress))
barVar.set(round(progress))
root.update()
button_1.config(state="normal")
# button_2.config(state="disabled")
barVar.set(100)
root.update()
print('Done')
# ---------------------------------------------------------------------------------
# рисую интерфейс
root = Tk()
root.title("Парсер мета данных сайтов по запросам")
root.geometry('400x450')
root.resizable(width=False, height=False)
frame_1 = Frame()
frame_2 = Frame()
frame_3 = Frame()
frame_4 = Frame()
frame_5 = Frame()
frame_6 = Frame()
frame_7 = Frame()
frame_8 = Frame()
frame_1.pack()
frame_2.pack()
frame_3.pack()
frame_4.pack()
frame_5.pack()
frame_6.pack()
frame_7.pack()
frame_8.pack()
lable_1 = Label(frame_1, text="Что собрать:")
lable_1.pack()
h1_check = BooleanVar()
h1_check.set(1)
c4 = Checkbutton(frame_1, text="h1", variable=h1_check, onvalue=1, offvalue=0)
c4.pack(side=LEFT)
title_check = BooleanVar()
title_check.set(1)
c1 = Checkbutton(frame_1, text="title", variable=title_check, onvalue=1, offvalue=0)
c1.pack(side=RIGHT)
h2_check = BooleanVar()
h2_check.set(1)
c5 = Checkbutton(frame_2, text="h2", variable=h2_check, onvalue=1, offvalue=0)
c5.pack(side=LEFT)
desc_check = BooleanVar()
desc_check.set(1)
c2 = Checkbutton(frame_2, text="desc", variable=desc_check, onvalue=1, offvalue=0)
c2.pack(side=RIGHT)
h3_check = BooleanVar()
h3_check.set( | 1)
c6 = Checkbutton(frame_3, text="h3", varia | conditional_block | |
openai.py | : Any) -> Any:
new(args[0]) # args[0] is self
return old(*args, **kwargs)
return repl
def _async_wrap(old: Any, new: Any):
async def repl(*args, **kwargs):
new(args[0]) # args[0] is self
return await old(*args, **kwargs)
return repl
def _set_credentials(self):
openai.api_key = self.openai_api_key
api_type = "open_ai"
api_base = "https://api.openai.com/v1"
api_version = None
if hasattr(self, "openai_api_type"):
api_type = self.openai_api_type
if api_type == "azure":
if hasattr(self, "openai_api_base"):
api_base = self.openai_api_base
if hasattr(self, "openai_api_version"):
api_version = self.openai_api_version
openai.api_type = api_type
openai.api_base = api_base
openai.api_version = api_version
# Monkey patching langchain
# pylint: disable=protected-access
ChatOpenAI._generate = _wrap(ChatOpenAI._generate, _set_credentials) # type: ignore
ChatOpenAI._agenerate = _async_wrap(ChatOpenAI._agenerate, _set_credentials) # type: ignore
BaseOpenAI._generate = _wrap(BaseOpenAI._generate, _set_credentials) # type: ignore
BaseOpenAI._agenerate = _async_wrap(BaseOpenAI._agenerate, _set_credentials) # type: ignore
# pylint: enable=protected-access
#########################################################################
# Monkey patching end #
#########################################################################
def _use_azure_credentials():
    """Point the global openai client at the Azure OpenAI endpoint.

    Reads LLM_AZURE_OPENAI_API_KEY / LLM_AZURE_OPENAI_API_BASE, and honours
    LLM_AZURE_OPENAI_API_VERSION when set, falling back to the previously
    hardcoded preview version for backward compatibility.
    """
    openai.api_type = "azure"
    openai.api_key = os.environ.get("LLM_AZURE_OPENAI_API_KEY")
    openai.api_base = os.environ.get("LLM_AZURE_OPENAI_API_BASE")
    # Fix: the version env var was required by _get_available_models but
    # ignored here; respect it when present, keep the old default otherwise.
    openai.api_version = os.environ.get("LLM_AZURE_OPENAI_API_VERSION", "2023-03-15-preview")
def _use_openai_credentials():
    """Point the global openai client at the public OpenAI API."""
    openai.api_type = "open_ai"
    openai.api_base = "https://api.openai.com/v1"
    openai.api_version = None
    openai.api_key = os.environ.get("LLM_OPENAI_API_KEY")
openai_available = os.environ.get("LLM_OPENAI_API_KEY") is not None
azure_openai_available = os.environ.get("LLM_AZURE_OPENAI_API_KEY") is not None
# This is a hack to make sure that the openai api is set correctly
# Right now it is overkill, but it will be useful when the api gets fixed and we no longer
# hardcode the model names (i.e. OpenAI fixes their api)
@contextmanager
def _openai_client(use_azure_api: bool, is_preference: bool):
    """Set the openai client to use the correct api type, if available

    Args:
        use_azure_api (bool): If true, use the azure api, else use the openai api
        is_preference (bool): If true, it can fall back to the other api if the preferred one is not available
    """
    if use_azure_api:
        if azure_openai_available:
            _use_azure_credentials()
        elif is_preference and openai_available:
            # Preferred backend missing, but the public API can stand in.
            _use_openai_credentials()
        else:
            if is_preference:
                raise EnvironmentError(
                    "No OpenAI api available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and "
                    "LLM_AZURE_OPENAI_API_VERSION environment variables or LLM_OPENAI_API_KEY environment variable"
                )
            raise EnvironmentError(
                "Azure OpenAI api not available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and "
                "LLM_AZURE_OPENAI_API_VERSION environment variables"
            )
    else:
        if openai_available:
            _use_openai_credentials()
        elif is_preference and azure_openai_available:
            # Preferred backend missing, but Azure can stand in.
            _use_azure_credentials()
        else:
            if is_preference:
                raise EnvironmentError(
                    "No OpenAI api available, please set LLM_OPENAI_API_KEY environment variable or LLM_AZURE_OPENAI_API_KEY, "
                    "LLM_AZURE_OPENAI_API_BASE and LLM_AZURE_OPENAI_API_VERSION environment variables"
                )
            raise EnvironmentError(
                "OpenAI api not available, please set LLM_OPENAI_API_KEY environment variable"
            )
    # API client is setup correctly
    yield
def _get_available_deployments(openai_models: Dict[str, List[str]], model_aliases: Dict[str, str]):
    """Query Azure for deployed models and bucket them by capability.

    Returns a dict with "chat_completion" / "completion" / "fine_tuneing"
    maps from deployment id to deployment object; all three are empty when
    the Azure backend is not configured.
    """
    available_deployments: Dict[str, Dict[str, Any]] = {
        "chat_completion": {},
        "completion": {},
        "fine_tuneing": {},
    }
    if not azure_openai_available:
        return available_deployments
    with _openai_client(use_azure_api=True, is_preference=False):
        deployments = openai.Deployment.list().get("data") or []  # type: ignore
        for deployment in deployments:
            # Normalise Azure's model naming to the canonical OpenAI name.
            canonical = model_aliases.get(deployment.model, deployment.model)
            for category in ("chat_completion", "completion", "fine_tuneing"):
                if canonical in openai_models[category]:
                    available_deployments[category][deployment.id] = deployment
                    break
    return available_deployments
def _get_available_models(openai_models: Dict[str, List[str]],
                          available_deployments: Dict[str, Dict[str, Any]]):
    """Build the name -> langchain model object map for every reachable backend."""
    available_models: Dict[str, BaseLanguageModel] = {}
    if openai_available:
        openai_api_key = os.environ["LLM_OPENAI_API_KEY"]
        # Public OpenAI: chat models and plain completion models.
        for category, factory in (("chat_completion", ChatOpenAI), ("completion", OpenAI)):
            for model_name in openai_models[category]:
                available_models[OPENAI_PREFIX + model_name] = factory(
                    model=model_name, openai_api_key=openai_api_key, client="")
    if azure_openai_available:
        azure_key = os.environ["LLM_AZURE_OPENAI_API_KEY"]
        azure_base = os.environ["LLM_AZURE_OPENAI_API_BASE"]
        azure_version = os.environ["LLM_AZURE_OPENAI_API_VERSION"]
        # Azure: one entry per deployment, keyed by the deployment name.
        for category, factory in (("chat_completion", AzureChatOpenAI), ("completion", AzureOpenAI)):
            for deployment_name, deployment in available_deployments[category].items():
                available_models[AZURE_OPENAI_PREFIX + deployment_name] = factory(
                    model=deployment.model,
                    deployment_name=deployment_name,
                    openai_api_base=azure_base,
                    openai_api_version=azure_version,
                    openai_api_key=azure_key,
                    client="",
                )
    return available_models
# Azure deployment model names that differ from the canonical OpenAI names.
_model_aliases = {
    "gpt-35-turbo": "gpt-3.5-turbo",
}
# Hardcoded because openai can't provide a trustworthy api to get the list of models and capabilities...
openai_models = {
    "chat_completion": [
        "gpt-4",
        "gpt-4-32k",
        "gpt-3.5-turbo",
        "gpt-3.5-turbo-16k"
    ],
    "completion": [
        "text-davinci-003",
        "text-curie-001",
        "text-babbage-001",
        "text-ada-001",
    ],
    "fine_tuneing": [
        "davinci",
        "curie",
        "babbage",
        "ada",
    ]
}
# Resolved once at import time: Azure deployments first, then the langchain
# model objects for every reachable backend.
available_deployments = _get_available_deployments(openai_models, _model_aliases)
available_models = _get_available_models(openai_models, available_deployments)
logger.info("Available openai models: %s", ", ".join(available_models.keys()))
# Dynamic enum over the available model names (used for config validation).
OpenAIModel = Enum('OpenAIModel', {name: name for name in available_models}) # type: ignore
# NOTE(review): available_models keys carry OPENAI_PREFIX/AZURE_OPENAI_PREFIX,
# so the bare default "gpt-3.5-turbo" only resolves if OPENAI_PREFIX is "" —
# confirm the prefix values, otherwise this lookup raises KeyError at import.
default_openai_model = OpenAIModel[os.environ.get("LLM_DEFAULT_MODEL", "gpt-3.5-turbo")]
# Long descriptions will be displayed in the playground UI and are copied from the OpenAI docs
class OpenAIModelConfig(ModelConfig):
"""OpenAI LLM configuration."""
model_name: OpenAIModel = Field(default=default_openai_model, # type: ignore
description="The name of the model to use.")
max_tokens: PositiveInt = Field(1024, description="""\
The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion.
The total length of input tokens and generated tokens is limited by the model's context length. \
[Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\
""")
temperature: float = Field(default=0.0, ge=0, le=2, description="""\
What sampling temperature to use, between | model_name = model_aliases[model_name] | conditional_block |
openai.py | _API_KEY")
openai.api_base = os.environ.get("LLM_AZURE_OPENAI_API_BASE")
# os.environ.get("LLM_AZURE_OPENAI_API_VERSION")
openai.api_version = "2023-03-15-preview"
def _use_openai_credentials():
openai.api_type = "open_ai"
openai.api_key = os.environ.get("LLM_OPENAI_API_KEY")
openai.api_base = "https://api.openai.com/v1"
openai.api_version = None
openai_available = os.environ.get("LLM_OPENAI_API_KEY") is not None
azure_openai_available = os.environ.get("LLM_AZURE_OPENAI_API_KEY") is not None
# This is a hack to make sure that the openai api is set correctly
# Right now it is overkill, but it will be useful when the api gets fixed and we no longer
# hardcode the model names (i.e. OpenAI fixes their api)
@contextmanager
def _openai_client(use_azure_api: bool, is_preference: bool):
"""Set the openai client to use the correct api type, if available
Args:
use_azure_api (bool): If true, use the azure api, else use the openai api
is_preference (bool): If true, it can fall back to the other api if the preferred one is not available
"""
if use_azure_api:
if azure_openai_available:
_use_azure_credentials()
elif is_preference and openai_available:
_use_openai_credentials()
elif is_preference:
raise EnvironmentError(
"No OpenAI api available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and "
"LLM_AZURE_OPENAI_API_VERSION environment variables or LLM_OPENAI_API_KEY environment variable"
)
else:
raise EnvironmentError(
"Azure OpenAI api not available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and "
"LLM_AZURE_OPENAI_API_VERSION environment variables"
)
else:
if openai_available:
_use_openai_credentials()
elif is_preference and azure_openai_available:
_use_azure_credentials()
elif is_preference:
raise EnvironmentError(
"No OpenAI api available, please set LLM_OPENAI_API_KEY environment variable or LLM_AZURE_OPENAI_API_KEY, "
"LLM_AZURE_OPENAI_API_BASE and LLM_AZURE_OPENAI_API_VERSION environment variables"
)
else:
raise EnvironmentError(
"OpenAI api not available, please set LLM_OPENAI_API_KEY environment variable"
)
# API client is setup correctly
yield
def _get_available_deployments(openai_models: Dict[str, List[str]], model_aliases: Dict[str, str]):
available_deployments: Dict[str, Dict[str, Any]] = {
"chat_completion": {},
"completion": {},
"fine_tuneing": {},
}
if azure_openai_available:
with _openai_client(use_azure_api=True, is_preference=False):
deployments = openai.Deployment.list().get("data") or [] # type: ignore
for deployment in deployments:
model_name = deployment.model
if model_name in model_aliases:
model_name = model_aliases[model_name]
if model_name in openai_models["chat_completion"]:
available_deployments["chat_completion"][deployment.id] = deployment
elif model_name in openai_models["completion"]:
available_deployments["completion"][deployment.id] = deployment
elif model_name in openai_models["fine_tuneing"]:
available_deployments["fine_tuneing"][deployment.id] = deployment
return available_deployments
def _get_available_models(openai_models: Dict[str, List[str]],
available_deployments: Dict[str, Dict[str, Any]]):
available_models: Dict[str, BaseLanguageModel] = {}
if openai_available:
openai_api_key = os.environ["LLM_OPENAI_API_KEY"]
for model_name in openai_models["chat_completion"]:
available_models[OPENAI_PREFIX + model_name] = ChatOpenAI(
model=model_name, openai_api_key=openai_api_key, client="")
for model_name in openai_models["completion"]:
available_models[OPENAI_PREFIX + model_name] = OpenAI(
model=model_name, openai_api_key=openai_api_key, client="")
if azure_openai_available:
azure_openai_api_key = os.environ["LLM_AZURE_OPENAI_API_KEY"]
azure_openai_api_base = os.environ["LLM_AZURE_OPENAI_API_BASE"]
azure_openai_api_version = os.environ["LLM_AZURE_OPENAI_API_VERSION"]
for model_type, Model in [("chat_completion", AzureChatOpenAI), ("completion", AzureOpenAI)]:
for deployment_name, deployment in available_deployments[model_type].items():
available_models[AZURE_OPENAI_PREFIX + deployment_name] = Model(
model=deployment.model,
deployment_name=deployment_name,
openai_api_base=azure_openai_api_base,
openai_api_version=azure_openai_api_version,
openai_api_key=azure_openai_api_key,
client="",
)
return available_models
_model_aliases = {
"gpt-35-turbo": "gpt-3.5-turbo",
}
# Hardcoded because openai can't provide a trustworthly api to get the list of models and capabilities...
openai_models = {
"chat_completion": [
"gpt-4",
"gpt-4-32k",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k"
],
"completion": [
"text-davinci-003",
"text-curie-001",
"text-babbage-001",
"text-ada-001",
],
"fine_tuneing": [
"davinci",
"curie",
"babbage",
"ada",
]
}
available_deployments = _get_available_deployments(openai_models, _model_aliases)
available_models = _get_available_models(openai_models, available_deployments)
logger.info("Available openai models: %s", ", ".join(available_models.keys()))
OpenAIModel = Enum('OpenAIModel', {name: name for name in available_models}) # type: ignore
default_openai_model = OpenAIModel[os.environ.get("LLM_DEFAULT_MODEL", "gpt-3.5-turbo")]
# Long descriptions will be displayed in the playground UI and are copied from the OpenAI docs
class OpenAIModelConfig(ModelConfig):
"""OpenAI LLM configuration."""
model_name: OpenAIModel = Field(default=default_openai_model, # type: ignore
description="The name of the model to use.")
max_tokens: PositiveInt = Field(1024, description="""\
The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion.
The total length of input tokens and generated tokens is limited by the model's context length. \
[Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\
""")
temperature: float = Field(default=0.0, ge=0, le=2, description="""\
What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, \
while lower values like 0.2 will make it more focused and deterministic.
We generally recommend altering this or `top_p` but not both.\
""")
top_p: float = Field(default=1, ge=0, le=1, description="""\
An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. \
So 0.1 means only the tokens comprising the top 10% probability mass are considered.
We generally recommend altering this or `temperature` but not both.\
""")
presence_penalty: float = Field(default=0, ge=-2, le=2, description="""\
Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, \
increasing the model's likelihood to talk about new topics.
[See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\
""")
frequency_penalty: float = Field(default=0, ge=-2, le=2, description="""\
Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, \
decreasing the model's likelihood to repeat the same line verbatim.
[See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\
""")
@validator('max_tokens')
def max_tokens_must_be_positive(cls, v):
"""
Validate that max_tokens is a positive integer.
"""
if v <= 0:
raise ValueError('max_tokens must be a positive integer')
return v
def get_model(self) -> BaseLanguageModel: | """Get the model from the configuration.
Returns: | random_line_split | |
openai.py | _key
api_type = "open_ai"
api_base = "https://api.openai.com/v1"
api_version = None
if hasattr(self, "openai_api_type"):
api_type = self.openai_api_type
if api_type == "azure":
if hasattr(self, "openai_api_base"):
api_base = self.openai_api_base
if hasattr(self, "openai_api_version"):
api_version = self.openai_api_version
openai.api_type = api_type
openai.api_base = api_base
openai.api_version = api_version
# Monkey patching langchain
# pylint: disable=protected-access
ChatOpenAI._generate = _wrap(ChatOpenAI._generate, _set_credentials) # type: ignore
ChatOpenAI._agenerate = _async_wrap(ChatOpenAI._agenerate, _set_credentials) # type: ignore
BaseOpenAI._generate = _wrap(BaseOpenAI._generate, _set_credentials) # type: ignore
BaseOpenAI._agenerate = _async_wrap(BaseOpenAI._agenerate, _set_credentials) # type: ignore
# pylint: enable=protected-access
#########################################################################
# Monkey patching end #
#########################################################################
def _use_azure_credentials():
openai.api_type = "azure"
openai.api_key = os.environ.get("LLM_AZURE_OPENAI_API_KEY")
openai.api_base = os.environ.get("LLM_AZURE_OPENAI_API_BASE")
# os.environ.get("LLM_AZURE_OPENAI_API_VERSION")
openai.api_version = "2023-03-15-preview"
def _use_openai_credentials():
openai.api_type = "open_ai"
openai.api_key = os.environ.get("LLM_OPENAI_API_KEY")
openai.api_base = "https://api.openai.com/v1"
openai.api_version = None
openai_available = os.environ.get("LLM_OPENAI_API_KEY") is not None
azure_openai_available = os.environ.get("LLM_AZURE_OPENAI_API_KEY") is not None
# This is a hack to make sure that the openai api is set correctly
# Right now it is overkill, but it will be useful when the api gets fixed and we no longer
# hardcode the model names (i.e. OpenAI fixes their api)
@contextmanager
def _openai_client(use_azure_api: bool, is_preference: bool):
"""Set the openai client to use the correct api type, if available
Args:
use_azure_api (bool): If true, use the azure api, else use the openai api
is_preference (bool): If true, it can fall back to the other api if the preferred one is not available
"""
if use_azure_api:
if azure_openai_available:
_use_azure_credentials()
elif is_preference and openai_available:
_use_openai_credentials()
elif is_preference:
raise EnvironmentError(
"No OpenAI api available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and "
"LLM_AZURE_OPENAI_API_VERSION environment variables or LLM_OPENAI_API_KEY environment variable"
)
else:
raise EnvironmentError(
"Azure OpenAI api not available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and "
"LLM_AZURE_OPENAI_API_VERSION environment variables"
)
else:
if openai_available:
_use_openai_credentials()
elif is_preference and azure_openai_available:
_use_azure_credentials()
elif is_preference:
raise EnvironmentError(
"No OpenAI api available, please set LLM_OPENAI_API_KEY environment variable or LLM_AZURE_OPENAI_API_KEY, "
"LLM_AZURE_OPENAI_API_BASE and LLM_AZURE_OPENAI_API_VERSION environment variables"
)
else:
raise EnvironmentError(
"OpenAI api not available, please set LLM_OPENAI_API_KEY environment variable"
)
# API client is setup correctly
yield
def _get_available_deployments(openai_models: Dict[str, List[str]], model_aliases: Dict[str, str]):
available_deployments: Dict[str, Dict[str, Any]] = {
"chat_completion": {},
"completion": {},
"fine_tuneing": {},
}
if azure_openai_available:
with _openai_client(use_azure_api=True, is_preference=False):
deployments = openai.Deployment.list().get("data") or [] # type: ignore
for deployment in deployments:
model_name = deployment.model
if model_name in model_aliases:
model_name = model_aliases[model_name]
if model_name in openai_models["chat_completion"]:
available_deployments["chat_completion"][deployment.id] = deployment
elif model_name in openai_models["completion"]:
available_deployments["completion"][deployment.id] = deployment
elif model_name in openai_models["fine_tuneing"]:
available_deployments["fine_tuneing"][deployment.id] = deployment
return available_deployments
def _get_available_models(openai_models: Dict[str, List[str]],
available_deployments: Dict[str, Dict[str, Any]]):
available_models: Dict[str, BaseLanguageModel] = {}
if openai_available:
openai_api_key = os.environ["LLM_OPENAI_API_KEY"]
for model_name in openai_models["chat_completion"]:
available_models[OPENAI_PREFIX + model_name] = ChatOpenAI(
model=model_name, openai_api_key=openai_api_key, client="")
for model_name in openai_models["completion"]:
available_models[OPENAI_PREFIX + model_name] = OpenAI(
model=model_name, openai_api_key=openai_api_key, client="")
if azure_openai_available:
azure_openai_api_key = os.environ["LLM_AZURE_OPENAI_API_KEY"]
azure_openai_api_base = os.environ["LLM_AZURE_OPENAI_API_BASE"]
azure_openai_api_version = os.environ["LLM_AZURE_OPENAI_API_VERSION"]
for model_type, Model in [("chat_completion", AzureChatOpenAI), ("completion", AzureOpenAI)]:
for deployment_name, deployment in available_deployments[model_type].items():
available_models[AZURE_OPENAI_PREFIX + deployment_name] = Model(
model=deployment.model,
deployment_name=deployment_name,
openai_api_base=azure_openai_api_base,
openai_api_version=azure_openai_api_version,
openai_api_key=azure_openai_api_key,
client="",
)
return available_models
_model_aliases = {
"gpt-35-turbo": "gpt-3.5-turbo",
}
# Hardcoded because openai can't provide a trustworthly api to get the list of models and capabilities...
openai_models = {
"chat_completion": [
"gpt-4",
"gpt-4-32k",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k"
],
"completion": [
"text-davinci-003",
"text-curie-001",
"text-babbage-001",
"text-ada-001",
],
"fine_tuneing": [
"davinci",
"curie",
"babbage",
"ada",
]
}
available_deployments = _get_available_deployments(openai_models, _model_aliases)
available_models = _get_available_models(openai_models, available_deployments)
logger.info("Available openai models: %s", ", ".join(available_models.keys()))
OpenAIModel = Enum('OpenAIModel', {name: name for name in available_models}) # type: ignore
default_openai_model = OpenAIModel[os.environ.get("LLM_DEFAULT_MODEL", "gpt-3.5-turbo")]
# Long descriptions will be displayed in the playground UI and are copied from the OpenAI docs
class OpenAIModelConfig(ModelConfig):
| """OpenAI LLM configuration."""
model_name: OpenAIModel = Field(default=default_openai_model, # type: ignore
description="The name of the model to use.")
max_tokens: PositiveInt = Field(1024, description="""\
The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion.
The total length of input tokens and generated tokens is limited by the model's context length. \
[Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\
""")
temperature: float = Field(default=0.0, ge=0, le=2, description="""\
What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, \
while lower values like 0.2 will make it more focused and deterministic.
We generally recommend altering this or `top_p` but not both.\
""")
top_p: float = Field(default=1, ge=0, le=1, description="""\
An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. \ | identifier_body | |
openai.py | : Any) -> Any:
new(args[0]) # args[0] is self
return old(*args, **kwargs)
return repl
def | (old: Any, new: Any):
async def repl(*args, **kwargs):
new(args[0]) # args[0] is self
return await old(*args, **kwargs)
return repl
def _set_credentials(self):
    """Copy this model object's credentials onto the global openai client.

    Defaults to the public OpenAI endpoint; when the object is configured
    for Azure, its base url and api version override the defaults.
    """
    openai.api_key = self.openai_api_key
    api_type = getattr(self, "openai_api_type", "open_ai")
    api_base = "https://api.openai.com/v1"
    api_version = None
    if api_type == "azure":
        api_base = getattr(self, "openai_api_base", api_base)
        api_version = getattr(self, "openai_api_version", None)
    openai.api_type = api_type
    openai.api_base = api_base
    openai.api_version = api_version
# Monkey patching langchain
# pylint: disable=protected-access
ChatOpenAI._generate = _wrap(ChatOpenAI._generate, _set_credentials) # type: ignore
ChatOpenAI._agenerate = _async_wrap(ChatOpenAI._agenerate, _set_credentials) # type: ignore
BaseOpenAI._generate = _wrap(BaseOpenAI._generate, _set_credentials) # type: ignore
BaseOpenAI._agenerate = _async_wrap(BaseOpenAI._agenerate, _set_credentials) # type: ignore
# pylint: enable=protected-access
#########################################################################
# Monkey patching end #
#########################################################################
def _use_azure_credentials():
openai.api_type = "azure"
openai.api_key = os.environ.get("LLM_AZURE_OPENAI_API_KEY")
openai.api_base = os.environ.get("LLM_AZURE_OPENAI_API_BASE")
# os.environ.get("LLM_AZURE_OPENAI_API_VERSION")
openai.api_version = "2023-03-15-preview"
def _use_openai_credentials():
openai.api_type = "open_ai"
openai.api_key = os.environ.get("LLM_OPENAI_API_KEY")
openai.api_base = "https://api.openai.com/v1"
openai.api_version = None
openai_available = os.environ.get("LLM_OPENAI_API_KEY") is not None
azure_openai_available = os.environ.get("LLM_AZURE_OPENAI_API_KEY") is not None
# This is a hack to make sure that the openai api is set correctly
# Right now it is overkill, but it will be useful when the api gets fixed and we no longer
# hardcode the model names (i.e. OpenAI fixes their api)
@contextmanager
def _openai_client(use_azure_api: bool, is_preference: bool):
"""Set the openai client to use the correct api type, if available
Args:
use_azure_api (bool): If true, use the azure api, else use the openai api
is_preference (bool): If true, it can fall back to the other api if the preferred one is not available
"""
if use_azure_api:
if azure_openai_available:
_use_azure_credentials()
elif is_preference and openai_available:
_use_openai_credentials()
elif is_preference:
raise EnvironmentError(
"No OpenAI api available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and "
"LLM_AZURE_OPENAI_API_VERSION environment variables or LLM_OPENAI_API_KEY environment variable"
)
else:
raise EnvironmentError(
"Azure OpenAI api not available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and "
"LLM_AZURE_OPENAI_API_VERSION environment variables"
)
else:
if openai_available:
_use_openai_credentials()
elif is_preference and azure_openai_available:
_use_azure_credentials()
elif is_preference:
raise EnvironmentError(
"No OpenAI api available, please set LLM_OPENAI_API_KEY environment variable or LLM_AZURE_OPENAI_API_KEY, "
"LLM_AZURE_OPENAI_API_BASE and LLM_AZURE_OPENAI_API_VERSION environment variables"
)
else:
raise EnvironmentError(
"OpenAI api not available, please set LLM_OPENAI_API_KEY environment variable"
)
# API client is setup correctly
yield
def _get_available_deployments(openai_models: Dict[str, List[str]], model_aliases: Dict[str, str]):
available_deployments: Dict[str, Dict[str, Any]] = {
"chat_completion": {},
"completion": {},
"fine_tuneing": {},
}
if azure_openai_available:
with _openai_client(use_azure_api=True, is_preference=False):
deployments = openai.Deployment.list().get("data") or [] # type: ignore
for deployment in deployments:
model_name = deployment.model
if model_name in model_aliases:
model_name = model_aliases[model_name]
if model_name in openai_models["chat_completion"]:
available_deployments["chat_completion"][deployment.id] = deployment
elif model_name in openai_models["completion"]:
available_deployments["completion"][deployment.id] = deployment
elif model_name in openai_models["fine_tuneing"]:
available_deployments["fine_tuneing"][deployment.id] = deployment
return available_deployments
def _get_available_models(openai_models: Dict[str, List[str]],
available_deployments: Dict[str, Dict[str, Any]]):
available_models: Dict[str, BaseLanguageModel] = {}
if openai_available:
openai_api_key = os.environ["LLM_OPENAI_API_KEY"]
for model_name in openai_models["chat_completion"]:
available_models[OPENAI_PREFIX + model_name] = ChatOpenAI(
model=model_name, openai_api_key=openai_api_key, client="")
for model_name in openai_models["completion"]:
available_models[OPENAI_PREFIX + model_name] = OpenAI(
model=model_name, openai_api_key=openai_api_key, client="")
if azure_openai_available:
azure_openai_api_key = os.environ["LLM_AZURE_OPENAI_API_KEY"]
azure_openai_api_base = os.environ["LLM_AZURE_OPENAI_API_BASE"]
azure_openai_api_version = os.environ["LLM_AZURE_OPENAI_API_VERSION"]
for model_type, Model in [("chat_completion", AzureChatOpenAI), ("completion", AzureOpenAI)]:
for deployment_name, deployment in available_deployments[model_type].items():
available_models[AZURE_OPENAI_PREFIX + deployment_name] = Model(
model=deployment.model,
deployment_name=deployment_name,
openai_api_base=azure_openai_api_base,
openai_api_version=azure_openai_api_version,
openai_api_key=azure_openai_api_key,
client="",
)
return available_models
_model_aliases = {
"gpt-35-turbo": "gpt-3.5-turbo",
}
# Hardcoded because openai can't provide a trustworthly api to get the list of models and capabilities...
openai_models = {
"chat_completion": [
"gpt-4",
"gpt-4-32k",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k"
],
"completion": [
"text-davinci-003",
"text-curie-001",
"text-babbage-001",
"text-ada-001",
],
"fine_tuneing": [
"davinci",
"curie",
"babbage",
"ada",
]
}
available_deployments = _get_available_deployments(openai_models, _model_aliases)
available_models = _get_available_models(openai_models, available_deployments)
logger.info("Available openai models: %s", ", ".join(available_models.keys()))
OpenAIModel = Enum('OpenAIModel', {name: name for name in available_models}) # type: ignore
default_openai_model = OpenAIModel[os.environ.get("LLM_DEFAULT_MODEL", "gpt-3.5-turbo")]
# Long descriptions will be displayed in the playground UI and are copied from the OpenAI docs
class OpenAIModelConfig(ModelConfig):
"""OpenAI LLM configuration."""
model_name: OpenAIModel = Field(default=default_openai_model, # type: ignore
description="The name of the model to use.")
max_tokens: PositiveInt = Field(1024, description="""\
The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion.
The total length of input tokens and generated tokens is limited by the model's context length. \
[Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\
""")
temperature: float = Field(default=0.0, ge=0, le=2, description="""\
What sampling temperature to use, between 0 | _async_wrap | identifier_name |
devicecash_trend.js | (){
fill = d3.scale.category20b();
w = $("#cloudchart").width();
h = $("#cloudchart").height();
words = [],max,scale = 1,complete = 0,
keyword = "",
tags,
fontSize,
maxLength = 30,
fetcher,
statusText ="";
layout = d3.layout.cloud()
.timeInterval(10)
.size([w, h])
.fontSize(function(d) { return fontSize(+d.value); })
.text(function(d) { return d.key; })
.on("end", draw);
svg = d3.select("#cloudchart").append("svg").attr("width", w).attr("height", h);
background = svg.append("g"),
vis = svg.append("g").attr("transform", "translate(" + [w >> 1, h >> 1] + ")");
tags=[];
for(var i=0;i<OldData.length;i++){
tags.push({key:OldData[i].group,value:Math.abs(parseInt(OldData[i].items[0]["日现金流"]))});
}
generate();
DeviceChange(OldData[0].group);//默认显示第一个装置
}
var fontfamily="黑体",spiralvalue="archimedean",fontsizecaltype="log",maxlength=1000;
// Recompute the word-cloud layout from the global `tags` using the current
// font/spiral/scale settings, then kick off an asynchronous layout run that
// ends in draw().
function generate() {
    layout.font(fontfamily).spiral(spiralvalue);
    // Map tag values onto a 10..18px font range; domain is [smallest, largest]
    // because `tags` is expected sorted descending by value.
    fontSize = d3.scale[fontsizecaltype]().range([10, 18]);
    if (tags.length) {
        fontSize.domain([+tags[tags.length - 1].value || 1, +tags[0].value]);
    }
    complete = 0;
    words = [];
    max = Math.min(tags.length, +maxlength);
    layout.stop().words(tags.slice(0, max)).start();
}
// Render/animate the word-cloud <text> nodes for the laid-out `data`,
// scaling the whole cloud so the layout bounds fit inside the svg.
function draw(data, bounds) {
    // Scale factor so the cloud's bounding box fits within w x h (1 when the
    // layout reported no bounds).
    scale = bounds ? Math.min(
        w / Math.abs(bounds[1].x - w / 2),
        w / Math.abs(bounds[0].x - w / 2),
        h / Math.abs(bounds[1].y - h / 2),
        h / Math.abs(bounds[0].y - h / 2)) / 2 : 1;
    words = data;
    // Join words to <text> nodes, keyed by lower-cased label.
    var text = vis.selectAll("text")
        .data(words, function(d) { return d.text.toLowerCase(); });
    // Existing words glide to their new position and size.
    text.transition()
        .duration(1000)
        .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; })
        .style("font-size", function(d) { return d.size + "px"; });
    // New words grow in from 1px.
    text.enter().append("text")
        .attr("text-anchor", "middle")
        .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; })
        .style("font-size", "1px")
        .transition()
        .duration(1000)
        .style("font-size", function(d) {
            return d.size + "px";
        });
    text.style("font-family", function(d) { return d.font; })
        .style("cursor","pointer")
        .style("fill", function(d) {
            // Color by the device's daily cash flow: teal when positive,
            // red otherwise.
            var tempvalue=getdevicevalue(d.text,"日现金流");
            if(tempvalue>0){
                return "#0cc7a2";
            }else{
                return "#d45e58";
            }
            //return fill(d.text.toLowerCase());
        })
        .text(function(d) { return d.text; });
    // Removed words are moved into the background group and faded out so they
    // don't interfere with the incoming layout.
    var exitGroup = background.append("g")
        .attr("transform", vis.attr("transform"));
    var exitGroupNode = exitGroup.node();
    text.exit().each(function() {
        exitGroupNode.appendChild(this);
    });
    exitGroup.transition()
        .duration(1000)
        .style("opacity", 1e-6)
        .remove();
    // Re-center and rescale the whole cloud after the word transitions finish.
    vis.transition()
        .delay(1000)
        .duration(750)
        .attr("transform", "translate(" + [w >> 1, h >> 1] + ")scale(" + scale + ")");
    // Clicking a word switches the dashboard to that device.
    vis.selectAll("text").on("click",function(ev){
        // ev is the laid-out word datum: {text, font, rotate, size, padding,
        // width, height, xoff, yoff, x1, y1, x0, y0, x, y}
        DeviceChange(ev.text);
    });
}
//
function getdevicevalue(_devicename,_cashcol){
var tempvalue=0;
for(var i=0;i<OldData.length;i++){
if(_devicename==OldData[i].group){
for(var j=0;j<OldData[i].items.length;j++){
tempvalue+=OldData[i].items[j][_cashcol];
}
break;
}
}
return tempvalue;
}
//获取装置信息
function getdeviceinfo(_devicename){
var rowdata=null;
for(var i=0;i<OldData.length;i++){
| name==OldData[i].group){
rowdata=OldData[i].items;
break;
}
}
return rowdata;
}
function DeviceChange(_deviceName){
var _deviceitems=getdeviceinfo(_deviceName);
KPICompare(_deviceitems[0]);
DrawLineChart(_deviceitems);
}
//装置环比信息KPI
function KPICompare(_deviceinfo){
$("#cashkpi").html("");
var KPIItems=[
{v1:"日现金流",value1:parseInt(_deviceinfo["日现金流"]),v2:"上月日现金流",value2:parseInt(_deviceinfo["上月日现金流"])},
{v1:"累计现金流",value1:parseInt(_deviceinfo["累计现金流"]),v2:"上月累计现金流",value2:parseInt(_deviceinfo["上月累计现金流"])},
{v1:"预计现金流",value1:parseInt(_deviceinfo["预计现金流"]),v2:"上月预计现金流",value2:parseInt(_deviceinfo["上月预计现金流"])}
]
$("#cashkpi").append("<div id='kpicompare_item1' class='rowpanel'></div>");
$("#cashkpi").append("<div id='kpicompare_item2' class='rowpanel'></div>");
$("#cashkpi").append("<div id='kpicompare_item3' class='rowpanel'></div>");
for(var i=1;i<=KPIItems.length;i++){
DrawCashBar(KPIItems[i-1].value1,KPIItems[i-1].value2,"kpicompare_item"+i,KPIItems[i-1].v1,KPIItems[i-1].v2);
}
}
function DrawCashBar(_value,_firstvalue,_Panelid,_valuename,_firstname){
var differencevalue=_value-_firstvalue;
var sumvalue=0;
if(differencevalue>0){
sumvalue=differencevalue+_value;
}else{
sumvalue=Math.abs(differencevalue)+_firstvalue;
}
var Panel=$("#"+_Panelid);
var TextPanel=$("<div class='textpanel'></div>");
var BarPanel=$("<div class='Barpanel'></div>");
Panel.append(TextPanel);
Panel.append(BarPanel);
var steppx=(BarPanel.width()-200)/sumvalue;
var leftbarwidth=steppx*Math.abs(_value);
var leftpanel=$("<div class='panelbar'></div>");
var lefttext=$("<div>"+_valuename+":</br>"+_value+"万元</div>")
BarPanel.append(leftpanel);
TextPanel.append(lefttext);
var backgroundcolor="#d45e58";
if(differencevalue>=0 && _value>=0){
backgroundcolor="#0cc7a2";
lefttext.css({
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor
})
}else{
if(_value>=0){
lefttext.css({
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor
})
}else{
lefttext.css({
"position":"absolute",
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor,
"left":(Panel.offset().left+10)+"px"
})
}
}
leftpanel.css({
"width":leftbarwidth+"px",
"height":"20px",
"background-color":backgroundcolor
});
var centerpanel=$("<div class='centerpanelbar'></div>")
var centertext=$("<div class='centertext'>"+_firstname+":</br>"+_firstvalue+"万元</div>")
BarPanel.append(centerpanel);
TextPanel.append(centertext);
centerpanel.css({
"left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px"
})
centertext.css({
"left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px"
})
var rightpanelleftvalue=0;
if(differencevalue>=0 && _value>=0){
rightpanelleftvalue=steppx*Math.abs(_value)+150;
}else{
rightpanelleftvalue=steppx*Math.abs(_firstvalue)+150;
}
var rightbarwidth=steppx | if(_device | identifier_name |
devicecash_trend.js | .abs(bounds[1].x - w / 2),
w / Math.abs(bounds[0].x - w / 2),
h / Math.abs(bounds[1].y - h / 2),
h / Math.abs(bounds[0].y - h / 2)) / 2 : 1;
words = data;
var text = vis.selectAll("text")
.data(words, function(d) { return d.text.toLowerCase(); });
text.transition()
.duration(1000)
.attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; })
.style("font-size", function(d) { return d.size + "px"; });
text.enter().append("text")
.attr("text-anchor", "middle")
.attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; })
.style("font-size", "1px")
.transition()
.duration(1000)
.style("font-size", function(d) {
return d.size + "px";
});
text.style("font-family", function(d) { return d.font; })
.style("cursor","pointer")
.style("fill", function(d) {
var tempvalue=getdevicevalue(d.text,"日现金流");
if(tempvalue>0){
return "#0cc7a2";
}else{
return "#d45e58";
}
//return fill(d.text.toLowerCase());
})
.text(function(d) { return d.text; });
var exitGroup = background.append("g")
.attr("transform", vis.attr("transform"));
var exitGroupNode = exitGroup.node();
text.exit().each(function() {
exitGroupNode.appendChild(this);
});
exitGroup.transition()
.duration(1000)
.style("opacity", 1e-6)
.remove();
vis.transition()
.delay(1000)
.duration(750)
.attr("transform", "translate(" + [w >> 1, h >> 1] + ")scale(" + scale + ")");
vis.selectAll("text").on("click",function(ev){
//ev:{"text":"1#乙二醇-环氧乙烷","font":,"rotate","size","padding","width":,"height":,"xoff":,"yoff":,"x1":,"y1":,"x0":,"y0":,"x":,"y"}
DeviceChange(ev.text);
});
}
//
function getdevicevalue(_devicename,_cashcol){
var tempvalue=0;
for(var i=0;i<OldData.length;i++){
if(_devicename==OldData[i].group){
for(var j=0;j<OldData[i].items.length;j++){
tempvalue+=OldData[i].items[j][_cashcol];
}
break;
}
}
return tempvalue;
}
//获取装置信息
function getdeviceinfo(_devicename){
var rowdata=null;
for(var i=0;i<OldData.length;i++){
if(_devicename==OldData[i].group){
rowdata=OldData[i].items;
break;
}
}
return rowdata;
}
function DeviceChange(_deviceName){
var _deviceitems=getdeviceinfo(_deviceName);
KPICompare(_deviceitems[0]);
DrawLineChart(_deviceitems);
}
//装置环比信息KPI
function KPICompare(_deviceinfo){
$("#cashkpi").html("");
var KPIItems=[
{v1:"日现金流",value1:parseInt(_deviceinfo["日现金流"]),v2:"上月日现金流",value2:parseInt(_deviceinfo["上月日现金流"])},
{v1:"累计现金流",value1:parseInt(_deviceinfo["累计现金流"]),v2:"上月累计现金流",value2:parseInt(_deviceinfo["上月累计现金流"])},
{v1:"预计现金流",value1:parseInt(_deviceinfo["预计现金流"]),v2:"上月预计现金流",value2:parseInt(_deviceinfo["上月预计现金流"])}
]
$("#cashkpi").append("<div id='kpicompare_item1' class='rowpanel'></div>");
$("#cashkpi").append("<div id='kpicompare_item2' class='rowpanel'></div>");
$("#cashkpi").append("<div id='kpicompare_item3' class='rowpanel'></div>");
for(var i=1;i<=KPIItems.length;i++){
DrawCashBar(KPIItems[i-1].value1,KPIItems[i-1].value2,"kpicompare_item"+i,KPIItems[i-1].v1,KPIItems[i-1].v2);
}
}
function DrawCashBar(_value,_firstvalue,_Panelid,_valuename,_firstname){
var differencevalue=_value-_firstvalue;
var sumvalue=0;
if(differencevalue>0){
sumvalue=differencevalue+_value;
}else{
sumvalue=Math.abs(differencevalue)+_firstvalue;
}
var Panel=$("#"+_Panelid);
var TextPanel=$("<div class='textpanel'></div>");
var BarPanel=$("<div class='Barpanel'></div>");
Panel.append(TextPanel);
Panel.append(BarPanel);
var steppx=(BarPanel.width()-200)/sumvalue;
var leftbarwidth=steppx*Math.abs(_value);
var leftpanel=$("<div class='panelbar'></div>");
var lefttext=$("<div>"+_valuename+":</br>"+_value+"万元</div>")
BarPanel.append(leftpanel);
TextPanel.append(lefttext);
var backgroundcolor="#d45e58";
if(differencevalue>=0 && _value>=0){
backgroundcolor="#0cc7a2";
lefttext.css({
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor
})
}else{
if(_value>=0){
lefttext.css({
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor
})
}else{
lefttext.css({
"position":"absolute",
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor,
"left":(Panel.offset().left+10)+"px"
})
}
}
leftpanel.css({
"width":leftbarwidth+"px",
"height":"20px",
"background-color":backgroundcolor
});
var centerpanel=$("<div class='centerpanelbar'></div>")
var centertext=$("<div class='centertext'>"+_firstname+":</br>"+_firstvalue+"万元</div>")
BarPanel.append(centerpanel);
TextPanel.append(centertext);
centerpanel.css({
"left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px"
})
centertext.css({
"left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px"
})
var rightpanelleftvalue=0;
if(differencevalue>=0 && _value>=0){
rightpanelleftvalue=steppx*Math.abs(_value)+150;
}else{
rightpanelleftvalue=steppx*Math.abs(_firstvalue)+150;
}
var rightbarwidth=steppx*Math.abs(differencevalue);
var rightpanel=$("<div class='rightpanelbar'></div>");
var righttext=$("<div>差:</br>"+differencevalue+"万元</div>");
BarPanel.append(rightpanel);
TextPanel.append(righttext);
backgroundcolor="#d45e58";
if(differencevalue>=0 && _value>=0){
backgroundcolor="#0cc7a2";
righttext.css({
"color":backgroundcolor,
"position":"absolute",
"left":rightpanelleftvalue+Panel.offset().left+"px",
"border-left":"solid 1px "+backgroundcolor
})
}else{
righttext.css({
"position":"absolute",
"color":backgroundcolor,
"position":"absolute",
"border-right":"solid 1px "+backgroundcolor,
"left":(rightpanelleftvalue+Panel.offset().left+rightbarwidth-righttext.width()-2)+"px"
})
}
rightpanel.css({
"width":rightbarwidth+"px",
"height":"20px",
"background-color":backgroundcolor,
"left":rightpanelleftvalue+Panel.offset().left+"px"
});
}
function DrawLineChart(_Deviceitems){
var SeriesData=[{name:"日现金流",data:[],color:"#12a771"},{name:"上月日现金流",color:"#b54f3e",data:[]}];
if(_Deviceitems!=null && _Deviceitems.length>0){
for(var i=0;i<_Deviceitems.length;i++){
for(var j=0;j<SeriesData.length;j++){
SeriesData[j].data.push([i+1+"日",parseInt(_Deviceitems[i][SeriesData[j].name])]);
}
}
}
$('#monthchart').highcharts({
chart:{
backgroundColor:"" | },
credits:{enabled:false},
xAxis:{
labels:{ | random_line_split | |
devicecash_trend.js | fill = d3.scale.category20b();
w = $("#cloudchart").width();
h = $("#cloudchart").height();
words = [],max,scale = 1,complete = 0,
keyword = "",
tags,
fontSize,
maxLength = 30,
fetcher,
statusText ="";
layout = d3.layout.cloud()
.timeInterval(10)
.size([w, h])
.fontSize(function(d) { return fontSize(+d.value); })
.text(function(d) { return d.key; })
.on("end", draw);
svg = d3.select("#cloudchart").append("svg").attr("width", w).attr("height", h);
background = svg.append("g"),
vis = svg.append("g").attr("transform", "translate(" + [w >> 1, h >> 1] + ")");
tags=[];
for(var i=0;i<OldData.length;i++){
tags.push({key:OldData[i].group,value:Math.abs(parseInt(OldData[i].items[0]["日现金流"]))});
}
generate();
DeviceChange(OldData[0].group);//默认显示第一个装置
}
var fontfamily="黑体",spiralvalue="archimedean",fontsizecaltype="log",maxlength=1000;
function generate() {
layout
.font(fontfamily)
.spiral(spiralvalue);
fontSize = d3.scale[fontsizecaltype]().range([10, 18]);
if (tags.length){
fontSize.domain([+tags[tags.length - 1].value || 1, +tags[0].value]);
}
complete = 0;
words = [];
layout.stop().words(tags.slice(0, max = Math.min(tags.length, +maxlength))).start();
}
function draw(data, bounds) {
scale = bounds ? Math.min(
w / Math.abs(bounds[1].x - w / 2),
w / Math.abs(bounds[0].x - w / 2),
h / Math.abs(bounds[1].y - h / 2),
h / Math.abs(bounds[0].y - h / 2)) / 2 : 1;
words = data;
var text = vis.selectAll("text")
.data(words, function(d) { return d.text.toLowerCase(); });
text.transition()
.duration(1000)
.attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; })
.style("font-size", function(d) { return d.size + "px"; });
text.enter().append("text")
.attr("text-anchor", "middle")
.attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; })
.style("font-size", "1px")
.transition()
.duration(1000)
.style("font-size", function(d) {
return d.size + "px";
});
text.style("font-family", function(d) { return d.font; })
.style("cursor","pointer")
.style("fill", function(d) {
var tempvalue=getdevicevalue(d.text,"日现金流");
if(tempvalue>0){
return "#0cc7a2";
}else{
return "#d45e58";
}
//return fill(d.text.toLowerCase());
})
.text(function(d) { return d.text; });
var exitGroup = background.append("g")
.attr("transform", vis.attr("transform"));
var exitGroupNode = exitGroup.node();
text.exit().each(function() {
exitGroupNode.appendChild(this);
});
exitGroup.transition()
.duration(1000)
.style("opacity", 1e-6)
.remove();
vis.transition()
.delay(1000)
.duration(750)
.attr("transform", "translate(" + [w >> 1, h >> 1] + ")scale(" + scale + ")");
vis.selectAll("text").on("click",function(ev){
//ev:{"text":"1#乙二醇-环氧乙烷","font":,"rotate","size","padding","width":,"height":,"xoff":,"yoff":,"x1":,"y1":,"x0":,"y0":,"x":,"y"}
DeviceChange(ev.text);
});
}
//
function getdevicevalue(_devicename,_cashcol){
var tempvalue=0;
for(var i=0;i<OldData.length;i++){
if(_devicena | =0;i<OldData.length;i++){
if(_devicename==OldData[i].group){
rowdata=OldData[i].items;
break;
}
}
return rowdata;
}
function DeviceChange(_deviceName){
var _deviceitems=getdeviceinfo(_deviceName);
KPICompare(_deviceitems[0]);
DrawLineChart(_deviceitems);
}
//装置环比信息KPI
function KPICompare(_deviceinfo){
$("#cashkpi").html("");
var KPIItems=[
{v1:"日现金流",value1:parseInt(_deviceinfo["日现金流"]),v2:"上月日现金流",value2:parseInt(_deviceinfo["上月日现金流"])},
{v1:"累计现金流",value1:parseInt(_deviceinfo["累计现金流"]),v2:"上月累计现金流",value2:parseInt(_deviceinfo["上月累计现金流"])},
{v1:"预计现金流",value1:parseInt(_deviceinfo["预计现金流"]),v2:"上月预计现金流",value2:parseInt(_deviceinfo["上月预计现金流"])}
]
$("#cashkpi").append("<div id='kpicompare_item1' class='rowpanel'></div>");
$("#cashkpi").append("<div id='kpicompare_item2' class='rowpanel'></div>");
$("#cashkpi").append("<div id='kpicompare_item3' class='rowpanel'></div>");
for(var i=1;i<=KPIItems.length;i++){
DrawCashBar(KPIItems[i-1].value1,KPIItems[i-1].value2,"kpicompare_item"+i,KPIItems[i-1].v1,KPIItems[i-1].v2);
}
}
function DrawCashBar(_value,_firstvalue,_Panelid,_valuename,_firstname){
var differencevalue=_value-_firstvalue;
var sumvalue=0;
if(differencevalue>0){
sumvalue=differencevalue+_value;
}else{
sumvalue=Math.abs(differencevalue)+_firstvalue;
}
var Panel=$("#"+_Panelid);
var TextPanel=$("<div class='textpanel'></div>");
var BarPanel=$("<div class='Barpanel'></div>");
Panel.append(TextPanel);
Panel.append(BarPanel);
var steppx=(BarPanel.width()-200)/sumvalue;
var leftbarwidth=steppx*Math.abs(_value);
var leftpanel=$("<div class='panelbar'></div>");
var lefttext=$("<div>"+_valuename+":</br>"+_value+"万元</div>")
BarPanel.append(leftpanel);
TextPanel.append(lefttext);
var backgroundcolor="#d45e58";
if(differencevalue>=0 && _value>=0){
backgroundcolor="#0cc7a2";
lefttext.css({
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor
})
}else{
if(_value>=0){
lefttext.css({
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor
})
}else{
lefttext.css({
"position":"absolute",
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor,
"left":(Panel.offset().left+10)+"px"
})
}
}
leftpanel.css({
"width":leftbarwidth+"px",
"height":"20px",
"background-color":backgroundcolor
});
var centerpanel=$("<div class='centerpanelbar'></div>")
var centertext=$("<div class='centertext'>"+_firstname+":</br>"+_firstvalue+"万元</div>")
BarPanel.append(centerpanel);
TextPanel.append(centertext);
centerpanel.css({
"left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px"
})
centertext.css({
"left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px"
})
var rightpanelleftvalue=0;
if(differencevalue>=0 && _value>=0){
rightpanelleftvalue=steppx*Math.abs(_value)+150;
}else{
rightpanelleftvalue=steppx*Math.abs(_firstvalue)+150;
}
var rightbarwidth=steppx | me==OldData[i].group){
for(var j=0;j<OldData[i].items.length;j++){
tempvalue+=OldData[i].items[j][_cashcol];
}
break;
}
}
return tempvalue;
}
//获取装置信息
function getdeviceinfo(_devicename){
var rowdata=null;
for(var i | identifier_body |
devicecash_trend.js | fill = d3.scale.category20b();
w = $("#cloudchart").width();
h = $("#cloudchart").height();
words = [],max,scale = 1,complete = 0,
keyword = "",
tags,
fontSize,
maxLength = 30,
fetcher,
statusText ="";
layout = d3.layout.cloud()
.timeInterval(10)
.size([w, h])
.fontSize(function(d) { return fontSize(+d.value); })
.text(function(d) { return d.key; })
.on("end", draw);
svg = d3.select("#cloudchart").append("svg").attr("width", w).attr("height", h);
background = svg.append("g"),
vis = svg.append("g").attr("transform", "translate(" + [w >> 1, h >> 1] + ")");
tags=[];
for(var i=0;i<OldData.length;i++){
tags.push({key:OldData[i].group,value:Math.abs(parseInt(OldData[i].items[0]["日现金流"]))});
}
generate();
DeviceChange(OldData[0].group);//默认显示第一个装置
}
var fontfamily="黑体",spiralvalue="archimedean",fontsizecaltype="log",maxlength=1000;
function generate() {
layout
.font(fontfamily)
.spiral(spiralvalue);
fontSize = d3.scale[fontsizecaltype]().range([10, 18]);
if (tags.length){
fontSize.domain([+tags[tags.length - 1].value || 1, +tags[0].value]);
}
complete = 0;
words = [];
layout.stop().words(tags.slice(0, max = Math.min(tags.length, +maxlength))).start();
}
function draw(data, bounds) {
scale = bounds ? Math.min(
w / Math.abs(bounds[1].x - w / 2),
w / Math.abs(bounds[0].x - w / 2),
h / Math.abs(bounds[1].y - h / 2),
h / Math.abs(bounds[0].y - h / 2)) / 2 : 1;
words = data;
var text = vis.selectAll("text")
.data(words, function(d) { return d.text.toLowerCase(); });
text.transition()
.duration(1000)
.attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; })
.style("font-size", function(d) { return d.size + "px"; });
text.enter().append("text")
.attr("text-anchor", "middle")
.attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; })
.style("font-size", "1px")
.transition()
.duration(1000)
.style("font-size", function(d) {
return d.size + "px";
});
text.style("font-family", function(d) { return d.font; })
.style("cursor","pointer")
.style("fill", function(d) {
var tempvalue=getdevicevalue(d.text,"日现金流");
if(tempvalue>0){
return "#0cc7a2";
}else{
return "#d45e58";
}
//return fill(d.text.toLowerCase());
})
.text(function(d) { return d.text; });
var exitGroup = background.append("g")
.attr("transform", vis.attr("transform"));
var exitGroupNode = exitGroup.node();
text.exit().each(function() {
exitGroupNode.appendChild(this);
});
exitGroup.transition()
.duration(1000)
.style("opacity", 1e-6)
.remove();
vis.transition()
.delay(1000)
.duration(750)
.attr("transform", "translate(" + [w >> 1, h >> 1] + ")scale(" + scale + ")");
vis.selectAll("text").on("click",function(ev){
//ev:{"text":"1#乙二醇-环氧乙烷","font":,"rotate","size","padding","width":,"height":,"xoff":,"yoff":,"x1":,"y1":,"x0":,"y0":,"x":,"y"}
DeviceChange(ev.text);
});
}
//
function getdevicevalue(_devicename,_cashcol){
var tempvalue=0;
for(var i=0;i<OldData.length;i++){
if(_devicename==OldData[i].group){
for(var j=0;j<OldData[i].items.length;j++){
tempvalu | ar rowdata=null;
for(var i=0;i<OldData.length;i++){
if(_devicename==OldData[i].group){
rowdata=OldData[i].items;
break;
}
}
return rowdata;
}
function DeviceChange(_deviceName){
var _deviceitems=getdeviceinfo(_deviceName);
KPICompare(_deviceitems[0]);
DrawLineChart(_deviceitems);
}
//装置环比信息KPI
function KPICompare(_deviceinfo){
$("#cashkpi").html("");
var KPIItems=[
{v1:"日现金流",value1:parseInt(_deviceinfo["日现金流"]),v2:"上月日现金流",value2:parseInt(_deviceinfo["上月日现金流"])},
{v1:"累计现金流",value1:parseInt(_deviceinfo["累计现金流"]),v2:"上月累计现金流",value2:parseInt(_deviceinfo["上月累计现金流"])},
{v1:"预计现金流",value1:parseInt(_deviceinfo["预计现金流"]),v2:"上月预计现金流",value2:parseInt(_deviceinfo["上月预计现金流"])}
]
$("#cashkpi").append("<div id='kpicompare_item1' class='rowpanel'></div>");
$("#cashkpi").append("<div id='kpicompare_item2' class='rowpanel'></div>");
$("#cashkpi").append("<div id='kpicompare_item3' class='rowpanel'></div>");
for(var i=1;i<=KPIItems.length;i++){
DrawCashBar(KPIItems[i-1].value1,KPIItems[i-1].value2,"kpicompare_item"+i,KPIItems[i-1].v1,KPIItems[i-1].v2);
}
}
function DrawCashBar(_value,_firstvalue,_Panelid,_valuename,_firstname){
var differencevalue=_value-_firstvalue;
var sumvalue=0;
if(differencevalue>0){
sumvalue=differencevalue+_value;
}else{
sumvalue=Math.abs(differencevalue)+_firstvalue;
}
var Panel=$("#"+_Panelid);
var TextPanel=$("<div class='textpanel'></div>");
var BarPanel=$("<div class='Barpanel'></div>");
Panel.append(TextPanel);
Panel.append(BarPanel);
var steppx=(BarPanel.width()-200)/sumvalue;
var leftbarwidth=steppx*Math.abs(_value);
var leftpanel=$("<div class='panelbar'></div>");
var lefttext=$("<div>"+_valuename+":</br>"+_value+"万元</div>")
BarPanel.append(leftpanel);
TextPanel.append(lefttext);
var backgroundcolor="#d45e58";
if(differencevalue>=0 && _value>=0){
backgroundcolor="#0cc7a2";
lefttext.css({
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor
})
}else{
if(_value>=0){
lefttext.css({
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor
})
}else{
lefttext.css({
"position":"absolute",
"color":backgroundcolor,
"border-left":"solid 1px "+backgroundcolor,
"left":(Panel.offset().left+10)+"px"
})
}
}
leftpanel.css({
"width":leftbarwidth+"px",
"height":"20px",
"background-color":backgroundcolor
});
var centerpanel=$("<div class='centerpanelbar'></div>")
var centertext=$("<div class='centertext'>"+_firstname+":</br>"+_firstvalue+"万元</div>")
BarPanel.append(centerpanel);
TextPanel.append(centertext);
centerpanel.css({
"left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px"
})
centertext.css({
"left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px"
})
var rightpanelleftvalue=0;
if(differencevalue>=0 && _value>=0){
rightpanelleftvalue=steppx*Math.abs(_value)+150;
}else{
rightpanelleftvalue=steppx*Math.abs(_firstvalue)+150;
}
var rightbarwidth=stepp | e+=OldData[i].items[j][_cashcol];
}
break;
}
}
return tempvalue;
}
//获取装置信息
function getdeviceinfo(_devicename){
v | conditional_block |
train_models.go | CHRM2", "EDNRA", "MAOA", "LCK",
"PTGS2", "SLC6A2", "ACHE", "CNR2", "CNR1", "ADORA2A", "OPRD1", "NR3C1", "AR", "SLC6A4",
"OPRM1", "HTR1A", "SLC6A3", "OPRK1", "AVPR1A", "ADRB2", "DRD2", "KCNH2", "DRD1", "HTR2A",
"CHRM1",
},
"smallest1": []string{
"PDE3A",
},
"smallest3": []string{
"PDE3A", "SCN5A", "CCKAR",
},
"smallest4": []string{
"PDE3A", "SCN5A", "CCKAR", "ADRB1",
},
}
costVals = []string{
"1",
"10",
"100",
}
gammaVals = []string{
"0.1",
"0.01",
"0.001",
}
replicates = []string{
"r1", "r2", "r3",
}
)
func main() | // Show startup messages
// --------------------------------
sp.Info.Printf("Using max %d OS threads to schedule max %d tasks\n", *threads, *maxTasks)
sp.Info.Printf("Starting workflow for %s geneset\n", *geneSet)
// --------------------------------
// Initialize processes and add to runner
// --------------------------------
wf := sp.NewWorkflow("train_models", *maxTasks)
dbFileName := "pubchem.chembl.dataset4publication_inchi_smiles.tsv.xz"
dlExcapeDB := wf.NewProc("dlDB", fmt.Sprintf("wget https://zenodo.org/record/173258/files/%s -O {o:excapexz}", dbFileName))
dlExcapeDB.SetPathStatic("excapexz", "../../raw/"+dbFileName)
unPackDB := wf.NewProc("unPackDB", "xzcat {i:xzfile} > {o:unxzed}")
unPackDB.SetPathReplace("xzfile", "unxzed", ".xz", "")
unPackDB.In("xzfile").Connect(dlExcapeDB.Out("excapexz"))
//unPackDB.Prepend = "salloc -A snic2017-7-89 -n 2 -t 8:00:00 -J unpack_excapedb"
finalModelsSummary := NewFinalModelSummarizer(wf, "finalmodels_summary_creator", "res/final_models_summary.tsv", '\t')
// --------------------------------
// Set up gene-specific workflow branches
// --------------------------------
for _, gene := range geneSets[*geneSet] {
geneLC := str.ToLower(gene)
uniq_gene := geneLC
// --------------------------------------------------------------------------------
// Extract target data step
// --------------------------------------------------------------------------------
extractTargetData := wf.NewProc("extract_target_data_"+uniq_gene, `awk -F"\t" '$9 == "{p:gene}" { print $12"\t"$4 }' {i:raw_data} > {o:target_data}`)
extractTargetData.ParamPort("gene").ConnectStr(gene)
extractTargetData.SetPathStatic("target_data", fmt.Sprintf("dat/%s/%s.tsv", geneLC, geneLC))
extractTargetData.In("raw_data").Connect(unPackDB.Out("unxzed"))
if *runSlurm {
extractTargetData.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1:00:00 -J scipipe_cnt_comp_" + geneLC // SLURM string
}
countTargetDataRows := wf.NewProc("cnt_targetdata_rows_"+uniq_gene, `awk '$2 == "A" { a += 1 } $2 == "N" { n += 1 } END { print a "\t" n }' {i:targetdata} > {o:count} # {p:gene}`)
countTargetDataRows.SetPathExtend("targetdata", "count", ".count")
countTargetDataRows.In("targetdata").Connect(extractTargetData.Out("target_data"))
countTargetDataRows.ParamPort("gene").ConnectStr(gene)
// --------------------------------------------------------------------------------
// Pre-compute step
// --------------------------------------------------------------------------------
cpSignPrecomp := wf.NewProc("cpsign_precomp_"+uniq_gene,
`java -jar `+cpSignPath+` precompute \
--license ../../bin/cpsign.lic \
--cptype 1 \
--trainfile {i:traindata} \
--labels A, N \
--model-out {o:precomp} \
--model-name "`+gene+` target profile"`)
cpSignPrecomp.In("traindata").Connect(extractTargetData.Out("target_data"))
cpSignPrecomp.SetPathExtend("traindata", "precomp", ".precomp")
if *runSlurm {
cpSignPrecomp.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J precmp_" + geneLC // SLURM string
}
for _, replicate := range replicates {
uniq_repl := uniq_gene + "_" + replicate
// --------------------------------------------------------------------------------
// Optimize cost/gamma-step
// --------------------------------------------------------------------------------
includeGamma := false // For liblinear
summarize := NewSummarizeCostGammaPerf(wf,
"summarize_cost_gamma_perf_"+uniq_repl,
"dat/"+geneLC+"/"+replicate+"/"+geneLC+"_cost_gamma_perf_stats.tsv",
includeGamma)
for _, cost := range costVals {
uniq_cost := uniq_repl + "_" + cost
// If Liblinear
evalCost := wf.NewProc("crossval_"+uniq_cost, `java -jar `+cpSignPath+` crossvalidate \
--license ../../bin/cpsign.lic \
--cptype 1 \
--trainfile {i:traindata} \
--impl liblinear \
--labels A, N \
--nr-models {p:nrmdl} \
--cost {p:cost} \
--cv-folds {p:cvfolds} \
--output-format json \
--confidence {p:confidence} | grep -P "^{" > {o:stats} # {p:gene} {p:replicate}`)
evalCost.SetPathCustom("stats", func(t *sp.SciTask) string {
c, err := strconv.ParseInt(t.Param("cost"), 10, 0)
geneLC := str.ToLower(t.Param("gene"))
sp.CheckErr(err)
return str.Replace(t.InPath("traindata"), geneLC+".tsv", t.Param("replicate")+"/"+geneLC+".tsv", 1) + fmt.Sprintf(".liblin_c%03d", c) + "_crossval_stats.json"
})
evalCost.In("traindata").Connect(extractTargetData.Out("target_data"))
evalCost.ParamPort("nrmdl").ConnectStr("10")
evalCost.ParamPort("cvfolds").ConnectStr("10")
evalCost.ParamPort("confidence").ConnectStr("0.9")
evalCost.ParamPort("gene").ConnectStr(gene)
evalCost.ParamPort("replicate").ConnectStr(replicate)
evalCost.ParamPort("cost").ConnectStr(cost)
if *runSlurm {
evalCost.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J evalcg_" + uniq_cost // SLURM string
}
extractCostGammaStats := spc.NewMapToKeys(wf, "extract_cgstats_"+uniq_cost, func(ip *sp.InformationPacket) map[string]string {
crossValOut := &cpSignCrossValOutput{}
ip.UnMarshalJson(crossValOut)
newKeys := map[string]string{}
newKeys["validity"] = fmt.Sprintf("%.3f", crossValOut.Validity)
newKeys["efficiency"] = fmt.Sprintf("%.3f", crossValOut.Efficiency)
newKeys["class_confidence"] = | {
// --------------------------------
// Parse flags and stuff
// --------------------------------
flag.Parse()
if *debug {
sp.InitLogDebug()
} else {
sp.InitLogAudit()
}
if len(geneSets[*geneSet]) == 0 {
names := []string{}
for n, _ := range geneSets {
names = append(names, n)
}
sp.Error.Fatalf("Incorrect gene set %s specified! Only allowed values are: %s\n", *geneSet, str.Join(names, ", "))
}
runtime.GOMAXPROCS(*threads)
// -------------------------------- | identifier_body |
train_models.go | CHRM2", "EDNRA", "MAOA", "LCK",
"PTGS2", "SLC6A2", "ACHE", "CNR2", "CNR1", "ADORA2A", "OPRD1", "NR3C1", "AR", "SLC6A4",
"OPRM1", "HTR1A", "SLC6A3", "OPRK1", "AVPR1A", "ADRB2", "DRD2", "KCNH2", "DRD1", "HTR2A",
"CHRM1",
},
"smallest1": []string{
"PDE3A",
},
"smallest3": []string{
"PDE3A", "SCN5A", "CCKAR",
},
"smallest4": []string{
"PDE3A", "SCN5A", "CCKAR", "ADRB1",
},
}
costVals = []string{
"1",
"10",
"100",
} | "0.001",
}
replicates = []string{
"r1", "r2", "r3",
}
)
func main() {
// --------------------------------
// Parse flags and stuff
// --------------------------------
flag.Parse()
if *debug {
sp.InitLogDebug()
} else {
sp.InitLogAudit()
}
if len(geneSets[*geneSet]) == 0 {
names := []string{}
for n, _ := range geneSets {
names = append(names, n)
}
sp.Error.Fatalf("Incorrect gene set %s specified! Only allowed values are: %s\n", *geneSet, str.Join(names, ", "))
}
runtime.GOMAXPROCS(*threads)
// --------------------------------
// Show startup messages
// --------------------------------
sp.Info.Printf("Using max %d OS threads to schedule max %d tasks\n", *threads, *maxTasks)
sp.Info.Printf("Starting workflow for %s geneset\n", *geneSet)
// --------------------------------
// Initialize processes and add to runner
// --------------------------------
wf := sp.NewWorkflow("train_models", *maxTasks)
dbFileName := "pubchem.chembl.dataset4publication_inchi_smiles.tsv.xz"
dlExcapeDB := wf.NewProc("dlDB", fmt.Sprintf("wget https://zenodo.org/record/173258/files/%s -O {o:excapexz}", dbFileName))
dlExcapeDB.SetPathStatic("excapexz", "../../raw/"+dbFileName)
unPackDB := wf.NewProc("unPackDB", "xzcat {i:xzfile} > {o:unxzed}")
unPackDB.SetPathReplace("xzfile", "unxzed", ".xz", "")
unPackDB.In("xzfile").Connect(dlExcapeDB.Out("excapexz"))
//unPackDB.Prepend = "salloc -A snic2017-7-89 -n 2 -t 8:00:00 -J unpack_excapedb"
finalModelsSummary := NewFinalModelSummarizer(wf, "finalmodels_summary_creator", "res/final_models_summary.tsv", '\t')
// --------------------------------
// Set up gene-specific workflow branches
// --------------------------------
for _, gene := range geneSets[*geneSet] {
geneLC := str.ToLower(gene)
uniq_gene := geneLC
// --------------------------------------------------------------------------------
// Extract target data step
// --------------------------------------------------------------------------------
extractTargetData := wf.NewProc("extract_target_data_"+uniq_gene, `awk -F"\t" '$9 == "{p:gene}" { print $12"\t"$4 }' {i:raw_data} > {o:target_data}`)
extractTargetData.ParamPort("gene").ConnectStr(gene)
extractTargetData.SetPathStatic("target_data", fmt.Sprintf("dat/%s/%s.tsv", geneLC, geneLC))
extractTargetData.In("raw_data").Connect(unPackDB.Out("unxzed"))
if *runSlurm {
extractTargetData.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1:00:00 -J scipipe_cnt_comp_" + geneLC // SLURM string
}
countTargetDataRows := wf.NewProc("cnt_targetdata_rows_"+uniq_gene, `awk '$2 == "A" { a += 1 } $2 == "N" { n += 1 } END { print a "\t" n }' {i:targetdata} > {o:count} # {p:gene}`)
countTargetDataRows.SetPathExtend("targetdata", "count", ".count")
countTargetDataRows.In("targetdata").Connect(extractTargetData.Out("target_data"))
countTargetDataRows.ParamPort("gene").ConnectStr(gene)
// --------------------------------------------------------------------------------
// Pre-compute step
// --------------------------------------------------------------------------------
cpSignPrecomp := wf.NewProc("cpsign_precomp_"+uniq_gene,
`java -jar `+cpSignPath+` precompute \
--license ../../bin/cpsign.lic \
--cptype 1 \
--trainfile {i:traindata} \
--labels A, N \
--model-out {o:precomp} \
--model-name "`+gene+` target profile"`)
cpSignPrecomp.In("traindata").Connect(extractTargetData.Out("target_data"))
cpSignPrecomp.SetPathExtend("traindata", "precomp", ".precomp")
if *runSlurm {
cpSignPrecomp.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J precmp_" + geneLC // SLURM string
}
for _, replicate := range replicates {
uniq_repl := uniq_gene + "_" + replicate
// --------------------------------------------------------------------------------
// Optimize cost/gamma-step
// --------------------------------------------------------------------------------
includeGamma := false // For liblinear
summarize := NewSummarizeCostGammaPerf(wf,
"summarize_cost_gamma_perf_"+uniq_repl,
"dat/"+geneLC+"/"+replicate+"/"+geneLC+"_cost_gamma_perf_stats.tsv",
includeGamma)
for _, cost := range costVals {
uniq_cost := uniq_repl + "_" + cost
// If Liblinear
evalCost := wf.NewProc("crossval_"+uniq_cost, `java -jar `+cpSignPath+` crossvalidate \
--license ../../bin/cpsign.lic \
--cptype 1 \
--trainfile {i:traindata} \
--impl liblinear \
--labels A, N \
--nr-models {p:nrmdl} \
--cost {p:cost} \
--cv-folds {p:cvfolds} \
--output-format json \
--confidence {p:confidence} | grep -P "^{" > {o:stats} # {p:gene} {p:replicate}`)
evalCost.SetPathCustom("stats", func(t *sp.SciTask) string {
c, err := strconv.ParseInt(t.Param("cost"), 10, 0)
geneLC := str.ToLower(t.Param("gene"))
sp.CheckErr(err)
return str.Replace(t.InPath("traindata"), geneLC+".tsv", t.Param("replicate")+"/"+geneLC+".tsv", 1) + fmt.Sprintf(".liblin_c%03d", c) + "_crossval_stats.json"
})
evalCost.In("traindata").Connect(extractTargetData.Out("target_data"))
evalCost.ParamPort("nrmdl").ConnectStr("10")
evalCost.ParamPort("cvfolds").ConnectStr("10")
evalCost.ParamPort("confidence").ConnectStr("0.9")
evalCost.ParamPort("gene").ConnectStr(gene)
evalCost.ParamPort("replicate").ConnectStr(replicate)
evalCost.ParamPort("cost").ConnectStr(cost)
if *runSlurm {
evalCost.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J evalcg_" + uniq_cost // SLURM string
}
extractCostGammaStats := spc.NewMapToKeys(wf, "extract_cgstats_"+uniq_cost, func(ip *sp.InformationPacket) map[string]string {
crossValOut := &cpSignCrossValOutput{}
ip.UnMarshalJson(crossValOut)
newKeys := map[string]string{}
newKeys["validity"] = fmt.Sprintf("%.3f", crossValOut.Validity)
newKeys["efficiency"] = fmt.Sprintf("%.3f", crossValOut.Efficiency)
newKeys["class_confidence"] = | gammaVals = []string{
"0.1",
"0.01", | random_line_split |
train_models.go | CHRM2", "EDNRA", "MAOA", "LCK",
"PTGS2", "SLC6A2", "ACHE", "CNR2", "CNR1", "ADORA2A", "OPRD1", "NR3C1", "AR", "SLC6A4",
"OPRM1", "HTR1A", "SLC6A3", "OPRK1", "AVPR1A", "ADRB2", "DRD2", "KCNH2", "DRD1", "HTR2A",
"CHRM1",
},
"smallest1": []string{
"PDE3A",
},
"smallest3": []string{
"PDE3A", "SCN5A", "CCKAR",
},
"smallest4": []string{
"PDE3A", "SCN5A", "CCKAR", "ADRB1",
},
}
costVals = []string{
"1",
"10",
"100",
}
gammaVals = []string{
"0.1",
"0.01",
"0.001",
}
replicates = []string{
"r1", "r2", "r3",
}
)
func | () {
// --------------------------------
// Parse flags and stuff
// --------------------------------
flag.Parse()
if *debug {
sp.InitLogDebug()
} else {
sp.InitLogAudit()
}
if len(geneSets[*geneSet]) == 0 {
names := []string{}
for n, _ := range geneSets {
names = append(names, n)
}
sp.Error.Fatalf("Incorrect gene set %s specified! Only allowed values are: %s\n", *geneSet, str.Join(names, ", "))
}
runtime.GOMAXPROCS(*threads)
// --------------------------------
// Show startup messages
// --------------------------------
sp.Info.Printf("Using max %d OS threads to schedule max %d tasks\n", *threads, *maxTasks)
sp.Info.Printf("Starting workflow for %s geneset\n", *geneSet)
// --------------------------------
// Initialize processes and add to runner
// --------------------------------
wf := sp.NewWorkflow("train_models", *maxTasks)
dbFileName := "pubchem.chembl.dataset4publication_inchi_smiles.tsv.xz"
dlExcapeDB := wf.NewProc("dlDB", fmt.Sprintf("wget https://zenodo.org/record/173258/files/%s -O {o:excapexz}", dbFileName))
dlExcapeDB.SetPathStatic("excapexz", "../../raw/"+dbFileName)
unPackDB := wf.NewProc("unPackDB", "xzcat {i:xzfile} > {o:unxzed}")
unPackDB.SetPathReplace("xzfile", "unxzed", ".xz", "")
unPackDB.In("xzfile").Connect(dlExcapeDB.Out("excapexz"))
//unPackDB.Prepend = "salloc -A snic2017-7-89 -n 2 -t 8:00:00 -J unpack_excapedb"
finalModelsSummary := NewFinalModelSummarizer(wf, "finalmodels_summary_creator", "res/final_models_summary.tsv", '\t')
// --------------------------------
// Set up gene-specific workflow branches
// --------------------------------
for _, gene := range geneSets[*geneSet] {
geneLC := str.ToLower(gene)
uniq_gene := geneLC
// --------------------------------------------------------------------------------
// Extract target data step
// --------------------------------------------------------------------------------
extractTargetData := wf.NewProc("extract_target_data_"+uniq_gene, `awk -F"\t" '$9 == "{p:gene}" { print $12"\t"$4 }' {i:raw_data} > {o:target_data}`)
extractTargetData.ParamPort("gene").ConnectStr(gene)
extractTargetData.SetPathStatic("target_data", fmt.Sprintf("dat/%s/%s.tsv", geneLC, geneLC))
extractTargetData.In("raw_data").Connect(unPackDB.Out("unxzed"))
if *runSlurm {
extractTargetData.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1:00:00 -J scipipe_cnt_comp_" + geneLC // SLURM string
}
countTargetDataRows := wf.NewProc("cnt_targetdata_rows_"+uniq_gene, `awk '$2 == "A" { a += 1 } $2 == "N" { n += 1 } END { print a "\t" n }' {i:targetdata} > {o:count} # {p:gene}`)
countTargetDataRows.SetPathExtend("targetdata", "count", ".count")
countTargetDataRows.In("targetdata").Connect(extractTargetData.Out("target_data"))
countTargetDataRows.ParamPort("gene").ConnectStr(gene)
// --------------------------------------------------------------------------------
// Pre-compute step
// --------------------------------------------------------------------------------
cpSignPrecomp := wf.NewProc("cpsign_precomp_"+uniq_gene,
`java -jar `+cpSignPath+` precompute \
--license ../../bin/cpsign.lic \
--cptype 1 \
--trainfile {i:traindata} \
--labels A, N \
--model-out {o:precomp} \
--model-name "`+gene+` target profile"`)
cpSignPrecomp.In("traindata").Connect(extractTargetData.Out("target_data"))
cpSignPrecomp.SetPathExtend("traindata", "precomp", ".precomp")
if *runSlurm {
cpSignPrecomp.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J precmp_" + geneLC // SLURM string
}
for _, replicate := range replicates {
uniq_repl := uniq_gene + "_" + replicate
// --------------------------------------------------------------------------------
// Optimize cost/gamma-step
// --------------------------------------------------------------------------------
includeGamma := false // For liblinear
summarize := NewSummarizeCostGammaPerf(wf,
"summarize_cost_gamma_perf_"+uniq_repl,
"dat/"+geneLC+"/"+replicate+"/"+geneLC+"_cost_gamma_perf_stats.tsv",
includeGamma)
for _, cost := range costVals {
uniq_cost := uniq_repl + "_" + cost
// If Liblinear
evalCost := wf.NewProc("crossval_"+uniq_cost, `java -jar `+cpSignPath+` crossvalidate \
--license ../../bin/cpsign.lic \
--cptype 1 \
--trainfile {i:traindata} \
--impl liblinear \
--labels A, N \
--nr-models {p:nrmdl} \
--cost {p:cost} \
--cv-folds {p:cvfolds} \
--output-format json \
--confidence {p:confidence} | grep -P "^{" > {o:stats} # {p:gene} {p:replicate}`)
evalCost.SetPathCustom("stats", func(t *sp.SciTask) string {
c, err := strconv.ParseInt(t.Param("cost"), 10, 0)
geneLC := str.ToLower(t.Param("gene"))
sp.CheckErr(err)
return str.Replace(t.InPath("traindata"), geneLC+".tsv", t.Param("replicate")+"/"+geneLC+".tsv", 1) + fmt.Sprintf(".liblin_c%03d", c) + "_crossval_stats.json"
})
evalCost.In("traindata").Connect(extractTargetData.Out("target_data"))
evalCost.ParamPort("nrmdl").ConnectStr("10")
evalCost.ParamPort("cvfolds").ConnectStr("10")
evalCost.ParamPort("confidence").ConnectStr("0.9")
evalCost.ParamPort("gene").ConnectStr(gene)
evalCost.ParamPort("replicate").ConnectStr(replicate)
evalCost.ParamPort("cost").ConnectStr(cost)
if *runSlurm {
evalCost.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J evalcg_" + uniq_cost // SLURM string
}
extractCostGammaStats := spc.NewMapToKeys(wf, "extract_cgstats_"+uniq_cost, func(ip *sp.InformationPacket) map[string]string {
crossValOut := &cpSignCrossValOutput{}
ip.UnMarshalJson(crossValOut)
newKeys := map[string]string{}
newKeys["validity"] = fmt.Sprintf("%.3f", crossValOut.Validity)
newKeys["efficiency"] = fmt.Sprintf("%.3f", crossValOut.Efficiency)
newKeys["class_confidence"] | main | identifier_name |
train_models.go | CHRM2", "EDNRA", "MAOA", "LCK",
"PTGS2", "SLC6A2", "ACHE", "CNR2", "CNR1", "ADORA2A", "OPRD1", "NR3C1", "AR", "SLC6A4",
"OPRM1", "HTR1A", "SLC6A3", "OPRK1", "AVPR1A", "ADRB2", "DRD2", "KCNH2", "DRD1", "HTR2A",
"CHRM1",
},
"smallest1": []string{
"PDE3A",
},
"smallest3": []string{
"PDE3A", "SCN5A", "CCKAR",
},
"smallest4": []string{
"PDE3A", "SCN5A", "CCKAR", "ADRB1",
},
}
costVals = []string{
"1",
"10",
"100",
}
gammaVals = []string{
"0.1",
"0.01",
"0.001",
}
replicates = []string{
"r1", "r2", "r3",
}
)
func main() {
// --------------------------------
// Parse flags and stuff
// --------------------------------
flag.Parse()
if *debug {
sp.InitLogDebug()
} else {
sp.InitLogAudit()
}
if len(geneSets[*geneSet]) == 0 {
names := []string{}
for n, _ := range geneSets |
sp.Error.Fatalf("Incorrect gene set %s specified! Only allowed values are: %s\n", *geneSet, str.Join(names, ", "))
}
runtime.GOMAXPROCS(*threads)
// --------------------------------
// Show startup messages
// --------------------------------
sp.Info.Printf("Using max %d OS threads to schedule max %d tasks\n", *threads, *maxTasks)
sp.Info.Printf("Starting workflow for %s geneset\n", *geneSet)
// --------------------------------
// Initialize processes and add to runner
// --------------------------------
wf := sp.NewWorkflow("train_models", *maxTasks)
dbFileName := "pubchem.chembl.dataset4publication_inchi_smiles.tsv.xz"
dlExcapeDB := wf.NewProc("dlDB", fmt.Sprintf("wget https://zenodo.org/record/173258/files/%s -O {o:excapexz}", dbFileName))
dlExcapeDB.SetPathStatic("excapexz", "../../raw/"+dbFileName)
unPackDB := wf.NewProc("unPackDB", "xzcat {i:xzfile} > {o:unxzed}")
unPackDB.SetPathReplace("xzfile", "unxzed", ".xz", "")
unPackDB.In("xzfile").Connect(dlExcapeDB.Out("excapexz"))
//unPackDB.Prepend = "salloc -A snic2017-7-89 -n 2 -t 8:00:00 -J unpack_excapedb"
finalModelsSummary := NewFinalModelSummarizer(wf, "finalmodels_summary_creator", "res/final_models_summary.tsv", '\t')
// --------------------------------
// Set up gene-specific workflow branches
// --------------------------------
for _, gene := range geneSets[*geneSet] {
geneLC := str.ToLower(gene)
uniq_gene := geneLC
// --------------------------------------------------------------------------------
// Extract target data step
// --------------------------------------------------------------------------------
extractTargetData := wf.NewProc("extract_target_data_"+uniq_gene, `awk -F"\t" '$9 == "{p:gene}" { print $12"\t"$4 }' {i:raw_data} > {o:target_data}`)
extractTargetData.ParamPort("gene").ConnectStr(gene)
extractTargetData.SetPathStatic("target_data", fmt.Sprintf("dat/%s/%s.tsv", geneLC, geneLC))
extractTargetData.In("raw_data").Connect(unPackDB.Out("unxzed"))
if *runSlurm {
extractTargetData.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1:00:00 -J scipipe_cnt_comp_" + geneLC // SLURM string
}
countTargetDataRows := wf.NewProc("cnt_targetdata_rows_"+uniq_gene, `awk '$2 == "A" { a += 1 } $2 == "N" { n += 1 } END { print a "\t" n }' {i:targetdata} > {o:count} # {p:gene}`)
countTargetDataRows.SetPathExtend("targetdata", "count", ".count")
countTargetDataRows.In("targetdata").Connect(extractTargetData.Out("target_data"))
countTargetDataRows.ParamPort("gene").ConnectStr(gene)
// --------------------------------------------------------------------------------
// Pre-compute step
// --------------------------------------------------------------------------------
cpSignPrecomp := wf.NewProc("cpsign_precomp_"+uniq_gene,
`java -jar `+cpSignPath+` precompute \
--license ../../bin/cpsign.lic \
--cptype 1 \
--trainfile {i:traindata} \
--labels A, N \
--model-out {o:precomp} \
--model-name "`+gene+` target profile"`)
cpSignPrecomp.In("traindata").Connect(extractTargetData.Out("target_data"))
cpSignPrecomp.SetPathExtend("traindata", "precomp", ".precomp")
if *runSlurm {
cpSignPrecomp.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J precmp_" + geneLC // SLURM string
}
for _, replicate := range replicates {
uniq_repl := uniq_gene + "_" + replicate
// --------------------------------------------------------------------------------
// Optimize cost/gamma-step
// --------------------------------------------------------------------------------
includeGamma := false // For liblinear
summarize := NewSummarizeCostGammaPerf(wf,
"summarize_cost_gamma_perf_"+uniq_repl,
"dat/"+geneLC+"/"+replicate+"/"+geneLC+"_cost_gamma_perf_stats.tsv",
includeGamma)
for _, cost := range costVals {
uniq_cost := uniq_repl + "_" + cost
// If Liblinear
evalCost := wf.NewProc("crossval_"+uniq_cost, `java -jar `+cpSignPath+` crossvalidate \
--license ../../bin/cpsign.lic \
--cptype 1 \
--trainfile {i:traindata} \
--impl liblinear \
--labels A, N \
--nr-models {p:nrmdl} \
--cost {p:cost} \
--cv-folds {p:cvfolds} \
--output-format json \
--confidence {p:confidence} | grep -P "^{" > {o:stats} # {p:gene} {p:replicate}`)
evalCost.SetPathCustom("stats", func(t *sp.SciTask) string {
c, err := strconv.ParseInt(t.Param("cost"), 10, 0)
geneLC := str.ToLower(t.Param("gene"))
sp.CheckErr(err)
return str.Replace(t.InPath("traindata"), geneLC+".tsv", t.Param("replicate")+"/"+geneLC+".tsv", 1) + fmt.Sprintf(".liblin_c%03d", c) + "_crossval_stats.json"
})
evalCost.In("traindata").Connect(extractTargetData.Out("target_data"))
evalCost.ParamPort("nrmdl").ConnectStr("10")
evalCost.ParamPort("cvfolds").ConnectStr("10")
evalCost.ParamPort("confidence").ConnectStr("0.9")
evalCost.ParamPort("gene").ConnectStr(gene)
evalCost.ParamPort("replicate").ConnectStr(replicate)
evalCost.ParamPort("cost").ConnectStr(cost)
if *runSlurm {
evalCost.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J evalcg_" + uniq_cost // SLURM string
}
extractCostGammaStats := spc.NewMapToKeys(wf, "extract_cgstats_"+uniq_cost, func(ip *sp.InformationPacket) map[string]string {
crossValOut := &cpSignCrossValOutput{}
ip.UnMarshalJson(crossValOut)
newKeys := map[string]string{}
newKeys["validity"] = fmt.Sprintf("%.3f", crossValOut.Validity)
newKeys["efficiency"] = fmt.Sprintf("%.3f", crossValOut.Efficiency)
newKeys["class_confidence | {
names = append(names, n)
} | conditional_block |
beacon_chain_builder.rs | ,
};
enum BuildStrategy<T: BeaconChainTypes> {
FromGenesis {
genesis_state: Box<BeaconState<T::EthSpec>>,
genesis_block: Box<BeaconBlock<T::EthSpec>>,
},
LoadFromStore,
}
pub struct BeaconChainBuilder<T: BeaconChainTypes> {
build_strategy: BuildStrategy<T>,
spec: ChainSpec,
log: Logger,
}
impl<T: BeaconChainTypes> BeaconChainBuilder<T> {
pub fn recent_genesis(
keypairs: &[Keypair],
minutes: u64,
spec: ChainSpec,
log: Logger,
) -> Result<Self, String> {
Self::quick_start(recent_genesis_time(minutes), keypairs, spec, log)
}
pub fn quick_start(
genesis_time: u64,
keypairs: &[Keypair],
spec: ChainSpec,
log: Logger,
) -> Result<Self, String> {
let genesis_state = interop_genesis_state(keypairs, genesis_time, &spec)?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn yaml_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let file = File::open(file.clone())
.map_err(|e| format!("Unable to open YAML genesis state file {:?}: {:?}", file, e))?;
let genesis_state = serde_yaml::from_reader(file)
.map_err(|e| format!("Unable to parse YAML genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn ssz_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let mut file = File::open(file.clone())
.map_err(|e| format!("Unable to open SSZ genesis state file {:?}: {:?}", file, e))?;
let mut bytes = vec![];
file.read_to_end(&mut bytes)
.map_err(|e| format!("Failed to read SSZ file: {:?}", e))?;
let genesis_state = BeaconState::from_ssz_bytes(&bytes)
.map_err(|e| format!("Unable to parse SSZ genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn json_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let file = File::open(file.clone())
.map_err(|e| format!("Unable to open JSON genesis state file {:?}: {:?}", file, e))?;
let genesis_state = serde_json::from_reader(file)
.map_err(|e| format!("Unable to parse JSON genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn http_bootstrap(server: &str, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let bootstrapper = Bootstrapper::connect(server.to_string(), &log)
.map_err(|e| format!("Failed to initialize bootstrap client: {}", e))?;
let (genesis_state, genesis_block) = bootstrapper
.genesis()
.map_err(|e| format!("Failed to bootstrap genesis state: {}", e))?;
Ok(Self {
build_strategy: BuildStrategy::FromGenesis {
genesis_block: Box::new(genesis_block),
genesis_state: Box::new(genesis_state),
},
spec,
log,
})
}
fn from_genesis_state(
genesis_state: BeaconState<T::EthSpec>,
spec: ChainSpec,
log: Logger,
) -> Self {
Self {
build_strategy: BuildStrategy::FromGenesis {
genesis_block: Box::new(genesis_block(&genesis_state, &spec)),
genesis_state: Box::new(genesis_state),
},
spec,
log,
}
}
pub fn from_store(spec: ChainSpec, log: Logger) -> Self {
Self {
build_strategy: BuildStrategy::LoadFromStore,
spec,
log,
}
}
pub fn build(
self,
store: Arc<T::Store>,
eth1_backend: T::Eth1Chain,
event_handler: T::EventHandler,
) -> Result<BeaconChain<T>, String> {
Ok(match self.build_strategy {
BuildStrategy::LoadFromStore => {
BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log)
.map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))?
.ok_or_else(|| "Unable to find exising BeaconChain in database.".to_string())?
}
BuildStrategy::FromGenesis {
genesis_block,
genesis_state,
} => BeaconChain::from_genesis(
store,
eth1_backend,
event_handler,
genesis_state.as_ref().clone(),
genesis_block.as_ref().clone(),
self.spec,
self.log,
)
.map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e))?,
})
}
}
fn genesis_block<T: EthSpec>(genesis_state: &BeaconState<T>, spec: &ChainSpec) -> BeaconBlock<T> {
let mut genesis_block = BeaconBlock::empty(&spec);
genesis_block.state_root = genesis_state.canonical_root();
genesis_block
}
/// Builds a genesis state as defined by the Eth2 interop procedure (see below).
///
/// Reference:
/// https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start
fn interop_genesis_state<T: EthSpec>(
keypairs: &[Keypair],
genesis_time: u64,
spec: &ChainSpec,
) -> Result<BeaconState<T>, String> {
let eth1_block_hash = Hash256::from_slice(&[0x42; 32]);
let eth1_timestamp = 2_u64.pow(40);
let amount = spec.max_effective_balance;
let withdrawal_credentials = |pubkey: &PublicKey| {
let mut credentials = hash(&pubkey.as_ssz_bytes());
credentials[0] = spec.bls_withdrawal_prefix_byte;
Hash256::from_slice(&credentials)
};
let datas = keypairs
.into_par_iter()
.map(|keypair| {
let mut data = DepositData {
withdrawal_credentials: withdrawal_credentials(&keypair.pk),
pubkey: keypair.pk.clone().into(),
amount,
signature: Signature::empty_signature().into(),
};
let domain = spec.get_domain(
spec.genesis_slot.epoch(T::slots_per_epoch()),
Domain::Deposit,
&Fork::default(),
);
data.signature = Signature::new(&data.signed_root()[..], domain, &keypair.sk).into();
data
})
.collect::<Vec<_>>();
let deposit_root_leaves = datas
.par_iter()
.map(|data| Hash256::from_slice(&data.tree_hash_root()))
.collect::<Vec<_>>();
let mut proofs = vec![];
let depth = spec.deposit_contract_tree_depth as usize;
let mut tree = MerkleTree::create(&[], depth);
for (i, deposit_leaf) in deposit_root_leaves.iter().enumerate() {
if let Err(_) = tree.push_leaf(*deposit_leaf, depth) |
let (_, mut proof) = tree.generate_proof(i, depth);
proof.push(Hash256::from_slice(&int_to_bytes32(i + 1)));
assert_eq!(
proof.len(),
depth + 1,
"Deposit proof should be correct len"
);
proofs.push(proof);
}
let deposits = datas
.into_par_iter()
.zip(proofs.into_par_iter())
.map(|(data, proof)| (data, proof.into()))
.map(|(data, proof)| Deposit { proof, data })
.collect::<Vec<_>>();
let mut state =
initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits, spec)
.map_err(|e| format!("Unable to initialize genesis state: {:?}", e))?;
state.genesis_time = genesis_time;
// Invalid all the caches after all the manual state surgery.
state.drop_all_caches();
Ok(state)
}
/// Returns `int` as little-endian bytes with a length of 32.
fn int_to_bytes32(int: usize) -> Vec<u8> {
let mut vec = int.to_le_bytes().to_vec();
vec.resize(32, 0);
vec
}
/// Returns the system time, mod 30 minutes.
///
/// Used for easily creating testnets.
fn recent_genesis_time(minutes: u64) -> u64 {
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let secs_after_last_period = now.checked_rem(minutes * 60).unwrap_or(0);
now - secs_after_last_period
}
#[cfg(test)]
mod test {
use super::*;
use types | {
return Err(String::from("Failed to push leaf"));
} | conditional_block |
beacon_chain_builder.rs | _genesis_state(genesis_state, spec, log))
}
pub fn yaml_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let file = File::open(file.clone())
.map_err(|e| format!("Unable to open YAML genesis state file {:?}: {:?}", file, e))?;
let genesis_state = serde_yaml::from_reader(file)
.map_err(|e| format!("Unable to parse YAML genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn ssz_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let mut file = File::open(file.clone())
.map_err(|e| format!("Unable to open SSZ genesis state file {:?}: {:?}", file, e))?;
let mut bytes = vec![];
file.read_to_end(&mut bytes)
.map_err(|e| format!("Failed to read SSZ file: {:?}", e))?;
let genesis_state = BeaconState::from_ssz_bytes(&bytes)
.map_err(|e| format!("Unable to parse SSZ genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn json_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let file = File::open(file.clone())
.map_err(|e| format!("Unable to open JSON genesis state file {:?}: {:?}", file, e))?;
let genesis_state = serde_json::from_reader(file)
.map_err(|e| format!("Unable to parse JSON genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn http_bootstrap(server: &str, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let bootstrapper = Bootstrapper::connect(server.to_string(), &log)
.map_err(|e| format!("Failed to initialize bootstrap client: {}", e))?;
let (genesis_state, genesis_block) = bootstrapper
.genesis()
.map_err(|e| format!("Failed to bootstrap genesis state: {}", e))?;
Ok(Self {
build_strategy: BuildStrategy::FromGenesis {
genesis_block: Box::new(genesis_block),
genesis_state: Box::new(genesis_state),
},
spec,
log,
})
}
fn from_genesis_state(
genesis_state: BeaconState<T::EthSpec>,
spec: ChainSpec,
log: Logger,
) -> Self {
Self {
build_strategy: BuildStrategy::FromGenesis {
genesis_block: Box::new(genesis_block(&genesis_state, &spec)),
genesis_state: Box::new(genesis_state),
},
spec,
log,
}
}
pub fn from_store(spec: ChainSpec, log: Logger) -> Self {
Self {
build_strategy: BuildStrategy::LoadFromStore,
spec,
log,
}
}
pub fn build(
self,
store: Arc<T::Store>,
eth1_backend: T::Eth1Chain,
event_handler: T::EventHandler,
) -> Result<BeaconChain<T>, String> {
Ok(match self.build_strategy {
BuildStrategy::LoadFromStore => {
BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log)
.map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))?
.ok_or_else(|| "Unable to find exising BeaconChain in database.".to_string())?
}
BuildStrategy::FromGenesis {
genesis_block,
genesis_state,
} => BeaconChain::from_genesis(
store,
eth1_backend,
event_handler,
genesis_state.as_ref().clone(),
genesis_block.as_ref().clone(),
self.spec,
self.log,
)
.map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e))?,
})
}
}
fn genesis_block<T: EthSpec>(genesis_state: &BeaconState<T>, spec: &ChainSpec) -> BeaconBlock<T> {
let mut genesis_block = BeaconBlock::empty(&spec);
genesis_block.state_root = genesis_state.canonical_root();
genesis_block
}
/// Builds a genesis state as defined by the Eth2 interop procedure (see below).
///
/// Reference:
/// https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start
fn interop_genesis_state<T: EthSpec>(
keypairs: &[Keypair],
genesis_time: u64,
spec: &ChainSpec,
) -> Result<BeaconState<T>, String> {
let eth1_block_hash = Hash256::from_slice(&[0x42; 32]);
let eth1_timestamp = 2_u64.pow(40);
let amount = spec.max_effective_balance;
let withdrawal_credentials = |pubkey: &PublicKey| {
let mut credentials = hash(&pubkey.as_ssz_bytes());
credentials[0] = spec.bls_withdrawal_prefix_byte;
Hash256::from_slice(&credentials)
};
let datas = keypairs
.into_par_iter()
.map(|keypair| {
let mut data = DepositData {
withdrawal_credentials: withdrawal_credentials(&keypair.pk),
pubkey: keypair.pk.clone().into(),
amount,
signature: Signature::empty_signature().into(),
};
let domain = spec.get_domain(
spec.genesis_slot.epoch(T::slots_per_epoch()),
Domain::Deposit,
&Fork::default(),
);
data.signature = Signature::new(&data.signed_root()[..], domain, &keypair.sk).into();
data
})
.collect::<Vec<_>>();
let deposit_root_leaves = datas
.par_iter()
.map(|data| Hash256::from_slice(&data.tree_hash_root()))
.collect::<Vec<_>>();
let mut proofs = vec![];
let depth = spec.deposit_contract_tree_depth as usize;
let mut tree = MerkleTree::create(&[], depth);
for (i, deposit_leaf) in deposit_root_leaves.iter().enumerate() {
if let Err(_) = tree.push_leaf(*deposit_leaf, depth) {
return Err(String::from("Failed to push leaf"));
}
let (_, mut proof) = tree.generate_proof(i, depth);
proof.push(Hash256::from_slice(&int_to_bytes32(i + 1)));
assert_eq!(
proof.len(),
depth + 1,
"Deposit proof should be correct len"
);
proofs.push(proof);
}
let deposits = datas
.into_par_iter()
.zip(proofs.into_par_iter())
.map(|(data, proof)| (data, proof.into()))
.map(|(data, proof)| Deposit { proof, data })
.collect::<Vec<_>>();
let mut state =
initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits, spec)
.map_err(|e| format!("Unable to initialize genesis state: {:?}", e))?;
state.genesis_time = genesis_time;
// Invalid all the caches after all the manual state surgery.
state.drop_all_caches();
Ok(state)
}
/// Returns `int` as little-endian bytes with a length of 32.
fn int_to_bytes32(int: usize) -> Vec<u8> {
let mut vec = int.to_le_bytes().to_vec();
vec.resize(32, 0);
vec
}
/// Returns the system time, mod 30 minutes.
///
/// Used for easily creating testnets.
fn recent_genesis_time(minutes: u64) -> u64 {
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let secs_after_last_period = now.checked_rem(minutes * 60).unwrap_or(0);
now - secs_after_last_period
}
#[cfg(test)]
mod test {
use super::*;
use types::{test_utils::generate_deterministic_keypairs, EthSpec, MinimalEthSpec};
type TestEthSpec = MinimalEthSpec;
#[test]
fn interop_state() {
let validator_count = 16;
let genesis_time = 42;
let spec = &TestEthSpec::default_spec();
let keypairs = generate_deterministic_keypairs(validator_count);
let state = interop_genesis_state::<TestEthSpec>(&keypairs, genesis_time, spec)
.expect("should build state");
assert_eq!(
state.eth1_data.block_hash,
Hash256::from_slice(&[0x42; 32]),
"eth1 block hash should be co-ordinated junk"
);
assert_eq!(
state.genesis_time, genesis_time,
"genesis time should be as specified"
);
for b in &state.balances {
assert_eq!(
*b, spec.max_effective_balance, | "validator balances should be max effective balance"
);
}
for v in &state.validators { | random_line_split | |
beacon_chain_builder.rs | ,
};
enum BuildStrategy<T: BeaconChainTypes> {
FromGenesis {
genesis_state: Box<BeaconState<T::EthSpec>>,
genesis_block: Box<BeaconBlock<T::EthSpec>>,
},
LoadFromStore,
}
pub struct BeaconChainBuilder<T: BeaconChainTypes> {
build_strategy: BuildStrategy<T>,
spec: ChainSpec,
log: Logger,
}
impl<T: BeaconChainTypes> BeaconChainBuilder<T> {
pub fn recent_genesis(
keypairs: &[Keypair],
minutes: u64,
spec: ChainSpec,
log: Logger,
) -> Result<Self, String> {
Self::quick_start(recent_genesis_time(minutes), keypairs, spec, log)
}
pub fn quick_start(
genesis_time: u64,
keypairs: &[Keypair],
spec: ChainSpec,
log: Logger,
) -> Result<Self, String> {
let genesis_state = interop_genesis_state(keypairs, genesis_time, &spec)?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn yaml_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let file = File::open(file.clone())
.map_err(|e| format!("Unable to open YAML genesis state file {:?}: {:?}", file, e))?;
let genesis_state = serde_yaml::from_reader(file)
.map_err(|e| format!("Unable to parse YAML genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn | (file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let mut file = File::open(file.clone())
.map_err(|e| format!("Unable to open SSZ genesis state file {:?}: {:?}", file, e))?;
let mut bytes = vec![];
file.read_to_end(&mut bytes)
.map_err(|e| format!("Failed to read SSZ file: {:?}", e))?;
let genesis_state = BeaconState::from_ssz_bytes(&bytes)
.map_err(|e| format!("Unable to parse SSZ genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn json_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let file = File::open(file.clone())
.map_err(|e| format!("Unable to open JSON genesis state file {:?}: {:?}", file, e))?;
let genesis_state = serde_json::from_reader(file)
.map_err(|e| format!("Unable to parse JSON genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn http_bootstrap(server: &str, spec: ChainSpec, log: Logger) -> Result<Self, String> {
let bootstrapper = Bootstrapper::connect(server.to_string(), &log)
.map_err(|e| format!("Failed to initialize bootstrap client: {}", e))?;
let (genesis_state, genesis_block) = bootstrapper
.genesis()
.map_err(|e| format!("Failed to bootstrap genesis state: {}", e))?;
Ok(Self {
build_strategy: BuildStrategy::FromGenesis {
genesis_block: Box::new(genesis_block),
genesis_state: Box::new(genesis_state),
},
spec,
log,
})
}
fn from_genesis_state(
genesis_state: BeaconState<T::EthSpec>,
spec: ChainSpec,
log: Logger,
) -> Self {
Self {
build_strategy: BuildStrategy::FromGenesis {
genesis_block: Box::new(genesis_block(&genesis_state, &spec)),
genesis_state: Box::new(genesis_state),
},
spec,
log,
}
}
pub fn from_store(spec: ChainSpec, log: Logger) -> Self {
Self {
build_strategy: BuildStrategy::LoadFromStore,
spec,
log,
}
}
pub fn build(
self,
store: Arc<T::Store>,
eth1_backend: T::Eth1Chain,
event_handler: T::EventHandler,
) -> Result<BeaconChain<T>, String> {
Ok(match self.build_strategy {
BuildStrategy::LoadFromStore => {
BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log)
.map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))?
.ok_or_else(|| "Unable to find exising BeaconChain in database.".to_string())?
}
BuildStrategy::FromGenesis {
genesis_block,
genesis_state,
} => BeaconChain::from_genesis(
store,
eth1_backend,
event_handler,
genesis_state.as_ref().clone(),
genesis_block.as_ref().clone(),
self.spec,
self.log,
)
.map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e))?,
})
}
}
/// Produces the canonical genesis block for the given state: an empty block
/// whose `state_root` is set to the state's canonical root.
fn genesis_block<T: EthSpec>(genesis_state: &BeaconState<T>, spec: &ChainSpec) -> BeaconBlock<T> {
    let mut genesis_block = BeaconBlock::empty(&spec);
    genesis_block.state_root = genesis_state.canonical_root();
    genesis_block
}
/// Builds a genesis state as defined by the Eth2 interop procedure (see below).
///
/// Every entry of `keypairs` becomes a validator depositing
/// `spec.max_effective_balance`. Returns an error if a deposit leaf cannot be
/// pushed into the Merkle tree or state initialization fails.
///
/// Reference:
/// https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start
fn interop_genesis_state<T: EthSpec>(
    keypairs: &[Keypair],
    genesis_time: u64,
    spec: &ChainSpec,
) -> Result<BeaconState<T>, String> {
    // Fixed mock eth1 values prescribed by the interop procedure.
    let eth1_block_hash = Hash256::from_slice(&[0x42; 32]);
    let eth1_timestamp = 2_u64.pow(40);
    let amount = spec.max_effective_balance;
    // Withdrawal credentials: hash of the SSZ-encoded pubkey with the first
    // byte replaced by the spec's BLS withdrawal prefix.
    let withdrawal_credentials = |pubkey: &PublicKey| {
        let mut credentials = hash(&pubkey.as_ssz_bytes());
        credentials[0] = spec.bls_withdrawal_prefix_byte;
        Hash256::from_slice(&credentials)
    };
    // Build and sign one deposit per keypair, in parallel (rayon).
    let datas = keypairs
        .into_par_iter()
        .map(|keypair| {
            let mut data = DepositData {
                withdrawal_credentials: withdrawal_credentials(&keypair.pk),
                pubkey: keypair.pk.clone().into(),
                amount,
                signature: Signature::empty_signature().into(),
            };
            let domain = spec.get_domain(
                spec.genesis_slot.epoch(T::slots_per_epoch()),
                Domain::Deposit,
                &Fork::default(),
            );
            data.signature = Signature::new(&data.signed_root()[..], domain, &keypair.sk).into();
            data
        })
        .collect::<Vec<_>>();
    let deposit_root_leaves = datas
        .par_iter()
        .map(|data| Hash256::from_slice(&data.tree_hash_root()))
        .collect::<Vec<_>>();
    // Incrementally build the deposit Merkle tree, recording one proof per leaf.
    let mut proofs = vec![];
    let depth = spec.deposit_contract_tree_depth as usize;
    let mut tree = MerkleTree::create(&[], depth);
    for (i, deposit_leaf) in deposit_root_leaves.iter().enumerate() {
        if let Err(_) = tree.push_leaf(*deposit_leaf, depth) {
            return Err(String::from("Failed to push leaf"));
        }
        let (_, mut proof) = tree.generate_proof(i, depth);
        // Final proof element mixes in the deposit count (i + 1).
        proof.push(Hash256::from_slice(&int_to_bytes32(i + 1)));
        assert_eq!(
            proof.len(),
            depth + 1,
            "Deposit proof should be correct len"
        );
        proofs.push(proof);
    }
    let deposits = datas
        .into_par_iter()
        .zip(proofs.into_par_iter())
        .map(|(data, proof)| (data, proof.into()))
        .map(|(data, proof)| Deposit { proof, data })
        .collect::<Vec<_>>();
    let mut state =
        initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits, spec)
            .map_err(|e| format!("Unable to initialize genesis state: {:?}", e))?;
    state.genesis_time = genesis_time;
    // Invalidate all the caches after all the manual state surgery.
    state.drop_all_caches();
    Ok(state)
}
/// Returns `int` as little-endian bytes with a length of 32.
fn int_to_bytes32(int: usize) -> Vec<u8> {
    // Start from the native little-endian encoding and zero-pad (on the
    // most-significant end) up to exactly 32 bytes.
    int.to_le_bytes()
        .iter()
        .copied()
        .chain(::std::iter::repeat(0u8))
        .take(32)
        .collect()
}
/// Returns the system time, rounded down to the previous whole period of
/// `minutes` minutes (i.e. `now - (now % (minutes * 60))`).
///
/// When `minutes` is zero, the current time is returned unmodified, because
/// `checked_rem` yields `None` for a zero divisor.
///
/// Used for easily creating testnets.
fn recent_genesis_time(minutes: u64) -> u64 {
    let now = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs();
    // `checked_rem` avoids a divide-by-zero panic when `minutes == 0`.
    let secs_after_last_period = now.checked_rem(minutes * 60).unwrap_or(0);
    now - secs_after_last_period
}
#[cfg(test)]
mod test {
use super::*;
use types::{ | ssz_state | identifier_name |
beacon_chain_builder.rs | ,
};
/// Determines how `BeaconChainBuilder::build` obtains its initial chain state.
enum BuildStrategy<T: BeaconChainTypes> {
    /// Start a new chain from the given genesis block/state pair.
    /// Values are boxed, presumably to keep the enum variant small.
    FromGenesis {
        genesis_state: Box<BeaconState<T::EthSpec>>,
        genesis_block: Box<BeaconBlock<T::EthSpec>>,
    },
    /// Resume an existing chain from the on-disk store.
    LoadFromStore,
}
/// Builder for a `BeaconChain`, selecting between the various genesis
/// sources (quick-start keypairs, YAML/SSZ/JSON files, HTTP bootstrap) and
/// resumption from an existing store.
pub struct BeaconChainBuilder<T: BeaconChainTypes> {
    build_strategy: BuildStrategy<T>,
    spec: ChainSpec,
    log: Logger,
}
impl<T: BeaconChainTypes> BeaconChainBuilder<T> {
/// Convenience wrapper around `quick_start` that uses the system clock,
/// rounded down to the previous `minutes`-minute boundary, as genesis time.
pub fn recent_genesis(
    keypairs: &[Keypair],
    minutes: u64,
    spec: ChainSpec,
    log: Logger,
) -> Result<Self, String> {
    Self::quick_start(recent_genesis_time(minutes), keypairs, spec, log)
}
/// Instantiates the builder with an interop-style genesis state generated
/// from the given validator keypairs (see `interop_genesis_state`).
pub fn quick_start(
    genesis_time: u64,
    keypairs: &[Keypair],
    spec: ChainSpec,
    log: Logger,
) -> Result<Self, String> {
    let genesis_state = interop_genesis_state(keypairs, genesis_time, &spec)?;
    Ok(Self::from_genesis_state(genesis_state, spec, log))
}
/// Instantiates the builder from a YAML-encoded genesis state on disk.
pub fn yaml_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
    let reader = File::open(file.clone())
        .map_err(|e| format!("Unable to open YAML genesis state file {:?}: {:?}", file, e))?;
    match serde_yaml::from_reader(reader) {
        Ok(genesis_state) => Ok(Self::from_genesis_state(genesis_state, spec, log)),
        Err(e) => Err(format!("Unable to parse YAML genesis state file: {:?}", e)),
    }
}
/// Instantiates the builder from an SSZ-encoded genesis state on disk.
pub fn ssz_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> {
    // Slurp the whole file into memory before decoding.
    let mut bytes = vec![];
    File::open(file.clone())
        .map_err(|e| format!("Unable to open SSZ genesis state file {:?}: {:?}", file, e))?
        .read_to_end(&mut bytes)
        .map_err(|e| format!("Failed to read SSZ file: {:?}", e))?;
    let genesis_state = BeaconState::from_ssz_bytes(&bytes)
        .map_err(|e| format!("Unable to parse SSZ genesis state file: {:?}", e))?;
    Ok(Self::from_genesis_state(genesis_state, spec, log))
}
pub fn json_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> |
/// Instantiates the builder by fetching the genesis state and block from a
/// remote beacon node over HTTP.
pub fn http_bootstrap(server: &str, spec: ChainSpec, log: Logger) -> Result<Self, String> {
    let bootstrapper = Bootstrapper::connect(server.to_string(), &log)
        .map_err(|e| format!("Failed to initialize bootstrap client: {}", e))?;
    let (genesis_state, genesis_block) = bootstrapper
        .genesis()
        .map_err(|e| format!("Failed to bootstrap genesis state: {}", e))?;
    let build_strategy = BuildStrategy::FromGenesis {
        genesis_block: Box::new(genesis_block),
        genesis_state: Box::new(genesis_state),
    };
    Ok(Self {
        build_strategy,
        spec,
        log,
    })
}
/// Builds `Self` around an in-memory genesis state, deriving the matching
/// genesis block from that state.
fn from_genesis_state(
    genesis_state: BeaconState<T::EthSpec>,
    spec: ChainSpec,
    log: Logger,
) -> Self {
    // Derive the block before the state is moved into the strategy.
    let block = genesis_block(&genesis_state, &spec);
    let build_strategy = BuildStrategy::FromGenesis {
        genesis_block: Box::new(block),
        genesis_state: Box::new(genesis_state),
    };
    Self {
        build_strategy,
        spec,
        log,
    }
}
/// Builds `Self` so that the chain will be resumed from an existing database
/// when `build` is called.
pub fn from_store(spec: ChainSpec, log: Logger) -> Self {
    let build_strategy = BuildStrategy::LoadFromStore;
    Self {
        build_strategy,
        spec,
        log,
    }
}
/// Consumes the builder and produces a `BeaconChain`.
///
/// Depending on the strategy this builder was constructed with, either loads
/// an existing chain from `store` or initializes a brand-new one from the
/// stored genesis block/state pair.
///
/// Returns an error string if loading fails, no existing chain is found, or
/// genesis initialization fails.
pub fn build(
    self,
    store: Arc<T::Store>,
    eth1_backend: T::Eth1Chain,
    event_handler: T::EventHandler,
) -> Result<BeaconChain<T>, String> {
    Ok(match self.build_strategy {
        BuildStrategy::LoadFromStore => {
            BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log)
                .map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))?
                // Fixed typo in the user-facing message: "exising" -> "existing".
                .ok_or_else(|| "Unable to find existing BeaconChain in database.".to_string())?
        }
        BuildStrategy::FromGenesis {
            genesis_block,
            genesis_state,
        } => BeaconChain::from_genesis(
            store,
            eth1_backend,
            event_handler,
            // Clone out of the boxes; `from_genesis` takes owned values.
            genesis_state.as_ref().clone(),
            genesis_block.as_ref().clone(),
            self.spec,
            self.log,
        )
        .map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e))?,
    })
}
}
/// Produces the canonical genesis block for the given state: an empty block
/// whose `state_root` is set to the state's canonical root.
fn genesis_block<T: EthSpec>(genesis_state: &BeaconState<T>, spec: &ChainSpec) -> BeaconBlock<T> {
    let mut block = BeaconBlock::empty(spec);
    block.state_root = genesis_state.canonical_root();
    block
}
/// Builds a genesis state as defined by the Eth2 interop procedure (see below).
///
/// Every entry of `keypairs` becomes a validator depositing
/// `spec.max_effective_balance`. Returns an error if a deposit leaf cannot be
/// pushed into the Merkle tree or state initialization fails.
///
/// Reference:
/// https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start
fn interop_genesis_state<T: EthSpec>(
    keypairs: &[Keypair],
    genesis_time: u64,
    spec: &ChainSpec,
) -> Result<BeaconState<T>, String> {
    // Fixed mock eth1 values prescribed by the interop procedure.
    let eth1_block_hash = Hash256::from_slice(&[0x42; 32]);
    let eth1_timestamp = 2_u64.pow(40);
    let amount = spec.max_effective_balance;
    // Withdrawal credentials: hash of the SSZ-encoded pubkey with the first
    // byte replaced by the spec's BLS withdrawal prefix.
    let withdrawal_credentials = |pubkey: &PublicKey| {
        let mut credentials = hash(&pubkey.as_ssz_bytes());
        credentials[0] = spec.bls_withdrawal_prefix_byte;
        Hash256::from_slice(&credentials)
    };
    // Build and sign one deposit per keypair, in parallel (rayon).
    let datas = keypairs
        .into_par_iter()
        .map(|keypair| {
            let mut data = DepositData {
                withdrawal_credentials: withdrawal_credentials(&keypair.pk),
                pubkey: keypair.pk.clone().into(),
                amount,
                signature: Signature::empty_signature().into(),
            };
            let domain = spec.get_domain(
                spec.genesis_slot.epoch(T::slots_per_epoch()),
                Domain::Deposit,
                &Fork::default(),
            );
            data.signature = Signature::new(&data.signed_root()[..], domain, &keypair.sk).into();
            data
        })
        .collect::<Vec<_>>();
    let deposit_root_leaves = datas
        .par_iter()
        .map(|data| Hash256::from_slice(&data.tree_hash_root()))
        .collect::<Vec<_>>();
    // Incrementally build the deposit Merkle tree, recording one proof per leaf.
    let mut proofs = vec![];
    let depth = spec.deposit_contract_tree_depth as usize;
    let mut tree = MerkleTree::create(&[], depth);
    for (i, deposit_leaf) in deposit_root_leaves.iter().enumerate() {
        if let Err(_) = tree.push_leaf(*deposit_leaf, depth) {
            return Err(String::from("Failed to push leaf"));
        }
        let (_, mut proof) = tree.generate_proof(i, depth);
        // Final proof element mixes in the deposit count (i + 1).
        proof.push(Hash256::from_slice(&int_to_bytes32(i + 1)));
        assert_eq!(
            proof.len(),
            depth + 1,
            "Deposit proof should be correct len"
        );
        proofs.push(proof);
    }
    let deposits = datas
        .into_par_iter()
        .zip(proofs.into_par_iter())
        .map(|(data, proof)| (data, proof.into()))
        .map(|(data, proof)| Deposit { proof, data })
        .collect::<Vec<_>>();
    let mut state =
        initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits, spec)
            .map_err(|e| format!("Unable to initialize genesis state: {:?}", e))?;
    state.genesis_time = genesis_time;
    // Invalidate all the caches after all the manual state surgery.
    state.drop_all_caches();
    Ok(state)
}
/// Returns `int` as little-endian bytes with a length of 32.
fn int_to_bytes32(int: usize) -> Vec<u8> {
    let mut vec = int.to_le_bytes().to_vec();
    // Zero-pad (on the most-significant end, given little-endian) to 32 bytes.
    vec.resize(32, 0);
    vec
}
/// Returns the system time, rounded down to the previous whole period of
/// `minutes` minutes (i.e. `now - (now % (minutes * 60))`).
///
/// When `minutes` is zero, the current time is returned unmodified, because
/// `checked_rem` yields `None` for a zero divisor.
///
/// Used for easily creating testnets.
fn recent_genesis_time(minutes: u64) -> u64 {
    let now = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs();
    // `checked_rem` avoids a divide-by-zero panic when `minutes == 0`.
    let secs_after_last_period = now.checked_rem(minutes * 60).unwrap_or(0);
    now - secs_after_last_period
}
#[cfg(test)]
mod test {
use super::*;
use types | {
let file = File::open(file.clone())
.map_err(|e| format!("Unable to open JSON genesis state file {:?}: {:?}", file, e))?;
let genesis_state = serde_json::from_reader(file)
.map_err(|e| format!("Unable to parse JSON genesis state file: {:?}", e))?;
Ok(Self::from_genesis_state(genesis_state, spec, log))
} | identifier_body |
networking.rs | {
pub graph: u64, // graph identifier
pub channel: u64, // index of channel
pub source: u64, // index of worker sending message
pub target: u64, // index of worker receiving message
pub length: u64, // number of bytes in message
}
impl MessageHeader {
    // returns a header when there is enough supporting data
    //
    // Reads five little-endian u64 fields and only commits the read if the
    // full payload (`length` bytes) is also present; otherwise the cursor is
    // rewound and `None` is returned so the caller can retry after more data
    // arrives.
    //
    // NOTE(review): the `>` comparison demands strictly more than
    // `size_of::<MessageHeader>()` bytes, so a buffer holding exactly one
    // zero-length message never parses — confirm whether `>=` was intended.
    fn try_read(bytes: &mut &[u8]) -> Option<MessageHeader> {
        if bytes.len() > size_of::<MessageHeader>() {
            // capture original in case we need to rewind
            let original = *bytes;
            // unclear what order struct initializers run in, so ...
            let graph = bytes.read_u64::<LittleEndian>().unwrap();
            let channel = bytes.read_u64::<LittleEndian>().unwrap();
            let source = bytes.read_u64::<LittleEndian>().unwrap();
            let target = bytes.read_u64::<LittleEndian>().unwrap();
            let length = bytes.read_u64::<LittleEndian>().unwrap();
            if bytes.len() >= length as usize {
                Some(MessageHeader {
                    graph: graph,
                    channel: channel,
                    source: source,
                    target: target,
                    length: length,
                })
            }
            else {
                // rewind the reader
                *bytes = original;
                None
            }
        }
        else { None }
    }
    // Serializes the header as five little-endian u64s, mirroring the field
    // order consumed by `try_read`.
    fn write_to<W: Write>(&self, writer: &mut W) -> Result<()> {
        try!(writer.write_u64::<LittleEndian>(self.graph));
        try!(writer.write_u64::<LittleEndian>(self.channel));
        try!(writer.write_u64::<LittleEndian>(self.source));
        try!(writer.write_u64::<LittleEndian>(self.target));
        try!(writer.write_u64::<LittleEndian>(self.length));
        Ok(())
    }
}
// // structure in charge of receiving data from a Reader, for example the network
// struct BinaryReceiver<R: Read> {
// reader: R, // the generic reader
// buffer: Vec<u8>, // current working buffer
// double: Vec<u8>, // second working buffer
// staging: Vec<u8>, // 1 << 20 of buffer to read into
// targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>,
// }
//
// impl<R: Read> BinaryReceiver<R> {
// fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> {
// BinaryReceiver {
// reader: reader,
// buffer: Vec::new(),
// double: Vec::new(),
// staging: vec![0u8; 1 << 20],
// targets: Switchboard::new(channels),
// }
// }
//
// fn recv_loop(&mut self) {
// loop {
//
// // attempt to read some more bytes into our buffer
// // TODO : We read in to self.staging because extending a Vec<u8> is hard without
// // TODO : using set_len, which is unsafe.
// // TODO : Could consider optimizing for the self.buffer.len() == 0 case, swapping
// // TODO : self.staging with self.buffer, rather than using write_all.
// let read = self.reader.read(&mut self.staging[..]).unwrap_or(0);
// self.buffer.write_all(&self.staging[..read]).unwrap(); // <-- shouldn't fail
//
// {
// // get a view of available bytes
// let mut slice = &self.buffer[..];
//
// while let Some(header) = MessageHeader::try_read(&mut slice) {
//
// let h_len = header.length as usize; // length in bytes
// let target = self.targets.ensure(header.target, header.graph, header.channel);
// let mut buffer = target.1.try_recv().unwrap_or(Vec::new());
//
// buffer.clear();
// buffer.write_all(&slice[..h_len]).unwrap();
// slice = &slice[h_len..];
//
// target.0.send(buffer).unwrap();
// }
//
// // TODO: way inefficient... =/ Fix! :D
// // if slice.len() < self.buffer.len() {
// self.double.clear();
// self.double.write_all(slice).unwrap();
// // }
// }
//
// // if self.double.len() > 0 {
// mem::swap(&mut self.buffer, &mut self.double);
// // self.double.clear();
// // }
// }
// }
// }
// structure in charge of receiving data from a Reader, for example the network
struct BinaryReceiver<R: Read> {
    reader: R, // the generic reader
    buffer: Vec<u8>, // current working buffer
    length: usize, // number of valid bytes at the front of `buffer`
    targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>, // per-(worker, graph, channel) destinations
}
impl<R: Read> BinaryReceiver<R> {
    fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> {
        BinaryReceiver {
            reader: reader,
            buffer: vec![0u8; 1 << 20], // 1 MiB initial read buffer
            length: 0,
            targets: Switchboard::new(channels),
        }
    }
    // Endlessly reads from `self.reader`, carves complete messages out of the
    // buffer, and forwards each payload to the channel registered for its
    // (target, graph, channel) triple.
    fn recv_loop(&mut self) {
        loop {
            // if we've mostly filled our buffer and still can't read a whole message from it,
            // we'll need more space / to read more at once. let's double the buffer!
            if self.length >= self.buffer.len() / 2 {
                self.buffer.extend(::std::iter::repeat(0u8).take(self.length));
            }
            // attempt to read some more bytes into our buffer
            let read = self.reader.read(&mut self.buffer[self.length..]).unwrap_or(0);
            self.length += read;
            let remaining = {
                let mut slice = &self.buffer[..self.length];
                // Drain as many complete (header + payload) messages as are present.
                while let Some(header) = MessageHeader::try_read(&mut slice) {
                    let h_len = header.length as usize; // length in bytes
                    let target = &mut self.targets.ensure(header.target, header.graph, header.channel).0;
                    target.send(slice[..h_len].to_vec()).unwrap();
                    slice = &slice[h_len..];
                }
                slice.len()
            };
            // we consumed bytes, must shift to beginning.
            // this should optimize to copy_overlapping;
            // would just do that if it weren't unsafe =/
            if remaining < self.length {
                for index in 0..remaining {
                    self.buffer[index] = self.buffer[index + self.length - remaining];
                }
                self.length = remaining;
            }
        }
    }
}
// structure in charge of sending data to a Writer, for example the network
struct BinarySender<W: Write> {
    id: u64, // destination process
    writer: W,
    sources: Receiver<(MessageHeader, Vec<u8>)>, // (header, payload) pairs queued for transmission
    returns: Switchboard<Sender<Vec<u8>>>, // buffer-recycling channels (the send in `send_loop` is commented out)
}
impl<W: Write> BinarySender<W> {
    fn new(id: u64,
           writer: W,
           sources: Receiver<(MessageHeader, Vec<u8>)>,
           channels: Receiver<((u64, u64, u64), Sender<Vec<u8>>)>) -> BinarySender<W> {
        BinarySender {
            id: id,
            writer: writer,
            sources: sources,
            returns: Switchboard::new(channels),
        }
    }
    // Drains `sources`, writing each (header, payload) pair to `writer` and
    // flushing once per batch. Exits when every sending side of `sources`
    // hangs up (recv returns Err).
    fn send_loop(&mut self) {
        let mut stash = Vec::new();
        // block until data to recv
        while let Ok((header, buffer)) = self.sources.recv() {
            stash.push((header, buffer));
            // collect any additional outstanding data to send
            while let Ok((header, buffer)) = self.sources.try_recv() {
                stash.push((header, buffer));
            }
            // println!("send loop to process {}:\tstarting", self.id);
            for (mut header, mut buffer) in stash.drain_temp() {
                header.length = buffer.len() as u64; // <-- is this really our job? O.o
                header.write_to(&mut self.writer).unwrap();
                self.writer.write_all(&buffer[..]).unwrap();
                buffer.clear();
                // self.returns.ensure(header.source, header.graph, header.channel).send(buffer).unwrap();
            }
            self.writer.flush().unwrap(); // <-- because writer is buffered
        }
    }
}
struct Switchboard<T:Send> {
source: Receiver<((u64, u64, u64), T)>,
buffer: Vec<Vec<Vec<Option<T>>>>,
}
impl<T:Send> Switchboard<T> {
pub fn new(source: Receiver<((u64, u64, u64), T)>) -> Switchboard<T> {
Switchboard {
source: source,
buffer: Vec::new(),
}
}
pub fn ensure(&mut self, a: u64, b: u64, c: u64) -> &mut T {
let a = a as usize;
let b = b as usize;
let c = c as usize | MessageHeader | identifier_name | |
networking.rs | // rewind the reader
*bytes = original;
None
}
}
else { None }
}
fn write_to<W: Write>(&self, writer: &mut W) -> Result<()> {
try!(writer.write_u64::<LittleEndian>(self.graph));
try!(writer.write_u64::<LittleEndian>(self.channel));
try!(writer.write_u64::<LittleEndian>(self.source));
try!(writer.write_u64::<LittleEndian>(self.target));
try!(writer.write_u64::<LittleEndian>(self.length));
Ok(())
}
}
// // structure in charge of receiving data from a Reader, for example the network
// struct BinaryReceiver<R: Read> {
// reader: R, // the generic reader
// buffer: Vec<u8>, // current working buffer
// double: Vec<u8>, // second working buffer
// staging: Vec<u8>, // 1 << 20 of buffer to read into
// targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>,
// }
//
// impl<R: Read> BinaryReceiver<R> {
// fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> {
// BinaryReceiver {
// reader: reader,
// buffer: Vec::new(),
// double: Vec::new(),
// staging: vec![0u8; 1 << 20],
// targets: Switchboard::new(channels),
// }
// }
//
// fn recv_loop(&mut self) {
// loop {
//
// // attempt to read some more bytes into our buffer
// // TODO : We read in to self.staging because extending a Vec<u8> is hard without
// // TODO : using set_len, which is unsafe.
// // TODO : Could consider optimizing for the self.buffer.len() == 0 case, swapping
// // TODO : self.staging with self.buffer, rather than using write_all.
// let read = self.reader.read(&mut self.staging[..]).unwrap_or(0);
// self.buffer.write_all(&self.staging[..read]).unwrap(); // <-- shouldn't fail
//
// {
// // get a view of available bytes
// let mut slice = &self.buffer[..];
//
// while let Some(header) = MessageHeader::try_read(&mut slice) {
//
// let h_len = header.length as usize; // length in bytes
// let target = self.targets.ensure(header.target, header.graph, header.channel);
// let mut buffer = target.1.try_recv().unwrap_or(Vec::new());
//
// buffer.clear();
// buffer.write_all(&slice[..h_len]).unwrap();
// slice = &slice[h_len..];
//
// target.0.send(buffer).unwrap();
// }
//
// // TODO: way inefficient... =/ Fix! :D
// // if slice.len() < self.buffer.len() {
// self.double.clear();
// self.double.write_all(slice).unwrap();
// // }
// }
//
// // if self.double.len() > 0 {
// mem::swap(&mut self.buffer, &mut self.double);
// // self.double.clear();
// // }
// }
// }
// }
// structure in charge of receiving data from a Reader, for example the network
struct BinaryReceiver<R: Read> {
reader: R, // the generic reader
buffer: Vec<u8>, // current working buffer
length: usize,
targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>,
}
impl<R: Read> BinaryReceiver<R> {
fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> {
BinaryReceiver {
reader: reader,
buffer: vec![0u8; 1 << 20],
length: 0,
targets: Switchboard::new(channels),
}
}
fn recv_loop(&mut self) {
loop {
// if we've mostly filled our buffer and still can't read a whole message from it, | // we'll need more space / to read more at once. let's double the buffer!
if self.length >= self.buffer.len() / 2 {
self.buffer.extend(::std::iter::repeat(0u8).take(self.length));
}
// attempt to read some more bytes into our buffer
let read = self.reader.read(&mut self.buffer[self.length..]).unwrap_or(0);
self.length += read;
let remaining = {
let mut slice = &self.buffer[..self.length];
while let Some(header) = MessageHeader::try_read(&mut slice) {
let h_len = header.length as usize; // length in bytes
let target = &mut self.targets.ensure(header.target, header.graph, header.channel).0;
target.send(slice[..h_len].to_vec()).unwrap();
slice = &slice[h_len..];
}
slice.len()
};
// we consumed bytes, must shift to beginning.
// this should optimize to copy_overlapping;
// would just do that if it weren't unsafe =/
if remaining < self.length {
for index in 0..remaining {
self.buffer[index] = self.buffer[index + self.length - remaining];
}
self.length = remaining;
}
}
}
}
// structure in charge of sending data to a Writer, for example the network
struct BinarySender<W: Write> {
id: u64, // destination process
writer: W,
sources: Receiver<(MessageHeader, Vec<u8>)>,
returns: Switchboard<Sender<Vec<u8>>>,
}
impl<W: Write> BinarySender<W> {
fn new(id: u64,
writer: W,
sources: Receiver<(MessageHeader, Vec<u8>)>,
channels: Receiver<((u64, u64, u64), Sender<Vec<u8>>)>) -> BinarySender<W> {
BinarySender {
id: id,
writer: writer,
sources: sources,
returns: Switchboard::new(channels),
}
}
fn send_loop(&mut self) {
let mut stash = Vec::new();
// block until data to recv
while let Ok((header, buffer)) = self.sources.recv() {
stash.push((header, buffer));
// collect any additional outstanding data to send
while let Ok((header, buffer)) = self.sources.try_recv() {
stash.push((header, buffer));
}
// println!("send loop to process {}:\tstarting", self.id);
for (mut header, mut buffer) in stash.drain_temp() {
header.length = buffer.len() as u64; // <-- is this really our job? O.o
header.write_to(&mut self.writer).unwrap();
self.writer.write_all(&buffer[..]).unwrap();
buffer.clear();
// self.returns.ensure(header.source, header.graph, header.channel).send(buffer).unwrap();
}
self.writer.flush().unwrap(); // <-- because writer is buffered
}
}
}
// Lazily-populated three-level routing table: (worker, graph, channel) -> T.
// Entries are announced asynchronously over `source` and installed on demand.
struct Switchboard<T:Send> {
    source: Receiver<((u64, u64, u64), T)>,
    buffer: Vec<Vec<Vec<Option<T>>>>,
}
impl<T:Send> Switchboard<T> {
    pub fn new(source: Receiver<((u64, u64, u64), T)>) -> Switchboard<T> {
        Switchboard {
            source: source,
            buffer: Vec::new(),
        }
    }
    // Returns the entry for (a, b, c), blocking on `source` until that entry
    // (and any others announced before it) has been delivered and installed.
    // Panics if `source` disconnects before the entry arrives.
    pub fn ensure(&mut self, a: u64, b: u64, c: u64) -> &mut T {
        let a = a as usize;
        let b = b as usize;
        let c = c as usize;
        // Grow the nested vectors so that index [a][b][c] exists.
        while self.buffer.len() <= a { self.buffer.push(Vec::new()); }
        while self.buffer[a].len() <= b { self.buffer[a].push(Vec::new()); }
        while self.buffer[a][b].len() <= c { self.buffer[a][b].push(None); }
        // Install announced entries until the requested slot is populated.
        while let None = self.buffer[a][b][c] {
            let ((x, y, z), s) = self.source.recv().unwrap();
            let x = x as usize;
            let y = y as usize;
            let z = z as usize;
            while self.buffer.len() <= x { self.buffer.push(Vec::new()); }
            while self.buffer[x].len() <= y { self.buffer[x].push(Vec::new()); }
            while self.buffer[x][y].len() <= z { self.buffer[x][y].push(None); }
            self.buffer[x][y][z] = Some(s);
        }
        // we've just ensured that this is not None
        self.buffer[a][b][c].as_mut().unwrap()
    }
}
pub fn initialize_networking_from_file(filename: &str, my_index: u64, workers: u64) -> Result<Vec<Binary>> {
let reader = BufReader::new(try!(File::open(filename)));
let mut addresses = Vec::new();
for line in | random_line_split | |
networking.rs |
let read = self.reader.read(&mut self.buffer[self.length..]).unwrap_or(0);
self.length += read;
let remaining = {
let mut slice = &self.buffer[..self.length];
while let Some(header) = MessageHeader::try_read(&mut slice) {
let h_len = header.length as usize; // length in bytes
let target = &mut self.targets.ensure(header.target, header.graph, header.channel).0;
target.send(slice[..h_len].to_vec()).unwrap();
slice = &slice[h_len..];
}
slice.len()
};
// we consumed bytes, must shift to beginning.
// this should optimize to copy_overlapping;
// would just do that if it weren't unsafe =/
if remaining < self.length {
for index in 0..remaining {
self.buffer[index] = self.buffer[index + self.length - remaining];
}
self.length = remaining;
}
}
}
}
// structure in charge of sending data to a Writer, for example the network
struct BinarySender<W: Write> {
id: u64, // destination process
writer: W,
sources: Receiver<(MessageHeader, Vec<u8>)>,
returns: Switchboard<Sender<Vec<u8>>>,
}
impl<W: Write> BinarySender<W> {
fn new(id: u64,
writer: W,
sources: Receiver<(MessageHeader, Vec<u8>)>,
channels: Receiver<((u64, u64, u64), Sender<Vec<u8>>)>) -> BinarySender<W> {
BinarySender {
id: id,
writer: writer,
sources: sources,
returns: Switchboard::new(channels),
}
}
fn send_loop(&mut self) {
let mut stash = Vec::new();
// block until data to recv
while let Ok((header, buffer)) = self.sources.recv() {
stash.push((header, buffer));
// collect any additional outstanding data to send
while let Ok((header, buffer)) = self.sources.try_recv() {
stash.push((header, buffer));
}
// println!("send loop to process {}:\tstarting", self.id);
for (mut header, mut buffer) in stash.drain_temp() {
header.length = buffer.len() as u64; // <-- is this really our job? O.o
header.write_to(&mut self.writer).unwrap();
self.writer.write_all(&buffer[..]).unwrap();
buffer.clear();
// self.returns.ensure(header.source, header.graph, header.channel).send(buffer).unwrap();
}
self.writer.flush().unwrap(); // <-- because writer is buffered
}
}
}
struct Switchboard<T:Send> {
source: Receiver<((u64, u64, u64), T)>,
buffer: Vec<Vec<Vec<Option<T>>>>,
}
impl<T:Send> Switchboard<T> {
pub fn new(source: Receiver<((u64, u64, u64), T)>) -> Switchboard<T> {
Switchboard {
source: source,
buffer: Vec::new(),
}
}
pub fn ensure(&mut self, a: u64, b: u64, c: u64) -> &mut T {
let a = a as usize;
let b = b as usize;
let c = c as usize;
while self.buffer.len() <= a { self.buffer.push(Vec::new()); }
while self.buffer[a].len() <= b { self.buffer[a].push(Vec::new()); }
while self.buffer[a][b].len() <= c { self.buffer[a][b].push(None); }
while let None = self.buffer[a][b][c] {
let ((x, y, z), s) = self.source.recv().unwrap();
let x = x as usize;
let y = y as usize;
let z = z as usize;
while self.buffer.len() <= x { self.buffer.push(Vec::new()); }
while self.buffer[x].len() <= y { self.buffer[x].push(Vec::new()); }
while self.buffer[x][y].len() <= z { self.buffer[x][y].push(None); }
self.buffer[x][y][z] = Some(s);
}
// we've just ensured that this is not None
self.buffer[a][b][c].as_mut().unwrap()
}
}
// Reads one network address per line from `filename` and delegates to
// `initialize_networking`. Propagates I/O errors from opening/reading the file.
pub fn initialize_networking_from_file(filename: &str, my_index: u64, workers: u64) -> Result<Vec<Binary>> {
    let reader = BufReader::new(try!(File::open(filename)));
    let mut addresses = Vec::new();
    for line in reader.lines() {
        addresses.push(try!(line));
    }
    initialize_networking(addresses, my_index, workers)
}
// Establishes TCP connections with every other process (dialing lower-indexed
// peers, accepting from higher-indexed ones), spawns one sender and one
// receiver thread per remote stream, and returns one `Binary` allocator per
// local worker.
pub fn initialize_networking(addresses: Vec<String>, my_index: u64, workers: u64) -> Result<Vec<Binary>> {
    let processes = addresses.len() as u64;
    let hosts1 = Arc::new(addresses);
    let hosts2 = hosts1.clone();
    // Dial and listen concurrently so peers doing the same do not deadlock.
    let start_task = thread::spawn(move || start_connections(hosts1, my_index));
    let await_task = thread::spawn(move || await_connections(hosts2, my_index));
    let mut results = try!(start_task.join().unwrap());
    results.push(None); // `None` stands in for this process itself (no stream).
    let mut to_extend = try!(await_task.join().unwrap());
    results.extend(to_extend.drain_temp());
    println!("worker {}:\tinitialization complete", my_index);
    let mut writers = Vec::new(); // handles to the BinarySenders (to present new channels)
    let mut readers = Vec::new(); // handles to the BinaryReceivers (to present new channels)
    let mut senders = Vec::new(); // destinations for serialized data (to send serialized data)
    // for each process, if a stream exists (i.e. not local) ...
    for index in (0..results.len()) {
        if let Some(stream) = results[index].take() {
            let (writer_channels_s, writer_channels_r) = channel();
            let (reader_channels_s, reader_channels_r) = channel();
            let (sender_channels_s, sender_channels_r) = channel();
            writers.push(writer_channels_s); //
            readers.push(reader_channels_s); //
            senders.push(sender_channels_s); //
            let mut sender = BinarySender::new(index as u64,
                                               BufWriter::with_capacity(1 << 20, stream.try_clone().unwrap()),
                                               sender_channels_r,
                                               writer_channels_r);
            let mut recver = BinaryReceiver::new(stream.try_clone().unwrap(), reader_channels_r);
            // start senders and receivers associated with this stream
            thread::Builder::new().name(format!("send thread {}", index))
                                  .spawn(move || sender.send_loop())
                                  .unwrap();
            thread::Builder::new().name(format!("recv thread {}", index))
                                  .spawn(move || recver.recv_loop())
                                  .unwrap();
        }
    }
    let proc_comms = Process::new_vector(workers);
    let mut results = Vec::new();
    for (index, proc_comm) in proc_comms.into_iter().enumerate() {
        results.push(Binary {
            inner: proc_comm,
            index: my_index * workers + index as u64,
            peers: workers * processes,
            graph: 0, // TODO : Fix this
            allocated: 0,
            writers: writers.clone(),
            readers: readers.clone(),
            senders: senders.clone(),
        });
    }
    return Ok(results);
}
// result contains connections [0, my_index - 1].
//
// Dials each lower-indexed peer, announcing our own index as the first u64 on
// the stream (consumed by the peer's `await_connections`). Retries each
// connection every second until it succeeds.
fn start_connections(addresses: Arc<Vec<String>>, my_index: u64) -> Result<Vec<Option<TcpStream>>> {
    let mut results: Vec<_> = (0..my_index).map(|_| None).collect();
    for index in (0..my_index) {
        let mut connected = false;
        while !connected {
            match TcpStream::connect(&addresses[index as usize][..]) {
                Ok(mut stream) => {
                    // Identify ourselves to the accepting side.
                    try!(stream.write_u64::<LittleEndian>(my_index));
                    results[index as usize] = Some(stream);
                    println!("worker {}:\tconnection to worker {}", my_index, index);
                    connected = true;
                },
                Err(error) => {
                    println!("worker {}:\terror connecting to worker {}: {}; retrying", my_index, index, error);
                    sleep_ms(1000);
                },
            }
        }
    }
    return Ok(results);
}
// result contains connections [my_index + 1, addresses.len() - 1].
//
// Accepts one connection per higher-indexed peer; each peer announces its
// index as the first u64 on the stream (written by `start_connections`), which
// determines its slot in the result vector.
fn await_connections(addresses: Arc<Vec<String>>, my_index: u64) -> Result<Vec<Option<TcpStream>>> {
    let mut results: Vec<_> = (0..(addresses.len() - my_index as usize - 1)).map(|_| None).collect();
    let listener = try!(TcpListener::bind(&addresses[my_index as usize][..]));
    for _ in (my_index as usize + 1 .. addresses.len()) {
        let mut stream = try!(listener.accept()).0;
        let identifier = try!(stream.read_u64::<LittleEndian>()) as usize;
        results[identifier - my_index as usize - 1] = Some(stream);
        println!("worker {}:\tconnection from worker {}", my_index, identifier);
    }
    return Ok(results);
}
traits.rs | Color::On as Black, prelude::*, primitives::{Line, PrimitiveStyle},
///};
///use epd_waveshare::{epd4in2::*, prelude::*};
///#
///# let expectations = [];
///# let mut spi = spi::Mock::new(&expectations);
///# let expectations = [];
///# let cs_pin = pin::Mock::new(&expectations);
///# let busy_in = pin::Mock::new(&expectations);
///# let dc = pin::Mock::new(&expectations);
///# let rst = pin::Mock::new(&expectations);
///# let mut delay = delay::MockNoop::new();
///
///// Setup EPD
///let mut epd = Epd4in2::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?;
///
///// Use display graphics from embedded-graphics
///let mut display = Display4in2::default();
///
///// Use embedded graphics for drawing a line
///
///let _ = Line::new(Point::new(0, 120), Point::new(0, 295))
/// .into_styled(PrimitiveStyle::with_stroke(Color::Black, 1))
/// .draw(&mut display);
///
/// // Display updated frame
///epd.update_frame(&mut spi, &display.buffer(), &mut delay)?;
///epd.display_frame(&mut spi, &mut delay)?;
///
///// Set the EPD to sleep
///epd.sleep(&mut spi, &mut delay)?;
///# Ok(())
///# }
///```
pub trait WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY>
where
SPI: Write<u8>,
CS: OutputPin,
BUSY: InputPin,
DC: OutputPin,
RST: OutputPin,
DELAY: DelayUs<u32>,
{
/// The Color Type used by the Display
type DisplayColor;
/// Creates a new driver from a SPI peripheral, CS Pin, Busy InputPin, DC
///
/// `delay_us` is the number of us the idle loop should sleep on.
/// Setting it to 0 implies busy waiting.
/// Setting it to None means a default value is used.
///
/// This already initialises the device.
fn new(
spi: &mut SPI,
cs: CS,
busy: BUSY,
dc: DC,
rst: RST,
delay: &mut DELAY,
delay_us: Option<u32>,
) -> Result<Self, SPI::Error>
where
Self: Sized;
/// Let the device enter deep-sleep mode to save power.
///
/// The deep sleep mode returns to standby with a hardware reset.
fn sleep(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
/// Wakes the device up from sleep
///
/// Also reintialises the device if necessary.
fn wake_up(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
/// Sets the backgroundcolor for various commands like [clear_frame](WaveshareDisplay::clear_frame)
fn set_background_color(&mut self, color: Self::DisplayColor);
/// Get current background color
fn background_color(&self) -> &Self::DisplayColor;
/// Get the width of the display
fn width(&self) -> u32;
/// Get the height of the display
fn height(&self) -> u32;
/// Transmit a full frame to the SRAM of the EPD
fn update_frame(
&mut self,
spi: &mut SPI,
buffer: &[u8],
delay: &mut DELAY,
) -> Result<(), SPI::Error>;
/// Transmits partial data to the SRAM of the EPD
///
/// (x,y) is the top left corner
///
/// BUFFER needs to be of size: width / 8 * height !
#[allow(clippy::too_many_arguments)]
fn update_partial_frame(
&mut self,
spi: &mut SPI,
delay: &mut DELAY,
buffer: &[u8],
x: u32,
y: u32,
width: u32,
height: u32,
) -> Result<(), SPI::Error>;
/// Displays the frame data from SRAM
///
/// This function waits until the device isn`t busy anymore
fn display_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
/// Provide a combined update&display and save some time (skipping a busy check in between)
fn update_and_display_frame(
&mut self,
spi: &mut SPI,
buffer: &[u8],
delay: &mut DELAY,
) -> Result<(), SPI::Error>;
/// Clears the frame buffer on the EPD with the declared background color
///
/// The background color can be changed with [`WaveshareDisplay::set_background_color`]
fn clear_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
/// Trait for using various Waveforms from different LUTs
/// E.g. for partial refreshes
///
/// A full refresh is needed after a certain amount of quick refreshes!
///
/// WARNING: Quick Refresh might lead to ghosting-effects/problems with your display. Especially for the 4.2in Display!
///
/// If None is used the old value will be loaded on the LUTs once more
fn set_lut(
&mut self,
spi: &mut SPI,
delay: &mut DELAY,
refresh_rate: Option<RefreshLut>,
) -> Result<(), SPI::Error>;
/// Wait until the display has stopped processing data
///
/// You can call this to make sure a frame is displayed before goin further
fn wait_until_idle(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
}
/// Allows quick refresh support for displays that support it; lets you send both
/// old and new frame data to support this.
///
/// When using the quick refresh look-up table, the display must receive separate display
/// buffer data marked as old, and new. This is used to determine which pixels need to change,
/// and how they will change. This isn't required when using full refreshes.
///
/// (todo: Example ommitted due to CI failures.)
/// Example:
///```rust, no_run
///# use embedded_hal_mock::*;
///# fn main() -> Result<(), MockError> {
///# use embedded_graphics::{
///# pixelcolor::BinaryColor::On as Black, prelude::*, primitives::{Line, PrimitiveStyle},
///# };
///# use epd_waveshare::{epd4in2::*, prelude::*};
///# use epd_waveshare::graphics::VarDisplay;
///#
///# let expectations = [];
///# let mut spi = spi::Mock::new(&expectations);
///# let expectations = [];
///# let cs_pin = pin::Mock::new(&expectations);
///# let busy_in = pin::Mock::new(&expectations);
///# let dc = pin::Mock::new(&expectations);
///# let rst = pin::Mock::new(&expectations);
///# let mut delay = delay::MockNoop::new();
///#
///# // Setup EPD
///# let mut epd = Epd4in2::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?;
///let (x, y, frame_width, frame_height) = (20, 40, 80,80);
///
///let mut buffer = [DEFAULT_BACKGROUND_COLOR.get_byte_value(); 80 / 8 * 80];
///let mut display = VarDisplay::new(frame_width, frame_height, &mut buffer,false).unwrap();
///
///epd.update_partial_old_frame(&mut spi, &mut delay, display.buffer(), x, y, frame_width, frame_height)
/// .ok();
///
///display.clear(Color::White).ok();
///// Execute drawing commands here.
///
///epd.update_partial_new_frame(&mut spi, &mut delay, display.buffer(), x, y, frame_width, frame_height)
/// .ok();
///# Ok(())
///# }
///```
pub trait QuickRefresh<SPI, CS, BUSY, DC, RST, DELAY>
where
SPI: Write<u8>,
CS: OutputPin,
BUSY: InputPin,
DC: OutputPin,
RST: OutputPin,
DELAY: DelayUs<u32>,
{
/// Updates the old frame.
fn update_old_frame(
&mut self,
spi: &mut SPI,
buffer: &[u8],
delay: &mut DELAY,
) -> Result<(), SPI::Error>;
/// Updates the new frame.
fn update_new_frame(
&mut self,
spi: &mut SPI,
buffer: &[u8],
delay: &mut DELAY,
) -> Result<(), SPI::Error>; |
/// Displays the new frame
fn display_new_frame(&mut self, spi: &mut SPI, _delay: &mut DELAY) -> Result<(), SPI::Error>;
| random_line_split | |
traits.rs | {
/// The "normal" full Lookuptable for the Refresh-Sequence
#[default]
Full,
/// The quick LUT where not the full refresh sequence is followed.
/// This might lead to some
Quick,
}
pub(crate) trait InternalWiAdditions<SPI, CS, BUSY, DC, RST, DELAY>
where
SPI: Write<u8>,
CS: OutputPin,
BUSY: InputPin,
DC: OutputPin,
RST: OutputPin,
DELAY: DelayUs<u32>,
{
/// This initialises the EPD and powers it up
///
/// This function is already called from
/// - [new()](WaveshareDisplay::new())
/// - [`wake_up`]
///
///
/// This function calls [reset](WaveshareDisplay::reset),
/// so you don't need to call reset your self when trying to wake your device up
/// after setting it to sleep.
fn init(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
}
/// Functions to interact with three color panels
pub trait WaveshareThreeColorDisplay<SPI, CS, BUSY, DC, RST, DELAY>:
WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY>
where
SPI: Write<u8>,
CS: OutputPin,
BUSY: InputPin,
DC: OutputPin,
RST: OutputPin,
DELAY: DelayUs<u32>,
{
/// Transmit data to the SRAM of the EPD
///
/// Updates both the black and the secondary color layers
fn update_color_frame(
&mut self,
spi: &mut SPI,
delay: &mut DELAY,
black: &[u8],
chromatic: &[u8],
) -> Result<(), SPI::Error>;
/// Update only the black/white data of the display.
///
/// This must be finished by calling `update_chromatic_frame`.
fn update_achromatic_frame(
&mut self,
spi: &mut SPI,
delay: &mut DELAY,
black: &[u8],
) -> Result<(), SPI::Error>;
/// Update only the chromatic data of the display.
///
/// This should be preceded by a call to `update_achromatic_frame`.
/// This data takes precedence over the black/white data.
fn update_chromatic_frame(
&mut self,
spi: &mut SPI,
delay: &mut DELAY,
chromatic: &[u8],
) -> Result<(), SPI::Error>;
}
/// All the functions to interact with the EPDs
///
/// This trait includes all public functions to use the EPDs
///
/// # Example
///
///```rust, no_run
///# use embedded_hal_mock::*;
///# fn main() -> Result<(), MockError> {
///use embedded_graphics::{
/// pixelcolor::BinaryColor::On as Black, prelude::*, primitives::{Line, PrimitiveStyle},
///};
///use epd_waveshare::{epd4in2::*, prelude::*};
///#
///# let expectations = [];
///# let mut spi = spi::Mock::new(&expectations);
///# let expectations = [];
///# let cs_pin = pin::Mock::new(&expectations);
///# let busy_in = pin::Mock::new(&expectations);
///# let dc = pin::Mock::new(&expectations);
///# let rst = pin::Mock::new(&expectations);
///# let mut delay = delay::MockNoop::new();
///
///// Setup EPD
///let mut epd = Epd4in2::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?;
///
///// Use display graphics from embedded-graphics
///let mut display = Display4in2::default();
///
///// Use embedded graphics for drawing a line
///
///let _ = Line::new(Point::new(0, 120), Point::new(0, 295))
/// .into_styled(PrimitiveStyle::with_stroke(Color::Black, 1))
/// .draw(&mut display);
///
/// // Display updated frame
///epd.update_frame(&mut spi, &display.buffer(), &mut delay)?;
///epd.display_frame(&mut spi, &mut delay)?;
///
///// Set the EPD to sleep
///epd.sleep(&mut spi, &mut delay)?;
///# Ok(())
///# }
///```
pub trait WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY>
where
SPI: Write<u8>,
CS: OutputPin,
BUSY: InputPin,
DC: OutputPin,
RST: OutputPin,
DELAY: DelayUs<u32>,
{
/// The Color Type used by the Display
type DisplayColor;
/// Creates a new driver from a SPI peripheral, CS Pin, Busy InputPin, DC
///
/// `delay_us` is the number of us the idle loop should sleep on.
/// Setting it to 0 implies busy waiting.
/// Setting it to None means a default value is used.
///
/// This already initialises the device.
fn new(
spi: &mut SPI,
cs: CS,
busy: BUSY,
dc: DC,
rst: RST,
delay: &mut DELAY,
delay_us: Option<u32>,
) -> Result<Self, SPI::Error>
where
Self: Sized;
/// Let the device enter deep-sleep mode to save power.
///
/// The deep sleep mode returns to standby with a hardware reset.
fn sleep(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
/// Wakes the device up from sleep
///
/// Also reintialises the device if necessary.
fn wake_up(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
/// Sets the backgroundcolor for various commands like [clear_frame](WaveshareDisplay::clear_frame)
fn set_background_color(&mut self, color: Self::DisplayColor);
/// Get current background color
fn background_color(&self) -> &Self::DisplayColor;
/// Get the width of the display
fn width(&self) -> u32;
/// Get the height of the display
fn height(&self) -> u32;
/// Transmit a full frame to the SRAM of the EPD
fn update_frame(
&mut self,
spi: &mut SPI,
buffer: &[u8],
delay: &mut DELAY,
) -> Result<(), SPI::Error>;
/// Transmits partial data to the SRAM of the EPD
///
/// (x,y) is the top left corner
///
/// BUFFER needs to be of size: width / 8 * height !
#[allow(clippy::too_many_arguments)]
fn update_partial_frame(
&mut self,
spi: &mut SPI,
delay: &mut DELAY,
buffer: &[u8],
x: u32,
y: u32,
width: u32,
height: u32,
) -> Result<(), SPI::Error>;
/// Displays the frame data from SRAM
///
/// This function waits until the device isn`t busy anymore
fn display_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
/// Provide a combined update&display and save some time (skipping a busy check in between)
fn update_and_display_frame(
&mut self,
spi: &mut SPI,
buffer: &[u8],
delay: &mut DELAY,
) -> Result<(), SPI::Error>;
/// Clears the frame buffer on the EPD with the declared background color
///
/// The background color can be changed with [`WaveshareDisplay::set_background_color`]
fn clear_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
/// Trait for using various Waveforms from different LUTs
/// E.g. for partial refreshes
///
/// A full refresh is needed after a certain amount of quick refreshes!
///
/// WARNING: Quick Refresh might lead to ghosting-effects/problems with your display. Especially for the 4.2in Display!
///
/// If None is used the old value will be loaded on the LUTs once more
fn set_lut(
&mut self,
spi: &mut SPI,
delay: &mut DELAY,
refresh_rate: Option<RefreshLut>,
) -> Result<(), SPI::Error>;
/// Wait until the display has stopped processing data
///
/// You can call this to make sure a frame is displayed before goin further
fn wait_until_idle(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>;
}
/// Allows quick refresh support for displays that support it; lets you send both
/// old and new frame data to support this.
///
/// When using the quick refresh look-up table, the display must receive separate display
/// buffer data marked as old, | RefreshLut | identifier_name | |
base.go | err != nil {
c.TraceServerError(errors.Annotatef(err, "failed to begin database transaction"))
}
c.model = model
c.orm = model.Orm()
c.pagingResult = models.NewQueryParams()
}
// Finish ends transaction
func (c *SeaterController) Finish() {
defer c.execDeferrers()
err := c.endTransaction()
if err != nil {
c.TraceServerError(errors.Annotatef(err, "failed to end transaction"))
}
}
// M returns the model object
func (c *SeaterController) M() *models.SeaterModel {
return c.model
}
type deferrer func() error
func (c *SeaterController) deferExec(f deferrer) {
c.deferrers = append(c.deferrers, f)
}
// Code sets the response status
func (c *SeaterController) Code(code int) {
c.Ctx.Output.SetStatus(code)
}
func (c *SeaterController) execDeferrers() {
var err error
for i := len(c.deferrers) - 1; i >= 0; i-- {
err = c.deferrers[i]()
if err != nil {
c.errs = append(c.errs, err)
}
}
}
func (c *SeaterController) traceJSONAbort(err error, code int, args ...string) {
c.jsonAbort(code, args...)
}
// jsonAbort trace and abort error
func (c *SeaterController) jsonAbort(code int, args ...string) {
defer c.execDeferrers()
c.Header("Content-Type", "application/json; charset=utf-8")
var msg string
if len(args) == 0 || args[0] == "" {
switch code {
case 400:
msg = "Bad Request"
case 401:
msg = "Unauthorized"
case 404:
msg = "Resource Not Found"
case 409:
msg = "Conflict"
case 500:
msg = "Server Error"
default:
msg = ""
}
} else {
msg = args[0]
}
c.addError(fmt.Errorf(msg))
err := c.endTransaction()
if err != nil {
code = 500
msg = "Server Error"
}
body, err := json.Marshal(msgBody{Msg: msg})
if err != nil {
c.CustomAbort(500, `{"msg": "Unknown Error"}`)
}
c.CustomAbort(code, string(body))
}
// BadRequestf returns bad request response with formatted message
func (c *SeaterController) BadRequestf(format string, args ...interface{}) {
c.TraceBadRequestf(nil, format, args...)
}
// TraceBadRequestf traces error and returns bad request response with formatted message
func (c *SeaterController) TraceBadRequestf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(nil, 400, msg)
}
// TraceServerError traces error and returns server error
func (c *SeaterController) | (err error) {
c.traceJSONAbort(err, 500)
}
// Forbiddenf returns forbidden response with formatted message
func (c *SeaterController) Forbiddenf(format string, args ...interface{}) {
c.TraceForbiddenf(nil, format, args...)
}
// TraceForbiddenf traces error and returns forbidden response with formatted message
func (c *SeaterController) TraceForbiddenf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 403, msg)
}
// NotFoundf returns not found response with formatted message
func (c *SeaterController) NotFoundf(format string, args ...interface{}) {
c.TraceNotFoundf(nil, format, args...)
}
// TraceNotFoundf traces error and returns not found response with formatted message
func (c *SeaterController) TraceNotFoundf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 404, msg)
}
// Conflictf returns conflict response with formatted message
func (c *SeaterController) Conflictf(format string, args ...interface{}) {
c.TraceConflictf(nil, format, args...)
}
// TraceConflictf traces error and returns conflict response with formatted message
func (c *SeaterController) TraceConflictf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 409, msg)
}
// Unauthorizedf returns authorized response with formatted message
func (c *SeaterController) Unauthorizedf(format string, args ...interface{}) {
c.TraceUnauthorizedf(nil, format, args...)
}
// TraceUnauthorizedf traces error and returns authorized reponse with formatted message
func (c *SeaterController) TraceUnauthorizedf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 401, msg)
}
func (c *SeaterController) addError(err error) {
c.errs = append(c.errs, err)
}
// jsonResp serves json response
func (c *SeaterController) jsonResp(data interface{}) {
if obj, ok := data.(*simplejson.Json); ok {
data = obj.Interface()
}
paging := c.getPagingResult()
if paging != nil {
bytes, err := json.Marshal(data)
if err != nil {
err = errors.Annotatef(err, "failed to marshal resp interface")
c.TraceServerError(err)
}
j, err := simplejson.NewJson(bytes)
if err != nil {
err = errors.Annotatef(err, "failed to unmarshal resp bytes")
c.TraceServerError(err)
}
j.Set("paging", paging)
data = j.Interface()
}
c.Data["json"] = data
c.ServeJSON()
}
// OK response 200 OK with json data
func (c *SeaterController) OK(data interface{}) {
c.Code(200)
c.jsonResp(data)
}
// Accepted response an asynchronous resource
func (c *SeaterController) Accepted(data interface{}) {
c.Code(202)
c.jsonResp(data)
}
// Created response an asynchronous resource
func (c *SeaterController) Created(data interface{}) {
c.Code(201)
c.jsonResp(data)
}
// NoContent responses with code 204
func (c *SeaterController) NoContent(code ...int) {
if len(code) > 0 {
c.Code(code[0])
} else {
c.Code(204)
}
c.Ctx.Output.Body([]byte(""))
}
// Validate validates with json schema
func (c *SeaterController) Validate(sche string, document ...string) {
var doc string
if len(document) > 0 {
doc = document[0]
} else {
doc = string(c.Ctx.Input.RequestBody)
if len(doc) == 0 {
c.BadRequestf("request body is empty")
}
}
_, err := simplejson.NewJson([]byte(doc))
if err != nil {
c.BadRequestf("invalid json format")
}
result, err := schema.Validate(sche, doc)
if err != nil {
c.TraceServerError(errors.Annotatef(err, "invalid schema"))
}
if !result.Valid() {
s := "invalid parameters:\n"
var e interface{}
for _, err := range result.Errors() {
s += fmt.Sprintf("%s\n", err)
e = err
}
c.BadRequestf("%s", e)
}
}
func (c *SeaterController) getInt64(key string, defs ...int64) (v int64, ok bool) {
if strv := c.Ctx.Input.Query(key); strv != "" {
val, err := strconv.ParseInt(strv, 10, 64)
if err != nil {
c.BadRequestf("invalid int64 argument %s: %s", key, strv)
}
return val, true
}
return
}
func (c *SeaterController) getString(key string, defs ...string) (v string, ok bool) {
if v = c.Ctx.Input.Query(key); v != "" {
return v, true
}
if len(defs) > 0 {
return defs[0], false
}
return "", false
}
// getTime return input as an time and the existence of the input
func (c *SeaterController) getTime(key string, defs ...time.Time) (v time.Time, ok bool) {
if strv := c.Ctx.Input.Query(key); strv != "" {
val, err := time.Parse(TimestampLayout, strv)
if err != nil {
c.BadRequestf("invalid time argument %s: %s", key, strv)
}
return val, true
} else if len(defs) > 0 {
v = defs[0]
return
}
return
}
// Header get or set a header if value is provided
func (c *SeaterController) Header(key string, value ...interface{}) string {
if len(value) == 0 {
return c.Ctx.Input.Header(key)
}
retval := fmt.Sprintf("%v", value[0])
c.Ctx.Output.Header(key, retval)
return retval
}
func (c *SeaterController) endTransaction() (err error) {
if c.model == nil {
return
}
rollback | TraceServerError | identifier_name |
base.go | != nil {
c.TraceServerError(errors.Annotatef(err, "failed to begin database transaction"))
}
c.model = model
c.orm = model.Orm()
c.pagingResult = models.NewQueryParams()
}
// Finish ends transaction
func (c *SeaterController) Finish() {
defer c.execDeferrers()
err := c.endTransaction()
if err != nil {
c.TraceServerError(errors.Annotatef(err, "failed to end transaction"))
}
}
// M returns the model object
func (c *SeaterController) M() *models.SeaterModel {
return c.model
}
type deferrer func() error
func (c *SeaterController) deferExec(f deferrer) {
c.deferrers = append(c.deferrers, f)
}
// Code sets the response status
func (c *SeaterController) Code(code int) {
c.Ctx.Output.SetStatus(code)
}
func (c *SeaterController) execDeferrers() {
var err error
for i := len(c.deferrers) - 1; i >= 0; i-- {
err = c.deferrers[i]()
if err != nil {
c.errs = append(c.errs, err)
}
}
}
func (c *SeaterController) traceJSONAbort(err error, code int, args ...string) {
c.jsonAbort(code, args...)
}
// jsonAbort trace and abort error
func (c *SeaterController) jsonAbort(code int, args ...string) {
defer c.execDeferrers()
c.Header("Content-Type", "application/json; charset=utf-8")
var msg string
if len(args) == 0 || args[0] == "" {
switch code {
case 400:
msg = "Bad Request"
case 401:
msg = "Unauthorized"
case 404:
msg = "Resource Not Found"
case 409:
msg = "Conflict"
case 500:
msg = "Server Error"
default:
msg = ""
}
} else {
msg = args[0]
}
c.addError(fmt.Errorf(msg))
err := c.endTransaction()
if err != nil {
code = 500
msg = "Server Error"
}
body, err := json.Marshal(msgBody{Msg: msg})
if err != nil {
c.CustomAbort(500, `{"msg": "Unknown Error"}`)
}
c.CustomAbort(code, string(body))
}
// BadRequestf returns bad request response with formatted message
func (c *SeaterController) BadRequestf(format string, args ...interface{}) {
c.TraceBadRequestf(nil, format, args...)
}
// TraceBadRequestf traces error and returns bad request response with formatted message
func (c *SeaterController) TraceBadRequestf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(nil, 400, msg)
}
// TraceServerError traces error and returns server error
func (c *SeaterController) TraceServerError(err error) {
c.traceJSONAbort(err, 500)
}
// Forbiddenf returns forbidden response with formatted message
func (c *SeaterController) Forbiddenf(format string, args ...interface{}) {
c.TraceForbiddenf(nil, format, args...)
}
// TraceForbiddenf traces error and returns forbidden response with formatted message
func (c *SeaterController) TraceForbiddenf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 403, msg)
}
// NotFoundf returns not found response with formatted message
func (c *SeaterController) NotFoundf(format string, args ...interface{}) {
c.TraceNotFoundf(nil, format, args...)
}
// TraceNotFoundf traces error and returns not found response with formatted message
func (c *SeaterController) TraceNotFoundf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 404, msg)
}
// Conflictf returns conflict response with formatted message
func (c *SeaterController) Conflictf(format string, args ...interface{}) {
c.TraceConflictf(nil, format, args...)
}
// TraceConflictf traces error and returns conflict response with formatted message
func (c *SeaterController) TraceConflictf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 409, msg)
}
// Unauthorizedf returns authorized response with formatted message
func (c *SeaterController) Unauthorizedf(format string, args ...interface{}) {
c.TraceUnauthorizedf(nil, format, args...)
}
// TraceUnauthorizedf traces error and returns authorized reponse with formatted message
func (c *SeaterController) TraceUnauthorizedf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 401, msg)
}
func (c *SeaterController) addError(err error) {
c.errs = append(c.errs, err)
}
// jsonResp serves json response
func (c *SeaterController) jsonResp(data interface{}) {
if obj, ok := data.(*simplejson.Json); ok {
data = obj.Interface()
}
paging := c.getPagingResult()
if paging != nil {
bytes, err := json.Marshal(data)
if err != nil {
err = errors.Annotatef(err, "failed to marshal resp interface")
c.TraceServerError(err)
}
j, err := simplejson.NewJson(bytes)
if err != nil {
err = errors.Annotatef(err, "failed to unmarshal resp bytes")
c.TraceServerError(err)
}
j.Set("paging", paging)
data = j.Interface()
}
c.Data["json"] = data
c.ServeJSON()
}
// OK response 200 OK with json data
func (c *SeaterController) OK(data interface{}) |
// Accepted response an asynchronous resource
func (c *SeaterController) Accepted(data interface{}) {
c.Code(202)
c.jsonResp(data)
}
// Created response an asynchronous resource
func (c *SeaterController) Created(data interface{}) {
c.Code(201)
c.jsonResp(data)
}
// NoContent responses with code 204
func (c *SeaterController) NoContent(code ...int) {
if len(code) > 0 {
c.Code(code[0])
} else {
c.Code(204)
}
c.Ctx.Output.Body([]byte(""))
}
// Validate validates with json schema
func (c *SeaterController) Validate(sche string, document ...string) {
var doc string
if len(document) > 0 {
doc = document[0]
} else {
doc = string(c.Ctx.Input.RequestBody)
if len(doc) == 0 {
c.BadRequestf("request body is empty")
}
}
_, err := simplejson.NewJson([]byte(doc))
if err != nil {
c.BadRequestf("invalid json format")
}
result, err := schema.Validate(sche, doc)
if err != nil {
c.TraceServerError(errors.Annotatef(err, "invalid schema"))
}
if !result.Valid() {
s := "invalid parameters:\n"
var e interface{}
for _, err := range result.Errors() {
s += fmt.Sprintf("%s\n", err)
e = err
}
c.BadRequestf("%s", e)
}
}
func (c *SeaterController) getInt64(key string, defs ...int64) (v int64, ok bool) {
if strv := c.Ctx.Input.Query(key); strv != "" {
val, err := strconv.ParseInt(strv, 10, 64)
if err != nil {
c.BadRequestf("invalid int64 argument %s: %s", key, strv)
}
return val, true
}
return
}
func (c *SeaterController) getString(key string, defs ...string) (v string, ok bool) {
if v = c.Ctx.Input.Query(key); v != "" {
return v, true
}
if len(defs) > 0 {
return defs[0], false
}
return "", false
}
// getTime return input as an time and the existence of the input
func (c *SeaterController) getTime(key string, defs ...time.Time) (v time.Time, ok bool) {
if strv := c.Ctx.Input.Query(key); strv != "" {
val, err := time.Parse(TimestampLayout, strv)
if err != nil {
c.BadRequestf("invalid time argument %s: %s", key, strv)
}
return val, true
} else if len(defs) > 0 {
v = defs[0]
return
}
return
}
// Header get or set a header if value is provided
func (c *SeaterController) Header(key string, value ...interface{}) string {
if len(value) == 0 {
return c.Ctx.Input.Header(key)
}
retval := fmt.Sprintf("%v", value[0])
c.Ctx.Output.Header(key, retval)
return retval
}
func (c *SeaterController) endTransaction() (err error) {
if c.model == nil {
return
}
rollback | {
c.Code(200)
c.jsonResp(data)
} | identifier_body |
base.go | err != nil {
c.TraceServerError(errors.Annotatef(err, "failed to begin database transaction"))
}
c.model = model
c.orm = model.Orm()
c.pagingResult = models.NewQueryParams()
}
// Finish ends transaction
func (c *SeaterController) Finish() {
defer c.execDeferrers()
err := c.endTransaction()
if err != nil {
c.TraceServerError(errors.Annotatef(err, "failed to end transaction"))
}
}
// M returns the model object
func (c *SeaterController) M() *models.SeaterModel {
return c.model
}
type deferrer func() error
func (c *SeaterController) deferExec(f deferrer) {
c.deferrers = append(c.deferrers, f)
}
// Code sets the response status
func (c *SeaterController) Code(code int) {
c.Ctx.Output.SetStatus(code)
}
func (c *SeaterController) execDeferrers() {
var err error
for i := len(c.deferrers) - 1; i >= 0; i-- {
err = c.deferrers[i]()
if err != nil {
c.errs = append(c.errs, err)
}
}
}
func (c *SeaterController) traceJSONAbort(err error, code int, args ...string) {
c.jsonAbort(code, args...)
}
// jsonAbort trace and abort error
func (c *SeaterController) jsonAbort(code int, args ...string) {
defer c.execDeferrers()
c.Header("Content-Type", "application/json; charset=utf-8")
var msg string
if len(args) == 0 || args[0] == "" {
switch code {
case 400:
msg = "Bad Request"
case 401:
msg = "Unauthorized"
case 404:
msg = "Resource Not Found"
case 409:
msg = "Conflict"
case 500:
msg = "Server Error"
default:
msg = ""
}
} else {
msg = args[0]
}
c.addError(fmt.Errorf(msg))
err := c.endTransaction()
if err != nil {
code = 500
msg = "Server Error"
}
body, err := json.Marshal(msgBody{Msg: msg})
if err != nil {
c.CustomAbort(500, `{"msg": "Unknown Error"}`)
}
c.CustomAbort(code, string(body))
}
// BadRequestf returns bad request response with formatted message
func (c *SeaterController) BadRequestf(format string, args ...interface{}) {
c.TraceBadRequestf(nil, format, args...)
}
// TraceBadRequestf traces error and returns bad request response with formatted message
func (c *SeaterController) TraceBadRequestf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(nil, 400, msg)
}
// TraceServerError traces error and returns server error
func (c *SeaterController) TraceServerError(err error) {
c.traceJSONAbort(err, 500)
}
// Forbiddenf returns forbidden response with formatted message
func (c *SeaterController) Forbiddenf(format string, args ...interface{}) {
c.TraceForbiddenf(nil, format, args...)
}
// TraceForbiddenf traces error and returns forbidden response with formatted message
func (c *SeaterController) TraceForbiddenf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 403, msg)
}
// NotFoundf returns not found response with formatted message
func (c *SeaterController) NotFoundf(format string, args ...interface{}) {
c.TraceNotFoundf(nil, format, args...)
}
// TraceNotFoundf traces error and returns not found response with formatted message
func (c *SeaterController) TraceNotFoundf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 404, msg)
}
// Conflictf returns conflict response with formatted message
func (c *SeaterController) Conflictf(format string, args ...interface{}) {
c.TraceConflictf(nil, format, args...)
}
// TraceConflictf traces error and returns conflict response with formatted message
func (c *SeaterController) TraceConflictf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 409, msg)
}
// Unauthorizedf returns authorized response with formatted message
func (c *SeaterController) Unauthorizedf(format string, args ...interface{}) {
c.TraceUnauthorizedf(nil, format, args...)
}
// TraceUnauthorizedf traces error and returns authorized reponse with formatted message
func (c *SeaterController) TraceUnauthorizedf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 401, msg)
}
func (c *SeaterController) addError(err error) {
c.errs = append(c.errs, err)
}
// jsonResp serves json response
func (c *SeaterController) jsonResp(data interface{}) {
if obj, ok := data.(*simplejson.Json); ok {
data = obj.Interface()
}
paging := c.getPagingResult()
if paging != nil {
bytes, err := json.Marshal(data)
if err != nil {
err = errors.Annotatef(err, "failed to marshal resp interface")
c.TraceServerError(err)
}
j, err := simplejson.NewJson(bytes)
if err != nil {
err = errors.Annotatef(err, "failed to unmarshal resp bytes")
c.TraceServerError(err)
}
j.Set("paging", paging)
data = j.Interface()
}
c.Data["json"] = data
c.ServeJSON()
}
// OK response 200 OK with json data
func (c *SeaterController) OK(data interface{}) {
c.Code(200)
c.jsonResp(data)
}
// Accepted response an asynchronous resource
func (c *SeaterController) Accepted(data interface{}) {
c.Code(202)
c.jsonResp(data)
}
// Created response an asynchronous resource
func (c *SeaterController) Created(data interface{}) {
c.Code(201)
c.jsonResp(data)
}
// NoContent responses with code 204
func (c *SeaterController) NoContent(code ...int) {
if len(code) > 0 {
c.Code(code[0])
} else {
c.Code(204)
}
c.Ctx.Output.Body([]byte(""))
}
// Validate validates with json schema
func (c *SeaterController) Validate(sche string, document ...string) {
var doc string
if len(document) > 0 {
doc = document[0]
} else {
doc = string(c.Ctx.Input.RequestBody)
if len(doc) == 0 {
c.BadRequestf("request body is empty")
}
}
_, err := simplejson.NewJson([]byte(doc))
if err != nil {
c.BadRequestf("invalid json format")
}
result, err := schema.Validate(sche, doc)
if err != nil {
c.TraceServerError(errors.Annotatef(err, "invalid schema"))
}
if !result.Valid() { | s += fmt.Sprintf("%s\n", err)
e = err
}
c.BadRequestf("%s", e)
}
}
func (c *SeaterController) getInt64(key string, defs ...int64) (v int64, ok bool) {
if strv := c.Ctx.Input.Query(key); strv != "" {
val, err := strconv.ParseInt(strv, 10, 64)
if err != nil {
c.BadRequestf("invalid int64 argument %s: %s", key, strv)
}
return val, true
}
return
}
func (c *SeaterController) getString(key string, defs ...string) (v string, ok bool) {
if v = c.Ctx.Input.Query(key); v != "" {
return v, true
}
if len(defs) > 0 {
return defs[0], false
}
return "", false
}
// getTime return input as an time and the existence of the input
func (c *SeaterController) getTime(key string, defs ...time.Time) (v time.Time, ok bool) {
if strv := c.Ctx.Input.Query(key); strv != "" {
val, err := time.Parse(TimestampLayout, strv)
if err != nil {
c.BadRequestf("invalid time argument %s: %s", key, strv)
}
return val, true
} else if len(defs) > 0 {
v = defs[0]
return
}
return
}
// Header get or set a header if value is provided
func (c *SeaterController) Header(key string, value ...interface{}) string {
if len(value) == 0 {
return c.Ctx.Input.Header(key)
}
retval := fmt.Sprintf("%v", value[0])
c.Ctx.Output.Header(key, retval)
return retval
}
func (c *SeaterController) endTransaction() (err error) {
if c.model == nil {
return
}
rollback := | s := "invalid parameters:\n"
var e interface{}
for _, err := range result.Errors() { | random_line_split |
base.go | Transaction()
if err != nil {
code = 500
msg = "Server Error"
}
body, err := json.Marshal(msgBody{Msg: msg})
if err != nil {
c.CustomAbort(500, `{"msg": "Unknown Error"}`)
}
c.CustomAbort(code, string(body))
}
// BadRequestf returns bad request response with formatted message
func (c *SeaterController) BadRequestf(format string, args ...interface{}) {
c.TraceBadRequestf(nil, format, args...)
}
// TraceBadRequestf traces error and returns bad request response with formatted message
func (c *SeaterController) TraceBadRequestf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(nil, 400, msg)
}
// TraceServerError traces error and returns server error
func (c *SeaterController) TraceServerError(err error) {
c.traceJSONAbort(err, 500)
}
// Forbiddenf returns forbidden response with formatted message
func (c *SeaterController) Forbiddenf(format string, args ...interface{}) {
c.TraceForbiddenf(nil, format, args...)
}
// TraceForbiddenf traces error and returns forbidden response with formatted message
func (c *SeaterController) TraceForbiddenf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 403, msg)
}
// NotFoundf returns not found response with formatted message
func (c *SeaterController) NotFoundf(format string, args ...interface{}) {
c.TraceNotFoundf(nil, format, args...)
}
// TraceNotFoundf traces error and returns not found response with formatted message
func (c *SeaterController) TraceNotFoundf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 404, msg)
}
// Conflictf returns conflict response with formatted message
func (c *SeaterController) Conflictf(format string, args ...interface{}) {
c.TraceConflictf(nil, format, args...)
}
// TraceConflictf traces error and returns conflict response with formatted message
func (c *SeaterController) TraceConflictf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 409, msg)
}
// Unauthorizedf returns authorized response with formatted message
func (c *SeaterController) Unauthorizedf(format string, args ...interface{}) {
c.TraceUnauthorizedf(nil, format, args...)
}
// TraceUnauthorizedf traces error and returns authorized reponse with formatted message
func (c *SeaterController) TraceUnauthorizedf(err error, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
c.traceJSONAbort(err, 401, msg)
}
func (c *SeaterController) addError(err error) {
c.errs = append(c.errs, err)
}
// jsonResp serves json response
func (c *SeaterController) jsonResp(data interface{}) {
if obj, ok := data.(*simplejson.Json); ok {
data = obj.Interface()
}
paging := c.getPagingResult()
if paging != nil {
bytes, err := json.Marshal(data)
if err != nil {
err = errors.Annotatef(err, "failed to marshal resp interface")
c.TraceServerError(err)
}
j, err := simplejson.NewJson(bytes)
if err != nil {
err = errors.Annotatef(err, "failed to unmarshal resp bytes")
c.TraceServerError(err)
}
j.Set("paging", paging)
data = j.Interface()
}
c.Data["json"] = data
c.ServeJSON()
}
// OK response 200 OK with json data
func (c *SeaterController) OK(data interface{}) {
c.Code(200)
c.jsonResp(data)
}
// Accepted response an asynchronous resource
func (c *SeaterController) Accepted(data interface{}) {
c.Code(202)
c.jsonResp(data)
}
// Created response an asynchronous resource
func (c *SeaterController) Created(data interface{}) {
c.Code(201)
c.jsonResp(data)
}
// NoContent responses with code 204
func (c *SeaterController) NoContent(code ...int) {
if len(code) > 0 {
c.Code(code[0])
} else {
c.Code(204)
}
c.Ctx.Output.Body([]byte(""))
}
// Validate validates with json schema
func (c *SeaterController) Validate(sche string, document ...string) {
var doc string
if len(document) > 0 {
doc = document[0]
} else {
doc = string(c.Ctx.Input.RequestBody)
if len(doc) == 0 {
c.BadRequestf("request body is empty")
}
}
_, err := simplejson.NewJson([]byte(doc))
if err != nil {
c.BadRequestf("invalid json format")
}
result, err := schema.Validate(sche, doc)
if err != nil {
c.TraceServerError(errors.Annotatef(err, "invalid schema"))
}
if !result.Valid() {
s := "invalid parameters:\n"
var e interface{}
for _, err := range result.Errors() {
s += fmt.Sprintf("%s\n", err)
e = err
}
c.BadRequestf("%s", e)
}
}
func (c *SeaterController) getInt64(key string, defs ...int64) (v int64, ok bool) {
if strv := c.Ctx.Input.Query(key); strv != "" {
val, err := strconv.ParseInt(strv, 10, 64)
if err != nil {
c.BadRequestf("invalid int64 argument %s: %s", key, strv)
}
return val, true
}
return
}
func (c *SeaterController) getString(key string, defs ...string) (v string, ok bool) {
if v = c.Ctx.Input.Query(key); v != "" {
return v, true
}
if len(defs) > 0 {
return defs[0], false
}
return "", false
}
// getTime return input as an time and the existence of the input
func (c *SeaterController) getTime(key string, defs ...time.Time) (v time.Time, ok bool) {
if strv := c.Ctx.Input.Query(key); strv != "" {
val, err := time.Parse(TimestampLayout, strv)
if err != nil {
c.BadRequestf("invalid time argument %s: %s", key, strv)
}
return val, true
} else if len(defs) > 0 {
v = defs[0]
return
}
return
}
// Header get or set a header if value is provided
func (c *SeaterController) Header(key string, value ...interface{}) string {
if len(value) == 0 {
return c.Ctx.Input.Header(key)
}
retval := fmt.Sprintf("%v", value[0])
c.Ctx.Output.Header(key, retval)
return retval
}
func (c *SeaterController) endTransaction() (err error) {
if c.model == nil {
return
}
rollback := false
if len(c.errs) > 0 {
rollback = true
}
if rollback {
err = c.model.Rollback()
if err != nil {
panic(fmt.Sprintf("failed to rollback transaction: %v", err))
}
} else {
err = c.model.Commit()
if err != nil {
panic(fmt.Sprintf("failed to commit transaction: %v", err))
}
}
return
}
func (c *SeaterController) parseJSONBody(keys ...string) (v *simplejson.Json) {
v, err := simplejson.NewJson(c.Ctx.Input.RequestBody)
if err != nil {
c.BadRequestf("invalid json format")
}
if len(keys) > 0 {
for _, k := range keys {
_, ok := v.CheckGet(k)
if !ok {
c.BadRequestf("Bad Request")
} else {
v = v.Get(k)
}
}
}
return
}
// UnmarshalJSONBody unmarshal request json body
func (c *SeaterController) UnmarshalJSONBody(v interface{}, keys ...string) {
var bytes []byte
var err error
if len(keys) > 0 {
j := c.parseJSONBody(keys...)
bytes, err = j.MarshalJSON()
if err != nil {
err = errors.Annotate(err, "failed to unmarshal json")
c.TraceServerError(err)
}
} else {
bytes = c.Ctx.Input.RequestBody
}
err = json.Unmarshal(bytes, v)
if err != nil {
c.BadRequestf("invalid request body")
}
}
// UserInfo defines session value
type UserInfo struct {
UserID int64 `json:"user_id"`
Openid string `json:"openid"`
SessionKey string `json:"session_key"`
}
func (c *SeaterController) getURLParam(key string) string {
return c.Ctx.Input.Param(key)
}
func (c *SeaterController) getURLID(name string) int64 {
id, err := strconv.ParseInt(c.getURLParam(name), 10, 64)
if err != nil | {
c.BadRequestf("invalid id")
} | conditional_block | |
app.js | �数判断
let tms_a = parseInt(ALM_AFTER / INTV_MIN);
let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 2;
let real_tms = sensor.exc_count-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断
if(real_tms < tms_a) {
return false;
}
else {
real_tms -= tms_a;
return (real_tms%tms_b == 0)? true: false;
}
}
*/
}
/**
* 计算 或 设置时长
*/
function setDuration(check, exc_count) {
exc_count = exc_count || 0;
if(check.src == 'read') {
check.duration = (exc_count<1)? 0: (exc_count-1)*INTV_MIN;
}
else {
check.duration = (exc_count<2)? 0: (exc_count-2)*INTV_MIN; // -2:隐含减去用于确认的第一个 ALM_AFTER 分钟
}
//TODO: read 时,来自 point
}
/**
* 格式化下时长
*/
function formatDuration(minutes) {
minutes = minutes || 0;
if(minutes < 120 ) {
return minutes+ '分钟';
}
else {
let hr = parseInt(minutes/60);
let min = minutes%60;
let day = 0;
if( hr >= 24) {
day = parseInt(hr/24);
hr = hr%24;
}
return (day? day+ '天':'')+ (hr+ '小时')+ (min? min+ '分钟': '');
}
}
/**
* 发送报警
*/
function sendAlarm(sensor, check, users, blocks) {
let firstline = makeFirstline(sensor, check);
let curtime = new Date().formatTime('yyyy-MM-dd hh:mm');
let level = check.level;
let level_name = level==0? '通知': (level==1? '预警': '报警');
let level_color = level==0? '#16A765': (level==1? '#FFAD46': '#F83A22');
let durat_str = formatDuration(check.duration);
let lastline = (check.duration? '已持续约 '+ durat_str: '')+ (check.duration&&level? ', ': '') +(level? '请及时处理!':'');
console.error('SendAlarm', curtime, sensor.name, check);
let to_mobiles = [];
users.forEach( function(user) {
if(!user.openid) return;
// 传感器是否在用户的组?
if( user.groups.indexOf(sensor.group) == -1) {
return;
}
// 用户是否屏蔽该报警?
let sid = sensor.id, uid = user.id;
let until = blocks&&blocks[sid]&&blocks[sid][uid]? new Date(blocks[sid][uid]): null;
if(until && until> new Date() && level> 0) { //复位通知不能屏蔽
console.log('user <'+ uid+ '> blocks <'+ sid+ '> until', until.toLocaleString());
return;
}
to_mobiles.push(user.mobile);
// 发送微信消息(本地)
/*
var templateId = 'zOVAEaSZVEHPdRE1KM2uQJy5wPfuWibHSU6NmXpIqF8';
var url = WX_MSG_URL+ `?sid=${sid}&uid=${uid}`;
var data = {
"first": {
"value": firstline,
"color":"#173177"
},
"keyword1":{
"value": level_name,
"color": level_color
},
"keyword2": {
"value": curtime,
"color":"#173177"
},
"keyword3": {
"value": sensor.loc,
"color":"#173177"
},
"keyword4": {
"value": '何 138****1234',
"color":"#173177"
},
"keyword5": {
"value": 'n/a',
"color":"#173177"
},
"remark":{
"value": lastline,
"color":"#173177"
}
};
wechatApi.sendTemplate(user.openid, templateId, url, data, function(err, result) {
//console.log('sendTemplate err+result:', err, result)
})
*/
});
let mobiles = to_mobiles.join(',');
let json = {
"token":"20185523",
"mobile": mobiles,
"firstline": firstline,
"level_name": level_name,
"level_color": level_color,
"curtime": curtime,
"location": sensor.loc,
"contact": "何 138****2345",
"workorder": "n/a",
"lastline": lastline
};
postRequest(KPI_SERVICE, json, function(err, resp, body) {
if(err) console.log('Remote:', err);
else console.log('Remote:', resp.statusCode, body);
});
}
/**
* 生成报警主提示
*/
function makeFirstline(sensor, check) {
//eg. sensor.name+ '温度超标!数值:'+ check.value+ ' 标准:'+ check.standard,
let r = sensor.name+ ' ';
switch (check.measure) {
case 'temp':
r += '温度' + (!check.is_reset? '超标': '复位')+ '!';
r += '数值:' + check.value+ ' 标准:'+ check.standard;
break;
case 'offline':
r += '离线' + (!check.is_reset? '报警': '复位')+ '!';
r += !check.is_reset? '标准:'+ check.standard: '';
break;
}
return r;
}
/**
* 统计 tm 离线传感器并报警
*/
function alarmTmOfflineSensors(sensors, users, blocks) {
let offlines = sensors.filter(function(s) {
return s.tm_offline;
});
let new_off_count = {};
let new_off_snrs = groupBy(offlines, 'group');
for( group in new_off_snrs) {
new_off_count[group] = {};
new_off_count[group]['num'] = new_off_snrs[group].length;
new_off_count[group]['min_off'] = new_off_snrs[group][0].point.min_off;
}
console.log('old_off_count', tm_off_count);
console.log('new_off_count', new_off_count);
// 查找离线复位
for( group in tm_off_count) {
let od = tm_off_count[group];
let nw = new_off_count[group];
if(!nw) {
let sensor = {
name: '共计'+ od.num+ '个传感器',
group: group,
id: group+ '_group_offline',
loc: groupName(group)+'-全库范围',
};
let check = {
level: 0,
duration: od.min_off+ INTV_MIN,
measure: 'offline',
is_reset: true,
standard: '',
};
sendAlarm(sensor, check, users, blocks);
}
}
// 查找离线报警
for( group in new_off_count) {
let od = tm_off_count[group];
let nw = new_off_count[group];
let exc_count = od ? od['exc_count']+1 : 1;
let min_between = (exc_count-1)*INTV_MIN;
nw['exc_count'] = exc_count;
// console.log('min_between', min_between);
if(!od || min_between%ALM_BETWEEN==0) {
let sensor = {
name: '共计'+ nw.num+ '个传感器',
group: group,
id: group+ '_group_offline',
loc: groupName(group)+'-全库范围',
};
let check = {
level: 2,
duration: nw.min_off,
measure: 'offline',
is_reset: false,
standard: '数据停止更新',
};
sendAlarm(sensor, check, users, blocks);
}
}
// 保存新离线数量
tm_off_count = new_off_count;
}
/**
* 清理过期屏蔽项
*
* 注意: 要全部清除屏蔽项时, 不能清空文件内容, 请手动把内容设置为 {}
*/
function cleanBlocks() {
let count = 0;
let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE));
console.log('------ cleanBlocks start ------------');
for( sid in blocks) {
for( uid in blocks[sid]) {
let until = new Date(blocks[sid][uid]);
let del = '';
if( until< new Date()) {
delete blocks[sid][uid];
count++; del = '(deleted)';
}
console.log(sid, uid, until.toLocaleString(), del);
} | }
fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks)); | random_line_split | |
app.js | {
if( timer) {
clearInterval(timer); timer = 0;
res? res.send('Timer stop.'): null;
}
else {
res? res.send('No timer.'): null;
}
}
/**
* 自动启动首次(分钟需5的倍数)
*/
function autoStart(enable) {
if(enable) {
let m1 = new Date().getMinutes();
let m2 = Math.ceil(m1/5)*5;
let df = (m2-m1)*1000* MIN_SECS;
console.log('AutoStart waits:', m2-m1, 'min to start.');
if(!timer) {
setTimeout(startTimer, df);
}
else {
console.log('Error: please stop previous timer.');
}
}
}
autoStart(MIN_SECS == 60);
/**
* 传感器检查流程
*/
function checkSensors() {
console.log('------ CheckSensors start '+ new Date().toLocaleString()+ '-------');
// 找出在线传感器
let onlines = sensors.filter(function(s) {
return !s.offline;
});
// 批量查询传感器
sensorBatchValues(onlines, function(err, sensors) {
if(err) { return; }
let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE));
sensors.forEach( function(sensor) {
let pt = sensor.point, kpi = kpis[sensor.kpi];
let ck = checkKpi(pt, kpi);
let ex = exceedCount(sensor, ck);
if( ex) {
sendAlarm(sensor, ck, users, blocks);
}
console.log(sensor.name+':', ck.value, 'min-off:', sensor.point.min_off,
ck.exceed? 'exceed:'+ck.exceed+ ' count: ' +sensor.exc_count[ck.level] : '');
});
alarmTmOfflineSensors(sensors, users, blocks);
});
}
/**
* 读取传感器值(批量)
*/
function sensorBatchValues(sensors, callback) {
let qs = [];
sensors.forEach( function(sensor) {
let m = parseTagMesa(sensor.tag_mesa);
qs.push(`SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`)
});
let q = qs.join('; ');
//console.log('batch q:', q);
influx.query(q).then(result => {
// 注: 结果实际不符合 json 格式, 可用 stringify 转
//console.log('result', JSON.stringify(result));
if( sensors.length == 1) {
result = [result]; // 一个传感器时,必须包装成二维
}
sensors.forEach( function(sensor, idx) {
if(sensor.test) {
sensor.point = {
time: out_date==0? new Date(): new Date(out_date),
last: out_value,
}
}
else {
sensor.point = (result&&result.length>idx&&result[idx].length>0)
? result[idx][0]: {};
}
});
callback(null, sensors);
}).catch(err => {
console.error('sensorBatchValues err:', err);
callback(err);
});
}
/**
* 读取传感器值(单次)
*/
function sensorValue(sensor, callback) {
let m = parseTagMesa(sensor.tag_mesa);
let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`;
//console.log('q:', q);
influx.query(q).then(result => {
callback(null, (result&&result.length>0)? result[0]: {});
}).catch(err => {
console.error('SensorValue err:', err);
callback(err);
});
}
/**
* 计算 KPI
*/
function checkKpi(point, kpi) {
let ck = {
src: kpi.src || 'calc',
measure: kpi.measure || 'temp',
reset_alarm: kpi.reset_alarm,
exceed: 0,
level: 0,
standard: '',
is_reset: false,
};
//TODO: 来自 point 的其他值
let value = point.last;
let time = new Date(point.time.getTime());
//console.log('time', time.toLocaleString());
point.min_off = minOffsetNow(time);
ck.tm_offline = point.min_off > 12;
if( !kpi.src && !ck.tm_offline) {
if( kpi.ra_above && value > kpi.ra_above) {
ck.exceed = 2;
ck.standard = kpi.ar_below+ '~'+ kpi.ra_above;
}
else if( kpi.ag_above && value > kpi.ag_above) {
ck.exceed = 1;
ck.standard = kpi.ga_below+ '~'+ kpi.ag_above;
}
else if( kpi.ar_below && value < kpi.ar_below) {
ck.exceed = -2;
ck.standard = kpi.ar_below+ '~'+ kpi.ra_above;
}
else if( kpi.ga_below && value < kpi.ga_below) {
ck.exceed = -1;
ck.standard = kpi.ga_below+ '~'+ kpi.ag_above;
}
}
else if( kpi.src == 'read' && !ck.tm_offline) {
ck.exceed = value>0? 2: 0;
ck.standard = kpi.standard;
}
ck.level = Math.abs(ck.exceed);
ck.value = value;
ck.time = time;
return ck;
}
/**
* 统计超限次数
*
* @return true/false 是否累积到报警程度
*/
function exceedCount(sensor, check) {
let lvl = check.level;
sensor.exc_count = sensor.exc_count || {};
sensor.exc_count[lvl] = sensor.exc_count[lvl] || 0;
sensor.tm_offline = check.tm_offline;
if(sensor.tm_offline) {
return false; // tm_offline 数量太多, 1.不单独报警; 2.不累积计数
}
if( sensor.exc_count[lvl] == 0 && check.exceed == 0) {
return false;
}
// 0 0
// 1 1
// 累加计数
if(check.exceed == 0 || check.level >= ALARM_LEVEL) {
for(let lo=1; lo<=lvl; lo++)
sensor.exc_count[lo]++;
setDuration(check, sensor.exc_count[lvl]);
}
else {
return false; //不计数
}
if( check.exceed == 0) {
// 复位情况:也允许发送报警
check.is_reset = (sensor.exc_count[lvl] > 1);
for(let lo=1; lo<=lvl; lo++)
sensor.exc_count[lo] = 0;
return ( check.is_reset && check.reset_alarm)? true: false;
}
else {
// 超限情况:从计数判断
let tms_a = (check.src=='read')? 0: parseInt(ALM_AFTER / INTV_MIN); // ALM_AFTER 转为次数, read 的不用等待
let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 6;
let real_tms = sensor.exc_count[lvl]-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 | {
return false;
}
else {
real_tms -= tms_a;
return (real_tms%tms_b == 0)? true: false;
}
}
/*
if( check.exceed === 0 || check.level < ALARM_LEVEL) {
check.is_reset = (sensor.exc_count > 0);
setDuration(check, sensor.exc_count);
sensor.exc_count = 0;
// 复位也允许发送报警
return ( check.is_reset && check.reset_alarm)? true: false;
}
else {
// 累加计数
if(!sensor.exc_count) {
sensor.exc_count = 1;
}
else {
sensor.exc_count++;
}
setDuration(check, sensor.exc_count);
// 计数判断
let tms_a = parseInt(ALM_AFTER / INTV_MIN);
let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 2;
let real_tms = sensor.exc_count-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断
if(real_tms < tms_a) {
return false;
}
else {
real_tms -= tms_a;
return (real_tms%tms_b == 0)? true: false;
}
}
*/
}
/**
* 计算 或 设置时长
*/
function setDuration(check, exc_count) {
exc_count = exc_count || 0;
if(check.src == 'read') {
check.duration = (exc_count<1)? 0: (exc_count-1)*INTV_MIN;
}
else {
check.duration = (exc_count<2)? 0: (exc_count-2)*INTV_MIN; // - | 再判断
if(real_tms < tms_a) | conditional_block |
app.js | ': '');
}
}
/**
* 发送报警
*/
function sendAlarm(sensor, check, users, blocks) {
let firstline = makeFirstline(sensor, check);
let curtime = new Date().formatTime('yyyy-MM-dd hh:mm');
let level = check.level;
let level_name = level==0? '通知': (level==1? '预警': '报警');
let level_color = level==0? '#16A765': (level==1? '#FFAD46': '#F83A22');
let durat_str = formatDuration(check.duration);
let lastline = (check.duration? '已持续约 '+ durat_str: '')+ (check.duration&&level? ', ': '') +(level? '请及时处理!':'');
console.error('SendAlarm', curtime, sensor.name, check);
let to_mobiles = [];
users.forEach( function(user) {
if(!user.openid) return;
// 传感器是否在用户的组?
if( user.groups.indexOf(sensor.group) == -1) {
return;
}
// 用户是否屏蔽该报警?
let sid = sensor.id, uid = user.id;
let until = blocks&&blocks[sid]&&blocks[sid][uid]? new Date(blocks[sid][uid]): null;
if(until && until> new Date() && level> 0) { //复位通知不能屏蔽
console.log('user <'+ uid+ '> blocks <'+ sid+ '> until', until.toLocaleString());
return;
}
to_mobiles.push(user.mobile);
// 发送微信消息(本地)
/*
var templateId = 'zOVAEaSZVEHPdRE1KM2uQJy5wPfuWibHSU6NmXpIqF8';
var url = WX_MSG_URL+ `?sid=${sid}&uid=${uid}`;
var data = {
"first": {
"value": firstline,
"color":"#173177"
},
"keyword1":{
"value": level_name,
"color": level_color
},
"keyword2": {
"value": curtime,
"color":"#173177"
},
"keyword3": {
"value": sensor.loc,
"color":"#173177"
},
"keyword4": {
"value": '何 138****1234',
"color":"#173177"
},
"keyword5": {
"value": 'n/a',
"color":"#173177"
},
"remark":{
"value": lastline,
"color":"#173177"
}
};
wechatApi.sendTemplate(user.openid, templateId, url, data, function(err, result) {
//console.log('sendTemplate err+result:', err, result)
})
*/
});
let mobiles = to_mobiles.join(',');
let json = {
"token":"20185523",
"mobile": mobiles,
"firstline": firstline,
"level_name": level_name,
"level_color": level_color,
"curtime": curtime,
"location": sensor.loc,
"contact": "何 138****2345",
"workorder": "n/a",
"lastline": lastline
};
postRequest(KPI_SERVICE, json, function(err, resp, body) {
if(err) console.log('Remote:', err);
else console.log('Remote:', resp.statusCode, body);
});
}
/**
* 生成报警主提示
*/
function makeFirstline(sensor, check) {
//eg. sensor.name+ '温度超标!数值:'+ check.value+ ' 标准:'+ check.standard,
let r = sensor.name+ ' ';
switch (check.measure) {
case 'temp':
r += '温度' + (!check.is_reset? '超标': '复位')+ '!';
r += '数值:' + check.value+ ' 标准:'+ check.standard;
break;
case 'offline':
r += '离线' + (!check.is_reset? '报警': '复位')+ '!';
r += !check.is_reset? '标准:'+ check.standard: '';
break;
}
return r;
}
/**
* 统计 tm 离线传感器并报警
*/
function alarmTmOfflineSensors(sensors, users, blocks) {
let offlines = sensors.filter(function(s) {
return s.tm_offline;
});
let new_off_count = {};
let new_off_snrs = groupBy(offlines, 'group');
for( group in new_off_snrs) {
new_off_count[group] = {};
new_off_count[group]['num'] = new_off_snrs[group].length;
new_off_count[group]['min_off'] = new_off_snrs[group][0].point.min_off;
}
console.log('old_off_count', tm_off_count);
console.log('new_off_count', new_off_count);
// 查找离线复位
for( group in tm_off_count) {
let od = tm_off_count[group];
let nw = new_off_count[group];
if(!nw) {
let sensor = {
name: '共计'+ od.num+ '个传感器',
group: group,
id: group+ '_group_offline',
loc: groupName(group)+'-全库范围',
};
let check = {
level: 0,
duration: od.min_off+ INTV_MIN,
measure: 'offline',
is_reset: true,
standard: '',
};
sendAlarm(sensor, check, users, blocks);
}
}
// 查找离线报警
for( group in new_off_count) {
let od = tm_off_count[group];
let nw = new_off_count[group];
let exc_count = od ? od['exc_count']+1 : 1;
let min_between = (exc_count-1)*INTV_MIN;
nw['exc_count'] = exc_count;
// console.log('min_between', min_between);
if(!od || min_between%ALM_BETWEEN==0) {
let sensor = {
name: '共计'+ nw.num+ '个传感器',
group: group,
id: group+ '_group_offline',
loc: groupName(group)+'-全库范围',
};
let check = {
level: 2,
duration: nw.min_off,
measure: 'offline',
is_reset: false,
standard: '数据停止更新',
};
sendAlarm(sensor, check, users, blocks);
}
}
// 保存新离线数量
tm_off_count = new_off_count;
}
/**
* 清理过期屏蔽项
*
* 注意: 要全部清除屏蔽项时, 不能清空文件内容, 请手动把内容设置为 {}
*/
function cleanBlocks() {
let count = 0;
let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE));
console.log('------ cleanBlocks start ------------');
for( sid in blocks) {
for( uid in blocks[sid]) {
let until = new Date(blocks[sid][uid]);
let del = '';
if( until< new Date()) {
delete blocks[sid][uid];
count++; del = '(deleted)';
}
console.log(sid, uid, until.toLocaleString(), del);
}
}
fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks));
return count;
}
/**
* 发送 POST 请求
*/
function postRequest(url, json, callback) {
var options = {
uri: url,
method: 'POST',
json: json,
};
request(options, callback);
}
// -- routers ------------------------------------------------------
app.get('/', function (req, res) {
setTimeout(() => res.end('Hello sensor!'), Math.random() * 500);
})
app.get('/start', function (req, res) {
startTimer(res);
});
app.get('/stop', function (req, res) {
stopTimer(res);
});
/**
* 临时屏蔽报警(表单)
*/
app.get('/blockme', function (req, res) {
let sid = req.query.sid;
let uid = req.query.uid;
if(!sid || !uid) {
return res.send('错误: 参数错误!');
}
let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE));
let until = blocks&&blocks[sid]&&blocks[sid][uid]? new Date(blocks[sid][uid]): null;
if(until && until > new Date()) {
res.render('blockme', {sid, uid, until: until.toLocaleString()});
}
else {
res.render('blockme', {sid, uid, until:null});
}
});
/**
* 临时屏蔽报警(提交)
*/
app.post('/blockme', function (req, res) {
let after = parseInt(req.body.after);
let sid = req.body.sid;
let uid = req.body.uid;
if(!sid || !uid || !after) {
return res.send('错误: 参 | 数错误!');
}
let until = new Date().addHours(after);
let blocks = JSON.parse(fs.readFileSync(BLOCK_F | identifier_body | |
app.js | Start(MIN_SECS == 60);
/**
* 传感器检查流程
*/
function checkSensors() {
console.log('------ CheckSensors start '+ new Date().toLocaleString()+ '-------');
// 找出在线传感器
let onlines = sensors.filter(function(s) {
return !s.offline;
});
// 批量查询传感器
sensorBatchValues(onlines, function(err, sensors) {
if(err) { return; }
let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE));
sensors.forEach( function(sensor) {
let pt = sensor.point, kpi = kpis[sensor.kpi];
let ck = checkKpi(pt, kpi);
let ex = exceedCount(sensor, ck);
if( ex) {
sendAlarm(sensor, ck, users, blocks);
}
console.log(sensor.name+':', ck.value, 'min-off:', sensor.point.min_off,
ck.exceed? 'exceed:'+ck.exceed+ ' count: ' +sensor.exc_count[ck.level] : '');
});
alarmTmOfflineSensors(sensors, users, blocks);
});
}
/**
* 读取传感器值(批量)
*/
function sensorBatchValues(sensors, callback) {
let qs = [];
sensors.forEach( function(sensor) {
let m = parseTagMesa(sensor.tag_mesa);
qs.push(`SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`)
});
let q = qs.join('; ');
//console.log('batch q:', q);
influx.query(q).then(result => {
// 注: 结果实际不符合 json 格式, 可用 stringify 转
//console.log('result', JSON.stringify(result));
if( sensors.length == 1) {
result = [result]; // 一个传感器时,必须包装成二维
}
sensors.forEach( function(sensor, idx) {
if(sensor.test) {
sensor.point = {
time: out_date==0? new Date(): new Date(out_date),
last: out_value,
}
}
else {
sensor.point = (result&&result.length>idx&&result[idx].length>0)
? result[idx][0]: {};
}
});
callback(null, sensors);
}).catch(err => {
console.error('sensorBatchValues err:', err);
callback(err);
});
}
/**
* 读取传感器值(单次)
*/
function sensorValue(sensor, callback) {
let m = parseTagMesa(sensor.tag_mesa);
let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`;
//console.log('q:', q);
influx.query(q).then(result => {
callback(null, (result&&result.length>0)? result[0]: {});
}).catch(err => {
console.error('SensorValue err:', err);
callback(err);
});
}
/**
* 计算 KPI
*/
function checkKpi(point, kpi) {
let ck = {
src: kpi.src || 'calc',
measure: kpi.measure || 'temp',
reset_alarm: kpi.reset_alarm,
exceed: 0,
level: 0,
standard: '',
is_reset: false,
};
//TODO: 来自 point 的其他值
let value = point.last;
let time = new Date(point.time.getTime());
//console.log('time', time.toLocaleString());
point.min_off = minOffsetNow(time);
ck.tm_offline = point.min_off > 12;
if( !kpi.src && !ck.tm_offline) {
if( kpi.ra_above && value > kpi.ra_above) {
ck.exceed = 2;
ck.standard = kpi.ar_below+ '~'+ kpi.ra_above;
}
else if( kpi.ag_above && value > kpi.ag_above) {
ck.exceed = 1;
ck.standard = kpi.ga_below+ '~'+ kpi.ag_above;
}
else if( kpi.ar_below && value < kpi.ar_below) {
ck.exceed = -2;
ck.standard = kpi.ar_below+ '~'+ kpi.ra_above;
}
else if( kpi.ga_below && value < kpi.ga_below) {
ck.exceed = -1;
ck.standard = kpi.ga_below+ '~'+ kpi.ag_above;
}
}
else if( kpi.src == 'read' && !ck.tm_offline) {
ck.exceed = value>0? 2: 0;
ck.standard = kpi.standard;
}
ck.level = Math.abs(ck.exceed);
ck.value = value;
ck.time = time;
return ck;
}
/**
* 统计超限次数
*
* @return true/false 是否累积到报警程度
*/
function exceedCount(sensor, check) {
let lvl = check.level;
sensor.exc_count = sensor.exc_count || {};
sensor.exc_count[lvl] = sensor.exc_count[lvl] || 0;
sensor.tm_offline = check.tm_offline;
if(sensor.tm_offline) {
return false; // tm_offline 数量太多, 1.不单独报警; 2.不累积计数
}
if( sensor.exc_count[lvl] == 0 && check.exceed == 0) {
return false;
}
// 0 0
// 1 1
// 累加计数
if(check.exceed == 0 || check.level >= ALARM_LEVEL) {
for(let lo=1; lo<=lvl; lo++)
sensor.exc_count[lo]++;
setDuration(check, sensor.exc_count[lvl]);
}
else {
return false; //不计数
}
if( check.exceed == 0) {
// 复位情况:也允许发送报警
check.is_reset = (sensor.exc_count[lvl] > 1);
for(let lo=1; lo<=lvl; lo++)
sensor.exc_count[lo] = 0;
return ( check.is_reset && check.reset_alarm)? true: false;
}
else {
// 超限情况:从计数判断
let tms_a = (check.src=='read')? 0: parseInt(ALM_AFTER / INTV_MIN); // ALM_AFTER 转为次数, read 的不用等待
let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 6;
let real_tms = sensor.exc_count[lvl]-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断
if(real_tms < tms_a) {
return false;
}
else {
real_tms -= tms_a;
return (real_tms%tms_b == 0)? true: false;
}
}
/*
if( check.exceed === 0 || check.level < ALARM_LEVEL) {
check.is_reset = (sensor.exc_count > 0);
setDuration(check, sensor.exc_count);
sensor.exc_count = 0;
// 复位也允许发送报警
return ( check.is_reset && check.reset_alarm)? true: false;
}
else {
// 累加计数
if(!sensor.exc_count) {
sensor.exc_count = 1;
}
else {
sensor.exc_count++;
}
setDuration(check, sensor.exc_count);
// 计数判断
let tms_a = parseInt(ALM_AFTER / INTV_MIN);
let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 2;
let real_tms = sensor.exc_count-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断
if(real_tms < tms_a) {
return false;
}
else {
real_tms -= tms_a;
return (real_tms%tms_b == 0)? true: false;
}
}
*/
}
/**
* 计算 或 设置时长
*/
function setDuration(check, exc_count) {
exc_count = exc_count || 0;
if(check.src == 'read') {
check.duration = (exc_count<1)? 0: (exc_count-1)*INTV_MIN;
}
else {
check.duration = (exc_count<2)? 0: (exc_count-2)*INTV_MIN; // -2:隐含减去用于确认的第一个 ALM_AFTER 分钟
}
//TODO: read 时,来自 point
}
/**
* 格式化下时长
*/
function formatDuration(minutes) {
minutes = minutes || 0;
if(minutes < 120 ) {
return minutes+ '分钟';
}
else {
let hr = parseInt(minutes/60);
let min = minutes%60;
let day = 0;
if( hr >= 24) {
day = parseInt(hr/24);
hr = hr%24;
}
return (day? day+ '天':'')+ (hr+ '小时')+ (min? min+ '分钟': '');
}
}
/**
* 发送报警
*/
function sendAl | arm(sensor, | identifier_name | |
spamScore.js | = new MessageEmbed()
.setTitle(`Flagged message`)
.setDescription(`${this.cleanContent}`)
.setAuthor(this.author.tag, this.author.displayAvatarURL())
.setFooter(`Message channel **${this.channel.name}**`)
.addField(`Total Spam Score`, `Base: ${score}; multiplier: ${multiplier}; total: ${score * multiplier}`)
.setColor(`#ff7878`);
for (var key in scoreReasons) {
if (Object.prototype.hasOwnProperty.call(scoreReasons, key)) {
embed.addField(key, scoreReasons[ key ]);
}
}
_channel.sendEmbed(embed, `:bangbang: Please review message ${this.id}; it was flagged for having a high spam score.`)
}
}
score = parseInt(score * multiplier);
console.log(`Total score: ${score}`)
}
console.log('Message spam score ' + this.id)
// Add 5 score for each mention; mention spam
var nummentions = this.mentions.users.size + this.mentions.roles.size;
score += (5 * nummentions);
if (nummentions > 0) { scoreReasons[ "Mentions" ] = (nummentions * 5) }
// Add 10 score for each embed; link/embed spam
var numembeds = this.embeds.length;
score += (10 * numembeds);
if (numembeds > 0) { scoreReasons[ "Embeds" ] = (numembeds * 10) }
// Add 10 score for each attachment; attachment spam
var numattachments = this.attachments.size;
score += (10 * numattachments);
if (numattachments > 0) { scoreReasons[ "Attachments" ] = (numattachments * 10) }
// Calculate how many seconds this message took to type based off of 7 characters per second.
var messageTime = (this.cleanContent.length / 7);
//console.log(`${messageTime} messagetime`);
// Iterate through messages of this channel from the last 3 minutes by the same author
var collection = this.channel.messages
.filter((message) => {
if (message.partial || message === null || !message) return false;
return message.id !== this.id && message.author.id === this.author.id && moment(this.createdAt).subtract(3, 'minutes').isBefore(moment(message.createdAt)) && moment(this.createdAt).isAfter(moment(message.createdAt));
});
//console.log(`${collection.size} messages`);
collection.each((message) => {
// If the current message was sent at a time that causes the typing speed to be more than 7 characters per second,
// add score for flooding / copypasting. The faster / more characters typed, the more score added.
var timediff = moment(this.createdAt).diff(moment(message.createdAt), 'seconds');
if (timediff <= messageTime && !this.author.bot) {
score += parseInt((messageTime - timediff) + 1);
scoreReasons[ "Flooding / Rapid Typing" ] = parseInt((messageTime - timediff) + 1)
}
// If the current message is more than 80% or more similar to the comparing message,
// add 1 score for every (similarity % - 80) / 2; copy/paste spam. Multiply by 1 + (0.1 * (numcharacters / 100))
var similarity = stringSimilarity.compareTwoStrings(`${this.content || ''}${JSON.stringify(this.embeds)}${JSON.stringify(this.attachments.array())}`, `${message.content || ''}${JSON.stringify(message.embeds)}${JSON.stringify(message.attachments.array())}`);
if (similarity >= 0.8) {
score += parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))));
scoreReasons[ "Copy-Pasting" ] = parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))))
}
});
// Score checks only if message content exists
if (this.cleanContent && this.cleanContent.length > 0) {
/* DISABLED; many false positives for emojis etc
// If the message contains any off-the-wall characters, consider it spam and add 10 to the score.
if (/[^\x20-\x7E]/g.test(this.cleanContent || '')) {
score += 10;
console.log(`special characters: 10`);
}
*/
// Count uppercase and lowercase letters
var uppercase = this.cleanContent.replace(/[^A-Z]/g, "").length;
var lowercase = this.cleanContent.replace(/[^a-z]/g, "").length;
// If 50% or more of the characters are uppercase, consider it shout spam,
// and add a score of 5, plus 1 for every 12.5 uppercase characters.
if (uppercase >= lowercase) {
score += parseInt(5 + (20 * (uppercase / 250)));
scoreReasons[ "Uppercase / Shouting" ] = parseInt(5 + (20 * (uppercase / 250)))
}
// Add score for repeating consecutive characters
// 20 or more consecutive repeating characters = extremely spammy. Add 20 score.
if (/(.)\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 20;
scoreReasons[ "Repeating Characters" ] = 20
// 10 or more consecutive repeating characters = spammy. Add 10 score.
} else if (/(.)\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 10;
scoreReasons[ "Repeating Characters" ] = 10
// 5 or more consecutive repeating characters = a little bit spammy. Add 5 score.
} else if (/(.)\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 5;
scoreReasons[ "Repeating Characters" ] = 5
}
// Add 40 score for here and everyone mentions as these are VERY spammy.
if (this.content.includes("@here") || this.content.includes("@everyone")) {
score += 40;
scoreReasons[ "Here / Everyone Mention" ] = 40
}
// Add spam score for every new line; but the more content : new lines, the less spam score is added.
// New lines when content length is 128 characters or less are considered very spammy.
var newlines = this.cleanContent.split(/\r\n|\r|\n/).length - 1;
var ratio = newlines / (this.cleanContent.length > 128 ? Math.ceil(this.cleanContent.length / 128) / 2 : 0.25);
score += Math.round(ratio);
if (newlines > 0 && ratio > 0) { scoreReasons[ "New Lines / Scrolling" ] = Math.round(ratio) }
// Add score for repeating patterns
// TODO: improve this algorithm
var newstring = this.cleanContent;
var regex = /(\W|^)(.+)\s\2/gmi;
var matcher = regex.exec(this.cleanContent);
while (matcher !== null) {
newstring = newstring.replace(matcher[ 2 ], ``);
matcher = regex.exec(this.cleanContent);
}
var patternScore = (this.cleanContent.length > 0 ? (newstring.length / this.cleanContent.length) : 1);
// Pattern score of 100% means no repeating patterns. For every 4% less than 100%, add 1 score. Multiply depending on content length.
score += parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))))
if (patternScore < 1) { scoreReasons[ "Repeating Patterns" ] = parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) }
// Add 3 points for every profane word used; excessive profanity spam
config.profanity.map((word) => {
var numbers = getIndicesOf(word, this.cleanContent, false);
if (numbers.length > 0) {
score += (numbers.length * 3);
if (typeof scoreReasons[ "Profanity" ] === `undefined`)
scoreReasons[ "Profanity" ] = 0
scoreReasons[ "Profanity" ] += (numbers.length * 3);
//console.log(`profanity`);
}
});
afterFunction()
return resolve(score)
} else {
afterFunction()
return resolve(score)
}
})
}
get | earnedSpamScore | identifier_name | |
spamScore.js | pliers
var afterFunction = () => {
// Start with a spam score multiplier of 0.5
// spam score 50% if less strict channel AND less strict role
// Spam score 100% if less strict channel OR less strict role
// Spam score 150% if neither less strict channel nor less strict role
// If the member is muted, the spam score will always be 150%
var multiplier = 0.5;
var isMuted = (this.member && this.guild && this.member.roles.get(this.guild.settings.muteRole));
// If this is not a less strict channel, add 0.5 to the multiplier.
if (this.guild.settings.antispamLessStrictChannels.indexOf(this.channel.id) === -1)
multiplier += 0.5;
// If the member does not have a role defined in less strict roles, add 0.5 to the multiplier.
if (typeof this.member !== 'undefined') {
var lessStrict = false;
this.member.roles
.filter((role) => {
return this.guild.settings.antispamLessStrictRoles.indexOf(role.id) !== -1;
})
.each((role) => {
lessStrict = true;
});
if (!lessStrict)
multiplier += 0.5;
}
if (isMuted)
multiplier = 1.5;
// Text channel conflict resolution should have very strict antispam regardless of bot settings.
if (this.channel && this.channel.settings.conflictResolution && this.channel.settings.conflictResolution.indexOf("ACTIVE") !== -1)
multiplier = 2;
//console.log(`${multiplier} multiplier`);
// Flag messages with a high spam score
var modLog = this.guild.settings.flagLogChannel;
const _channel = this.client.channels.resolve(modLog);
if (score > this.guild.settings.antispamCooldown) {
if (_channel) {
var embed = new MessageEmbed()
.setTitle(`Flagged message`)
.setDescription(`${this.cleanContent}`)
.setAuthor(this.author.tag, this.author.displayAvatarURL())
.setFooter(`Message channel **${this.channel.name}**`)
.addField(`Total Spam Score`, `Base: ${score}; multiplier: ${multiplier}; total: ${score * multiplier}`)
.setColor(`#ff7878`);
for (var key in scoreReasons) {
if (Object.prototype.hasOwnProperty.call(scoreReasons, key)) {
embed.addField(key, scoreReasons[ key ]);
}
}
_channel.sendEmbed(embed, `:bangbang: Please review message ${this.id}; it was flagged for having a high spam score.`)
}
}
score = parseInt(score * multiplier);
console.log(`Total score: ${score}`)
}
console.log('Message spam score ' + this.id)
// Add 5 score for each mention; mention spam
var nummentions = this.mentions.users.size + this.mentions.roles.size;
score += (5 * nummentions);
if (nummentions > 0) { scoreReasons[ "Mentions" ] = (nummentions * 5) }
// Add 10 score for each embed; link/embed spam
var numembeds = this.embeds.length;
score += (10 * numembeds);
if (numembeds > 0) { scoreReasons[ "Embeds" ] = (numembeds * 10) }
// Add 10 score for each attachment; attachment spam
var numattachments = this.attachments.size;
score += (10 * numattachments);
if (numattachments > 0) { scoreReasons[ "Attachments" ] = (numattachments * 10) }
// Calculate how many seconds this message took to type based off of 7 characters per second.
var messageTime = (this.cleanContent.length / 7);
//console.log(`${messageTime} messagetime`);
// Iterate through messages of this channel from the last 3 minutes by the same author
var collection = this.channel.messages
.filter((message) => {
if (message.partial || message === null || !message) return false;
return message.id !== this.id && message.author.id === this.author.id && moment(this.createdAt).subtract(3, 'minutes').isBefore(moment(message.createdAt)) && moment(this.createdAt).isAfter(moment(message.createdAt));
});
//console.log(`${collection.size} messages`);
collection.each((message) => {
// If the current message was sent at a time that causes the typing speed to be more than 7 characters per second,
// add score for flooding / copypasting. The faster / more characters typed, the more score added.
var timediff = moment(this.createdAt).diff(moment(message.createdAt), 'seconds');
if (timediff <= messageTime && !this.author.bot) {
score += parseInt((messageTime - timediff) + 1);
scoreReasons[ "Flooding / Rapid Typing" ] = parseInt((messageTime - timediff) + 1)
}
// If the current message is more than 80% or more similar to the comparing message,
// add 1 score for every (similarity % - 80) / 2; copy/paste spam. Multiply by 1 + (0.1 * (numcharacters / 100))
var similarity = stringSimilarity.compareTwoStrings(`${this.content || ''}${JSON.stringify(this.embeds)}${JSON.stringify(this.attachments.array())}`, `${message.content || ''}${JSON.stringify(message.embeds)}${JSON.stringify(message.attachments.array())}`);
if (similarity >= 0.8) {
score += parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))));
scoreReasons[ "Copy-Pasting" ] = parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))))
}
});
// Score checks only if message content exists
if (this.cleanContent && this.cleanContent.length > 0) {
/* DISABLED; many false positives for emojis etc
// If the message contains any off-the-wall characters, consider it spam and add 10 to the score.
if (/[^\x20-\x7E]/g.test(this.cleanContent || '')) {
score += 10;
console.log(`special characters: 10`);
}
*/
// Count uppercase and lowercase letters
var uppercase = this.cleanContent.replace(/[^A-Z]/g, "").length;
var lowercase = this.cleanContent.replace(/[^a-z]/g, "").length;
// If 50% or more of the characters are uppercase, consider it shout spam,
// and add a score of 5, plus 1 for every 12.5 uppercase characters.
if (uppercase >= lowercase) {
score += parseInt(5 + (20 * (uppercase / 250)));
scoreReasons[ "Uppercase / Shouting" ] = parseInt(5 + (20 * (uppercase / 250)))
}
// Add score for repeating consecutive characters
// 20 or more consecutive repeating characters = extremely spammy. Add 20 score.
if (/(.)\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) | else if (/(.)\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 10;
scoreReasons[ "Repeating Characters" ] = 10
// 5 or more consecutive repeating characters = a little bit spammy. Add 5 score.
} else if (/(.)\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 5;
scoreReasons[ "Repeating Characters" ] = 5
}
// Add 40 score for here and everyone mentions as these are VERY spammy.
if (this.content.includes("@here") || this.content.includes("@everyone")) {
score += 40;
scoreReasons[ "Here / Everyone Mention" ] = 40
}
// Add spam score for every new line; but the more content : new lines, the less spam score is added.
// New lines when content length is 128 characters or less are considered very spammy.
var newlines = this.cleanContent.split(/\r\n|\r|\n/).length - 1;
var ratio = newlines / (this.cleanContent.length > 128 ? Math.ceil(this.cleanContent.length / 128) / 2 : 0.25);
score += Math.round(ratio);
if (newlines > 0 && ratio > 0) { scoreReasons[ "New Lines / Scrolling" ] | {
score += 20;
scoreReasons[ "Repeating Characters" ] = 20
// 10 or more consecutive repeating characters = spammy. Add 10 score.
} | conditional_block |
spamScore.js | to the multiplier.
if (typeof this.member !== 'undefined') {
var lessStrict = false;
this.member.roles
.filter((role) => {
return this.guild.settings.antispamLessStrictRoles.indexOf(role.id) !== -1;
})
.each((role) => {
lessStrict = true;
});
if (!lessStrict)
multiplier += 0.5;
}
if (isMuted)
multiplier = 1.5;
// Text channel conflict resolution should have very strict antispam regardless of bot settings.
if (this.channel && this.channel.settings.conflictResolution && this.channel.settings.conflictResolution.indexOf("ACTIVE") !== -1)
multiplier = 2;
//console.log(`${multiplier} multiplier`);
// Flag messages with a high spam score
var modLog = this.guild.settings.flagLogChannel;
const _channel = this.client.channels.resolve(modLog);
if (score > this.guild.settings.antispamCooldown) {
if (_channel) {
var embed = new MessageEmbed()
.setTitle(`Flagged message`)
.setDescription(`${this.cleanContent}`)
.setAuthor(this.author.tag, this.author.displayAvatarURL())
.setFooter(`Message channel **${this.channel.name}**`)
.addField(`Total Spam Score`, `Base: ${score}; multiplier: ${multiplier}; total: ${score * multiplier}`)
.setColor(`#ff7878`);
for (var key in scoreReasons) {
if (Object.prototype.hasOwnProperty.call(scoreReasons, key)) {
embed.addField(key, scoreReasons[ key ]);
}
}
_channel.sendEmbed(embed, `:bangbang: Please review message ${this.id}; it was flagged for having a high spam score.`)
}
}
score = parseInt(score * multiplier);
console.log(`Total score: ${score}`)
}
console.log('Message spam score ' + this.id)
// Add 5 score for each mention; mention spam
var nummentions = this.mentions.users.size + this.mentions.roles.size;
score += (5 * nummentions);
if (nummentions > 0) { scoreReasons[ "Mentions" ] = (nummentions * 5) }
// Add 10 score for each embed; link/embed spam
var numembeds = this.embeds.length;
score += (10 * numembeds);
if (numembeds > 0) { scoreReasons[ "Embeds" ] = (numembeds * 10) }
// Add 10 score for each attachment; attachment spam
var numattachments = this.attachments.size;
score += (10 * numattachments);
if (numattachments > 0) { scoreReasons[ "Attachments" ] = (numattachments * 10) }
// Calculate how many seconds this message took to type based off of 7 characters per second.
var messageTime = (this.cleanContent.length / 7);
//console.log(`${messageTime} messagetime`);
// Iterate through messages of this channel from the last 3 minutes by the same author
var collection = this.channel.messages
.filter((message) => {
if (message.partial || message === null || !message) return false;
return message.id !== this.id && message.author.id === this.author.id && moment(this.createdAt).subtract(3, 'minutes').isBefore(moment(message.createdAt)) && moment(this.createdAt).isAfter(moment(message.createdAt));
});
//console.log(`${collection.size} messages`);
collection.each((message) => {
// If the current message was sent at a time that causes the typing speed to be more than 7 characters per second,
// add score for flooding / copypasting. The faster / more characters typed, the more score added.
var timediff = moment(this.createdAt).diff(moment(message.createdAt), 'seconds');
if (timediff <= messageTime && !this.author.bot) {
score += parseInt((messageTime - timediff) + 1);
scoreReasons[ "Flooding / Rapid Typing" ] = parseInt((messageTime - timediff) + 1)
}
// If the current message is more than 80% or more similar to the comparing message,
// add 1 score for every (similarity % - 80) / 2; copy/paste spam. Multiply by 1 + (0.1 * (numcharacters / 100))
var similarity = stringSimilarity.compareTwoStrings(`${this.content || ''}${JSON.stringify(this.embeds)}${JSON.stringify(this.attachments.array())}`, `${message.content || ''}${JSON.stringify(message.embeds)}${JSON.stringify(message.attachments.array())}`);
if (similarity >= 0.8) {
score += parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))));
scoreReasons[ "Copy-Pasting" ] = parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))))
}
});
// Score checks only if message content exists
if (this.cleanContent && this.cleanContent.length > 0) {
/* DISABLED; many false positives for emojis etc
// If the message contains any off-the-wall characters, consider it spam and add 10 to the score.
if (/[^\x20-\x7E]/g.test(this.cleanContent || '')) {
score += 10;
console.log(`special characters: 10`);
}
*/
// Count uppercase and lowercase letters
var uppercase = this.cleanContent.replace(/[^A-Z]/g, "").length;
var lowercase = this.cleanContent.replace(/[^a-z]/g, "").length;
// If 50% or more of the characters are uppercase, consider it shout spam,
// and add a score of 5, plus 1 for every 12.5 uppercase characters.
if (uppercase >= lowercase) {
score += parseInt(5 + (20 * (uppercase / 250)));
scoreReasons[ "Uppercase / Shouting" ] = parseInt(5 + (20 * (uppercase / 250)))
}
// Add score for repeating consecutive characters
// 20 or more consecutive repeating characters = extremely spammy. Add 20 score.
if (/(.)\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 20;
scoreReasons[ "Repeating Characters" ] = 20
// 10 or more consecutive repeating characters = spammy. Add 10 score.
} else if (/(.)\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 10;
scoreReasons[ "Repeating Characters" ] = 10
// 5 or more consecutive repeating characters = a little bit spammy. Add 5 score.
} else if (/(.)\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 5;
scoreReasons[ "Repeating Characters" ] = 5
}
// Add 40 score for here and everyone mentions as these are VERY spammy.
if (this.content.includes("@here") || this.content.includes("@everyone")) {
score += 40;
scoreReasons[ "Here / Everyone Mention" ] = 40
}
// Add spam score for every new line; but the more content : new lines, the less spam score is added.
// New lines when content length is 128 characters or less are considered very spammy.
var newlines = this.cleanContent.split(/\r\n|\r|\n/).length - 1;
var ratio = newlines / (this.cleanContent.length > 128 ? Math.ceil(this.cleanContent.length / 128) / 2 : 0.25);
score += Math.round(ratio);
if (newlines > 0 && ratio > 0) { scoreReasons[ "New Lines / Scrolling" ] = Math.round(ratio) }
// Add score for repeating patterns
// TODO: improve this algorithm
var newstring = this.cleanContent;
var regex = /(\W|^)(.+)\s\2/gmi;
var matcher = regex.exec(this.cleanContent);
while (matcher !== null) {
newstring = newstring.replace(matcher[ 2 ], ``);
matcher = regex.exec(this.cleanContent);
} | var patternScore = (this.cleanContent.length > 0 ? (newstring.length / this.cleanContent.length) : 1);
// Pattern score of 100% means no repeating patterns. For every 4% less than 100%, add 1 score. Multiply depending on content length.
score += parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) | random_line_split | |
spamScore.js | .setFooter(`Message channel **${this.channel.name}**`)
.addField(`Total Spam Score`, `Base: ${score}; multiplier: ${multiplier}; total: ${score * multiplier}`)
.setColor(`#ff7878`);
for (var key in scoreReasons) {
if (Object.prototype.hasOwnProperty.call(scoreReasons, key)) {
embed.addField(key, scoreReasons[ key ]);
}
}
_channel.sendEmbed(embed, `:bangbang: Please review message ${this.id}; it was flagged for having a high spam score.`)
}
}
score = parseInt(score * multiplier);
console.log(`Total score: ${score}`)
}
console.log('Message spam score ' + this.id)
// Add 5 score for each mention; mention spam
var nummentions = this.mentions.users.size + this.mentions.roles.size;
score += (5 * nummentions);
if (nummentions > 0) { scoreReasons[ "Mentions" ] = (nummentions * 5) }
// Add 10 score for each embed; link/embed spam
var numembeds = this.embeds.length;
score += (10 * numembeds);
if (numembeds > 0) { scoreReasons[ "Embeds" ] = (numembeds * 10) }
// Add 10 score for each attachment; attachment spam
var numattachments = this.attachments.size;
score += (10 * numattachments);
if (numattachments > 0) { scoreReasons[ "Attachments" ] = (numattachments * 10) }
// Calculate how many seconds this message took to type based off of 7 characters per second.
var messageTime = (this.cleanContent.length / 7);
//console.log(`${messageTime} messagetime`);
// Iterate through messages of this channel from the last 3 minutes by the same author
var collection = this.channel.messages
.filter((message) => {
if (message.partial || message === null || !message) return false;
return message.id !== this.id && message.author.id === this.author.id && moment(this.createdAt).subtract(3, 'minutes').isBefore(moment(message.createdAt)) && moment(this.createdAt).isAfter(moment(message.createdAt));
});
//console.log(`${collection.size} messages`);
collection.each((message) => {
// If the current message was sent at a time that causes the typing speed to be more than 7 characters per second,
// add score for flooding / copypasting. The faster / more characters typed, the more score added.
var timediff = moment(this.createdAt).diff(moment(message.createdAt), 'seconds');
if (timediff <= messageTime && !this.author.bot) {
score += parseInt((messageTime - timediff) + 1);
scoreReasons[ "Flooding / Rapid Typing" ] = parseInt((messageTime - timediff) + 1)
}
// If the current message is more than 80% or more similar to the comparing message,
// add 1 score for every (similarity % - 80) / 2; copy/paste spam. Multiply by 1 + (0.1 * (numcharacters / 100))
var similarity = stringSimilarity.compareTwoStrings(`${this.content || ''}${JSON.stringify(this.embeds)}${JSON.stringify(this.attachments.array())}`, `${message.content || ''}${JSON.stringify(message.embeds)}${JSON.stringify(message.attachments.array())}`);
if (similarity >= 0.8) {
score += parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))));
scoreReasons[ "Copy-Pasting" ] = parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))))
}
});
// Score checks only if message content exists
if (this.cleanContent && this.cleanContent.length > 0) {
/* DISABLED; many false positives for emojis etc
// If the message contains any off-the-wall characters, consider it spam and add 10 to the score.
if (/[^\x20-\x7E]/g.test(this.cleanContent || '')) {
score += 10;
console.log(`special characters: 10`);
}
*/
// Count uppercase and lowercase letters
var uppercase = this.cleanContent.replace(/[^A-Z]/g, "").length;
var lowercase = this.cleanContent.replace(/[^a-z]/g, "").length;
// If 50% or more of the characters are uppercase, consider it shout spam,
// and add a score of 5, plus 1 for every 12.5 uppercase characters.
if (uppercase >= lowercase) {
score += parseInt(5 + (20 * (uppercase / 250)));
scoreReasons[ "Uppercase / Shouting" ] = parseInt(5 + (20 * (uppercase / 250)))
}
// Add score for repeating consecutive characters
// 20 or more consecutive repeating characters = extremely spammy. Add 20 score.
if (/(.)\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 20;
scoreReasons[ "Repeating Characters" ] = 20
// 10 or more consecutive repeating characters = spammy. Add 10 score.
} else if (/(.)\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 10;
scoreReasons[ "Repeating Characters" ] = 10
// 5 or more consecutive repeating characters = a little bit spammy. Add 5 score.
} else if (/(.)\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) {
score += 5;
scoreReasons[ "Repeating Characters" ] = 5
}
// Add 40 score for here and everyone mentions as these are VERY spammy.
if (this.content.includes("@here") || this.content.includes("@everyone")) {
score += 40;
scoreReasons[ "Here / Everyone Mention" ] = 40
}
// Add spam score for every new line; but the more content : new lines, the less spam score is added.
// New lines when content length is 128 characters or less are considered very spammy.
var newlines = this.cleanContent.split(/\r\n|\r|\n/).length - 1;
var ratio = newlines / (this.cleanContent.length > 128 ? Math.ceil(this.cleanContent.length / 128) / 2 : 0.25);
score += Math.round(ratio);
if (newlines > 0 && ratio > 0) { scoreReasons[ "New Lines / Scrolling" ] = Math.round(ratio) }
// Add score for repeating patterns
// TODO: improve this algorithm
var newstring = this.cleanContent;
var regex = /(\W|^)(.+)\s\2/gmi;
var matcher = regex.exec(this.cleanContent);
while (matcher !== null) {
newstring = newstring.replace(matcher[ 2 ], ``);
matcher = regex.exec(this.cleanContent);
}
var patternScore = (this.cleanContent.length > 0 ? (newstring.length / this.cleanContent.length) : 1);
// Pattern score of 100% means no repeating patterns. For every 4% less than 100%, add 1 score. Multiply depending on content length.
score += parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))))
if (patternScore < 1) { scoreReasons[ "Repeating Patterns" ] = parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) }
// Add 3 points for every profane word used; excessive profanity spam
config.profanity.map((word) => {
var numbers = getIndicesOf(word, this.cleanContent, false);
if (numbers.length > 0) {
score += (numbers.length * 3);
if (typeof scoreReasons[ "Profanity" ] === `undefined`)
scoreReasons[ "Profanity" ] = 0
scoreReasons[ "Profanity" ] += (numbers.length * 3);
//console.log(`profanity`);
}
});
afterFunction()
return resolve(score)
} else {
afterFunction()
return resolve(score)
}
})
}
get earnedSpamScore () {
// Accessor for the spam score previously computed and cached on this message.
return this._earnedSpamScore;
}
set earnedSpamScore (value) | {
this._earnedSpamScore = value;
} | identifier_body | |
artifacts.py | -axis,
[0, 0, 0, CAMERA_FOCAL_LENGTH], # so that y-axis looks up
[0, 0, 1/0.42, 0]])
self.detectors = [
{
'artefact_name': 'cubesat',
'detector_type': 'classifier',
'classifier': cv2.CascadeClassifier(str(curdir/'xml/cubesat.xml')),
'min_size': 5,
'max_size': 110,
'subsequent_detects_required': 3
},
{
'artefact_name': 'homebase',
'detector_type': 'classifier',
'classifier': cv2.CascadeClassifier(str(curdir/'xml/homebase.xml')),
'min_size': 20,
'max_size': 400,
'subsequent_detects_required': 3
},
{
'artefact_name': 'basemarker',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 500,
'mask': [CAMERA_HEIGHT//2, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order, look only in lower half of the screen (avoid solar panels)
'pixel_count_threshold': 100,
'bbox_union_count': 1,
'hue_max_difference': 10,
'hue_match': 100, # from RGB 007DBD
'subsequent_detects_required': 3 # noise will add some of this color, wait for a consistent sequence
},
{
'artefact_name': 'homebase',
'detector_type': 'colormatch',
'min_size': 20,
'max_size': 700,
'mask': None,
'pixel_count_threshold': 400,
'bbox_union_count': 5,
'hue_max_difference': 10,
'hue_match': 19, # from RGB FFA616
'subsequent_detects_required': 3
},
{
'artefact_name': 'rover',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 700,
'mask': [180, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order - only look in lower half of screen
'pixel_count_threshold': 150,
'bbox_union_count': 10,
'hue_max_difference': 3,
'hue_match': 27, # from RGB FFA616
'subsequent_detects_required': 1
},
{
'artefact_name': 'excavator_arm',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 200,
'mask': [0, 120, 0, CAMERA_WIDTH], # [Y,X] order
'pixel_count_threshold': 150,
'bbox_union_count': 3,
'hue_max_difference': 3,
'hue_match': 27, # from RGB FFA616
'subsequent_detects_required': 1
}
]
self.detect_sequences = {}
def stdout(self, *args, **kwargs):
# maybe refactor to Node?
|
def waitForImage(self):
    """Block until a full stereo pair arrives; return the timestamp of the last message.

    Side effects: stores the pair in self.left_image / self.right_image and
    updates self.time for every message consumed from the bus.
    """
    self.left_image = None
    self.right_image = None
    # Keep consuming bus messages until both halves of the pair are present.
    while True:
        self.time, channel, data = self.listen()
        if channel == "left_image":
            self.left_image = data
        elif channel == "right_image":
            self.right_image = data
        if self.left_image is not None and self.right_image is not None:
            return self.time
def run(self):
    """Main loop: report dropped frames, then run detection on the newest stereo pair.

    Runs until the bus shuts down (BusShutdownException), which is the normal
    termination path.
    """
    try:
        skipped = 0
        while True:
            # Report how many frames were discarded during the previous pass.
            publish_time = self.publish("dropped", skipped)
            # The first waitForImage() below is the frame we keep, so start at -1.
            skipped = -1
            image_time = publish_time
            while image_time <= publish_time:
                # Detection can be slower than the simulation: keep pulling
                # images until one is newer than the publish timestamp.
                image_time = self.waitForImage()
                skipped += 1
            self.detect_and_publish(self.left_image, self.right_image)
    except BusShutdownException:
        # Bus closed: exit the thread quietly.
        pass
def detect_and_publish(self, left_image, right_image):
    """Run artefact detection on a stereo pair and publish each hit on 'artf'."""
    for artefact in self.detect(left_image, right_image):
        self.publish('artf', artefact)
def detect(self, left_image, right_image):
results = []
limg = cv2.imdecode(np.frombuffer(left_image, dtype=np.uint8), cv2.IMREAD_COLOR)
rimg = cv2.imdecode(np.frombuffer(right_image, dtype=np.uint8), cv2.IMREAD_COLOR)
if self.width is None:
self.stdout('Image resolution', limg.shape)
self.width = limg.shape[1]
assert self.width == limg.shape[1], (self.width, limg.shape[1])
def box_area(b):
return b[2]*b[3]
limg_rgb = cv2.cvtColor(limg, cv2.COLOR_BGR2RGB)
rimg_rgb = cv2.cvtColor(rimg, cv2.COLOR_BGR2RGB)
hsv = cv2.cvtColor(limg, cv2.COLOR_BGR2HSV)
hsv_blurred = cv2.medianBlur(hsv,5) # some frames have noise, need to blur otherwise threshold doesn't work
objects_detected = []
for c in self.detectors:
if c['artefact_name'] not in self.look_for_artefacts:
continue
if c['artefact_name'] not in self.detect_sequences:
self.detect_sequences[c['artefact_name']] = 0
if c['detector_type'] == 'colormatch':
lower_hue = np.array([c['hue_match'] - c['hue_max_difference'],50,50])
upper_hue = np.array([c['hue_match'] + c['hue_max_difference'],255,255])
# Threshold the HSV image to get only the matching colors
mask = cv2.inRange(hsv_blurred, lower_hue, upper_hue)
if c['mask'] is not None:
m = np.zeros([CAMERA_HEIGHT,CAMERA_WIDTH], dtype=np.uint8)
m[c['mask'][0]:c['mask'][1],c['mask'][2]:c['mask'][3]] = 255
mask &= m
bboxes = []
contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours = contours[0] if len(contours) == 2 else contours[1]
for cont in contours:
contours_poly = cv2.approxPolyDP(cont, 3, True)
x,y,w,h = cv2.boundingRect(contours_poly)
if w > 1 or h > 1: # ignore isolated pixels
bboxes.append([int(x),int(y),int(w),int(h)])
if len(bboxes) > 0:
sb = sorted(bboxes, key = box_area, reverse = True)[:c['bbox_union_count']]
bbox = sb[0]
for b in sb[1:]:
bbox = union(bbox,b)
x, y, w, h = bbox
match_count = cv2.countNonZero(mask[y:y+h,x:x+w])
if (
match_count > c['pixel_count_threshold'] and
w >= c['min_size'] and h >= c['min_size'] and
w <= c['max_size'] and h <= c['max_size']
):
# print ("%s match count: %d; [%d %d %d %d]" % (c['artefact_name'], match_count, x, y, w, h))
objects_detected.append(c['artefact_name'])
if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']:
# do not act until you have detections in a row
self.detect_sequences[c['artefact_name']] += 1
else:
if self.estimate_distance:
disp = self.stereo_calc.compute(limg_rgb, rimg_rgb).astype(np.float32) / 16.0
points = cv2.reprojectImageTo3D(disp, self.Q)
matching_points = points[mask != 0]
distances = matching_points[:,2] # third column are Z coords (distances)
mean = np.mean(distances)
sd = np.std(distances)
distances_clean = [x for x in distances if mean - 2 * sd < x < mean + 2 * sd]
#print("Artf distance: min %.1f median: %.1f" % (min(distances), median(distances)))
if len(distances_clean) == 0:
distances_clean = distances
# print("Artf cleaned: min %.1f median: %.1f" % (min(final_list), median(final_list)))
dist = max(0.0, min(distances_clean)) # subtract about half length of the rover
else:
dist = 0.0
results.append((c['artefact | output = StringIO()
print(*args, file=output, **kwargs)
contents = output.getvalue().strip()
output.close()
# self.publish('stdout', contents)
print(contents) | identifier_body |
artifacts.py | -axis,
[0, 0, 0, CAMERA_FOCAL_LENGTH], # so that y-axis looks up
[0, 0, 1/0.42, 0]])
self.detectors = [
{
'artefact_name': 'cubesat',
'detector_type': 'classifier',
'classifier': cv2.CascadeClassifier(str(curdir/'xml/cubesat.xml')),
'min_size': 5,
'max_size': 110,
'subsequent_detects_required': 3
},
{
'artefact_name': 'homebase',
'detector_type': 'classifier',
'classifier': cv2.CascadeClassifier(str(curdir/'xml/homebase.xml')),
'min_size': 20,
'max_size': 400,
'subsequent_detects_required': 3
},
{
'artefact_name': 'basemarker',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 500,
'mask': [CAMERA_HEIGHT//2, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order, look only in lower half of the screen (avoid solar panels)
'pixel_count_threshold': 100,
'bbox_union_count': 1,
'hue_max_difference': 10,
'hue_match': 100, # from RGB 007DBD
'subsequent_detects_required': 3 # noise will add some of this color, wait for a consistent sequence
},
{
'artefact_name': 'homebase',
'detector_type': 'colormatch',
'min_size': 20,
'max_size': 700,
'mask': None,
'pixel_count_threshold': 400,
'bbox_union_count': 5,
'hue_max_difference': 10,
'hue_match': 19, # from RGB FFA616
'subsequent_detects_required': 3
},
{
'artefact_name': 'rover',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 700,
'mask': [180, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order - only look in lower half of screen
'pixel_count_threshold': 150,
'bbox_union_count': 10,
'hue_max_difference': 3,
'hue_match': 27, # from RGB FFA616
'subsequent_detects_required': 1
},
{
'artefact_name': 'excavator_arm',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 200,
'mask': [0, 120, 0, CAMERA_WIDTH], # [Y,X] order
'pixel_count_threshold': 150,
'bbox_union_count': 3,
'hue_max_difference': 3,
'hue_match': 27, # from RGB FFA616
'subsequent_detects_required': 1
}
]
self.detect_sequences = {}
def | (self, *args, **kwargs):
# maybe refactor to Node?
output = StringIO()
print(*args, file=output, **kwargs)
contents = output.getvalue().strip()
output.close()
# self.publish('stdout', contents)
print(contents)
def waitForImage(self):
self.left_image = self.right_image = None
while self.left_image is None or self.right_image is None:
self.time, channel, data = self.listen()
if channel == "left_image":
self.left_image = data
elif channel == "right_image":
self.right_image = data
return self.time
def run(self):
try:
dropped = 0
while True:
now = self.publish("dropped", dropped)
dropped = -1
timestamp = now
while timestamp <= now:
# this thread is always running but wait and drop images if simulation is slower
timestamp = self.waitForImage()
dropped += 1
self.detect_and_publish(self.left_image, self.right_image)
except BusShutdownException:
pass
def detect_and_publish(self, left_image, right_image):
results = self.detect(left_image, right_image)
for r in results:
self.publish('artf', r)
def detect(self, left_image, right_image):
results = []
limg = cv2.imdecode(np.frombuffer(left_image, dtype=np.uint8), cv2.IMREAD_COLOR)
rimg = cv2.imdecode(np.frombuffer(right_image, dtype=np.uint8), cv2.IMREAD_COLOR)
if self.width is None:
self.stdout('Image resolution', limg.shape)
self.width = limg.shape[1]
assert self.width == limg.shape[1], (self.width, limg.shape[1])
def box_area(b):
return b[2]*b[3]
limg_rgb = cv2.cvtColor(limg, cv2.COLOR_BGR2RGB)
rimg_rgb = cv2.cvtColor(rimg, cv2.COLOR_BGR2RGB)
hsv = cv2.cvtColor(limg, cv2.COLOR_BGR2HSV)
hsv_blurred = cv2.medianBlur(hsv,5) # some frames have noise, need to blur otherwise threshold doesn't work
objects_detected = []
for c in self.detectors:
if c['artefact_name'] not in self.look_for_artefacts:
continue
if c['artefact_name'] not in self.detect_sequences:
self.detect_sequences[c['artefact_name']] = 0
if c['detector_type'] == 'colormatch':
lower_hue = np.array([c['hue_match'] - c['hue_max_difference'],50,50])
upper_hue = np.array([c['hue_match'] + c['hue_max_difference'],255,255])
# Threshold the HSV image to get only the matching colors
mask = cv2.inRange(hsv_blurred, lower_hue, upper_hue)
if c['mask'] is not None:
m = np.zeros([CAMERA_HEIGHT,CAMERA_WIDTH], dtype=np.uint8)
m[c['mask'][0]:c['mask'][1],c['mask'][2]:c['mask'][3]] = 255
mask &= m
bboxes = []
contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours = contours[0] if len(contours) == 2 else contours[1]
for cont in contours:
contours_poly = cv2.approxPolyDP(cont, 3, True)
x,y,w,h = cv2.boundingRect(contours_poly)
if w > 1 or h > 1: # ignore isolated pixels
bboxes.append([int(x),int(y),int(w),int(h)])
if len(bboxes) > 0:
sb = sorted(bboxes, key = box_area, reverse = True)[:c['bbox_union_count']]
bbox = sb[0]
for b in sb[1:]:
bbox = union(bbox,b)
x, y, w, h = bbox
match_count = cv2.countNonZero(mask[y:y+h,x:x+w])
if (
match_count > c['pixel_count_threshold'] and
w >= c['min_size'] and h >= c['min_size'] and
w <= c['max_size'] and h <= c['max_size']
):
# print ("%s match count: %d; [%d %d %d %d]" % (c['artefact_name'], match_count, x, y, w, h))
objects_detected.append(c['artefact_name'])
if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']:
# do not act until you have detections in a row
self.detect_sequences[c['artefact_name']] += 1
else:
if self.estimate_distance:
disp = self.stereo_calc.compute(limg_rgb, rimg_rgb).astype(np.float32) / 16.0
points = cv2.reprojectImageTo3D(disp, self.Q)
matching_points = points[mask != 0]
distances = matching_points[:,2] # third column are Z coords (distances)
mean = np.mean(distances)
sd = np.std(distances)
distances_clean = [x for x in distances if mean - 2 * sd < x < mean + 2 * sd]
#print("Artf distance: min %.1f median: %.1f" % (min(distances), median(distances)))
if len(distances_clean) == 0:
distances_clean = distances
# print("Artf cleaned: min %.1f median: %.1f" % (min(final_list), median(final_list)))
dist = max(0.0, min(distances_clean)) # subtract about half length of the rover
else:
dist = 0.0
results.append((c['artefact | stdout | identifier_name |
artifacts.py | x-axis,
[0, 0, 0, CAMERA_FOCAL_LENGTH], # so that y-axis looks up
[0, 0, 1/0.42, 0]])
self.detectors = [
{
'artefact_name': 'cubesat',
'detector_type': 'classifier',
'classifier': cv2.CascadeClassifier(str(curdir/'xml/cubesat.xml')),
'min_size': 5,
'max_size': 110,
'subsequent_detects_required': 3
},
{
'artefact_name': 'homebase',
'detector_type': 'classifier',
'classifier': cv2.CascadeClassifier(str(curdir/'xml/homebase.xml')),
'min_size': 20,
'max_size': 400,
'subsequent_detects_required': 3
},
{
'artefact_name': 'basemarker',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 500,
'mask': [CAMERA_HEIGHT//2, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order, look only in lower half of the screen (avoid solar panels)
'pixel_count_threshold': 100,
'bbox_union_count': 1,
'hue_max_difference': 10,
'hue_match': 100, # from RGB 007DBD
'subsequent_detects_required': 3 # noise will add some of this color, wait for a consistent sequence
},
{
'artefact_name': 'homebase',
'detector_type': 'colormatch',
'min_size': 20,
'max_size': 700,
'mask': None,
'pixel_count_threshold': 400,
'bbox_union_count': 5,
'hue_max_difference': 10,
'hue_match': 19, # from RGB FFA616
'subsequent_detects_required': 3
},
{
'artefact_name': 'rover',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 700,
'mask': [180, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order - only look in lower half of screen
'pixel_count_threshold': 150,
'bbox_union_count': 10,
'hue_max_difference': 3,
'hue_match': 27, # from RGB FFA616
'subsequent_detects_required': 1
},
{
'artefact_name': 'excavator_arm',
'detector_type': 'colormatch',
'min_size': 10,
'max_size': 200,
'mask': [0, 120, 0, CAMERA_WIDTH], # [Y,X] order
'pixel_count_threshold': 150,
'bbox_union_count': 3,
'hue_max_difference': 3,
'hue_match': 27, # from RGB FFA616
'subsequent_detects_required': 1
}
]
self.detect_sequences = {}
def stdout(self, *args, **kwargs):
# maybe refactor to Node?
output = StringIO()
print(*args, file=output, **kwargs)
contents = output.getvalue().strip()
output.close()
# self.publish('stdout', contents)
print(contents)
def waitForImage(self):
self.left_image = self.right_image = None
while self.left_image is None or self.right_image is None:
self.time, channel, data = self.listen()
if channel == "left_image":
self.left_image = data
elif channel == "right_image": | return self.time
def run(self):
try:
dropped = 0
while True:
now = self.publish("dropped", dropped)
dropped = -1
timestamp = now
while timestamp <= now:
# this thread is always running but wait and drop images if simulation is slower
timestamp = self.waitForImage()
dropped += 1
self.detect_and_publish(self.left_image, self.right_image)
except BusShutdownException:
pass
def detect_and_publish(self, left_image, right_image):
results = self.detect(left_image, right_image)
for r in results:
self.publish('artf', r)
def detect(self, left_image, right_image):
results = []
limg = cv2.imdecode(np.frombuffer(left_image, dtype=np.uint8), cv2.IMREAD_COLOR)
rimg = cv2.imdecode(np.frombuffer(right_image, dtype=np.uint8), cv2.IMREAD_COLOR)
if self.width is None:
self.stdout('Image resolution', limg.shape)
self.width = limg.shape[1]
assert self.width == limg.shape[1], (self.width, limg.shape[1])
def box_area(b):
return b[2]*b[3]
limg_rgb = cv2.cvtColor(limg, cv2.COLOR_BGR2RGB)
rimg_rgb = cv2.cvtColor(rimg, cv2.COLOR_BGR2RGB)
hsv = cv2.cvtColor(limg, cv2.COLOR_BGR2HSV)
hsv_blurred = cv2.medianBlur(hsv,5) # some frames have noise, need to blur otherwise threshold doesn't work
objects_detected = []
for c in self.detectors:
if c['artefact_name'] not in self.look_for_artefacts:
continue
if c['artefact_name'] not in self.detect_sequences:
self.detect_sequences[c['artefact_name']] = 0
if c['detector_type'] == 'colormatch':
lower_hue = np.array([c['hue_match'] - c['hue_max_difference'],50,50])
upper_hue = np.array([c['hue_match'] + c['hue_max_difference'],255,255])
# Threshold the HSV image to get only the matching colors
mask = cv2.inRange(hsv_blurred, lower_hue, upper_hue)
if c['mask'] is not None:
m = np.zeros([CAMERA_HEIGHT,CAMERA_WIDTH], dtype=np.uint8)
m[c['mask'][0]:c['mask'][1],c['mask'][2]:c['mask'][3]] = 255
mask &= m
bboxes = []
contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours = contours[0] if len(contours) == 2 else contours[1]
for cont in contours:
contours_poly = cv2.approxPolyDP(cont, 3, True)
x,y,w,h = cv2.boundingRect(contours_poly)
if w > 1 or h > 1: # ignore isolated pixels
bboxes.append([int(x),int(y),int(w),int(h)])
if len(bboxes) > 0:
sb = sorted(bboxes, key = box_area, reverse = True)[:c['bbox_union_count']]
bbox = sb[0]
for b in sb[1:]:
bbox = union(bbox,b)
x, y, w, h = bbox
match_count = cv2.countNonZero(mask[y:y+h,x:x+w])
if (
match_count > c['pixel_count_threshold'] and
w >= c['min_size'] and h >= c['min_size'] and
w <= c['max_size'] and h <= c['max_size']
):
# print ("%s match count: %d; [%d %d %d %d]" % (c['artefact_name'], match_count, x, y, w, h))
objects_detected.append(c['artefact_name'])
if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']:
# do not act until you have detections in a row
self.detect_sequences[c['artefact_name']] += 1
else:
if self.estimate_distance:
disp = self.stereo_calc.compute(limg_rgb, rimg_rgb).astype(np.float32) / 16.0
points = cv2.reprojectImageTo3D(disp, self.Q)
matching_points = points[mask != 0]
distances = matching_points[:,2] # third column are Z coords (distances)
mean = np.mean(distances)
sd = np.std(distances)
distances_clean = [x for x in distances if mean - 2 * sd < x < mean + 2 * sd]
#print("Artf distance: min %.1f median: %.1f" % (min(distances), median(distances)))
if len(distances_clean) == 0:
distances_clean = distances
# print("Artf cleaned: min %.1f median: %.1f" % (min(final_list), median(final_list)))
dist = max(0.0, min(distances_clean)) # subtract about half length of the rover
else:
dist = 0.0
results.append((c['artefact_name | self.right_image = data | random_line_split |
artifacts.py | :
pass
def detect_and_publish(self, left_image, right_image):
results = self.detect(left_image, right_image)
for r in results:
self.publish('artf', r)
def detect(self, left_image, right_image):
results = []
limg = cv2.imdecode(np.frombuffer(left_image, dtype=np.uint8), cv2.IMREAD_COLOR)
rimg = cv2.imdecode(np.frombuffer(right_image, dtype=np.uint8), cv2.IMREAD_COLOR)
if self.width is None:
self.stdout('Image resolution', limg.shape)
self.width = limg.shape[1]
assert self.width == limg.shape[1], (self.width, limg.shape[1])
def box_area(b):
return b[2]*b[3]
limg_rgb = cv2.cvtColor(limg, cv2.COLOR_BGR2RGB)
rimg_rgb = cv2.cvtColor(rimg, cv2.COLOR_BGR2RGB)
hsv = cv2.cvtColor(limg, cv2.COLOR_BGR2HSV)
hsv_blurred = cv2.medianBlur(hsv,5) # some frames have noise, need to blur otherwise threshold doesn't work
objects_detected = []
for c in self.detectors:
if c['artefact_name'] not in self.look_for_artefacts:
continue
if c['artefact_name'] not in self.detect_sequences:
self.detect_sequences[c['artefact_name']] = 0
if c['detector_type'] == 'colormatch':
lower_hue = np.array([c['hue_match'] - c['hue_max_difference'],50,50])
upper_hue = np.array([c['hue_match'] + c['hue_max_difference'],255,255])
# Threshold the HSV image to get only the matching colors
mask = cv2.inRange(hsv_blurred, lower_hue, upper_hue)
if c['mask'] is not None:
m = np.zeros([CAMERA_HEIGHT,CAMERA_WIDTH], dtype=np.uint8)
m[c['mask'][0]:c['mask'][1],c['mask'][2]:c['mask'][3]] = 255
mask &= m
bboxes = []
contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours = contours[0] if len(contours) == 2 else contours[1]
for cont in contours:
contours_poly = cv2.approxPolyDP(cont, 3, True)
x,y,w,h = cv2.boundingRect(contours_poly)
if w > 1 or h > 1: # ignore isolated pixels
bboxes.append([int(x),int(y),int(w),int(h)])
if len(bboxes) > 0:
sb = sorted(bboxes, key = box_area, reverse = True)[:c['bbox_union_count']]
bbox = sb[0]
for b in sb[1:]:
bbox = union(bbox,b)
x, y, w, h = bbox
match_count = cv2.countNonZero(mask[y:y+h,x:x+w])
if (
match_count > c['pixel_count_threshold'] and
w >= c['min_size'] and h >= c['min_size'] and
w <= c['max_size'] and h <= c['max_size']
):
# print ("%s match count: %d; [%d %d %d %d]" % (c['artefact_name'], match_count, x, y, w, h))
objects_detected.append(c['artefact_name'])
if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']:
# do not act until you have detections in a row
self.detect_sequences[c['artefact_name']] += 1
else:
if self.estimate_distance:
disp = self.stereo_calc.compute(limg_rgb, rimg_rgb).astype(np.float32) / 16.0
points = cv2.reprojectImageTo3D(disp, self.Q)
matching_points = points[mask != 0]
distances = matching_points[:,2] # third column are Z coords (distances)
mean = np.mean(distances)
sd = np.std(distances)
distances_clean = [x for x in distances if mean - 2 * sd < x < mean + 2 * sd]
#print("Artf distance: min %.1f median: %.1f" % (min(distances), median(distances)))
if len(distances_clean) == 0:
distances_clean = distances
# print("Artf cleaned: min %.1f median: %.1f" % (min(final_list), median(final_list)))
dist = max(0.0, min(distances_clean)) # subtract about half length of the rover
else:
dist = 0.0
results.append((c['artefact_name'], int(x), int(y), int(w), int(h), int(match_count), float(dist)))
if c['detector_type'] == 'classifier':
lfound = c['classifier'].detectMultiScale(limg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size']))
rfound = c['classifier'].detectMultiScale(rimg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size']))
if len(lfound) > 0 and len(rfound) > 0: # only report if both cameras see it
objects_detected.append(c['artefact_name'])
if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row
self.detect_sequences[c['artefact_name']] += 1
else:
# TODO: tweak the filtering (blur and threshold), sometimes not all background is filtered out and the bbox looks bigger than it should be
x,y,width,height = lfound[0]
# print(self.time, "Pre: %d %d %d %d" % (x,y,width,height))
gray = cv2.cvtColor(limg_rgb[y:y+height, x:x+width], cv2.COLOR_BGR2GRAY)
blur = cv2.medianBlur(gray,3) # some frames have noise, need to blur otherwise threshold doesn't work
th, threshed = cv2.threshold(blur, 30, 255, cv2.THRESH_BINARY)
coords = cv2.findNonZero(threshed)
nonzerocount = cv2.countNonZero(threshed)
nx, ny, nw, nh = cv2.boundingRect(coords)
# print(self.time, "Post: %d %d %d %d" % (x+nx,y+ny,nw,nh))
results.append((c['artefact_name'], int(x+nx), int(y+ny), int(nw), int(nh), int(nonzerocount)))
for artefact_name in self.detect_sequences.keys():
if artefact_name not in objects_detected:
self.detect_sequences[artefact_name] = 0
return results
def debug2dir(filename, out_dir):
from osgar.logger import LogReader, lookup_stream_names
from osgar.lib.serialize import deserialize
names = lookup_stream_names(filename)
assert 'detector.debug_artf' in names, names
assert 'detector.artf' in names, names
assert 'rosmsg.sim_time_sec' in names, names
image_id = names.index('detector.debug_artf') + 1
artf_id = names.index('detector.artf') + 1
sim_sec_id = names.index('rosmsg.sim_time_sec') + 1
sim_time_sec = None
image = None
artf = None
for dt, channel, data in LogReader(filename, only_stream_id=[image_id, artf_id, sim_sec_id]):
data = deserialize(data)
if channel == sim_sec_id:
sim_time_sec = data
elif channel == image_id:
image = data
assert artf is not None
time_sec = sim_time_sec if sim_time_sec is not None else int(dt.total_seconds())
name = os.path.basename(filename)[:-4] + '-' + artf[0] + '-' + str(time_sec) + '.jpg'
print(name)
with open(os.path.join(out_dir, name), 'wb') as f:
f.write(image)
elif channel == artf_id:
artf = data
if __name__ == '__main__':
| from unittest.mock import MagicMock
from queue import Queue
import argparse
import datetime
import sys
from osgar.bus import Bus
parser = argparse.ArgumentParser(description='Run artifact detection and classification for given JPEG image')
parser.add_argument('filename', help='JPEG filename')
parser.add_argument('--debug2dir', help='dump clasified debug images into directory')
parser.add_argument('-v', '--verbose', help='verbose mode', action='store_true')
args = parser.parse_args()
if args.debug2dir is not None:
debug2dir(args.filename, args.debug2dir)
sys.exit()
with open(args.filename.replace('.npz', '.jpg'), 'rb') as f:
jpeg_data = f.read()
| conditional_block | |
main.go | () {
// Create and run application
app.Create().Run()
}
//package main
//
//
//import (
// "flag"
// "github.com/SynthBrain/synthBrain/baseStruct"
// "github.com/SynthBrain/synthBrain/myGui"
// "github.com/g3n/engine/app"
// "github.com/g3n/engine/camera"
// "github.com/g3n/engine/core"
// "github.com/g3n/engine/geometry"
// "github.com/g3n/engine/gls"
// "github.com/g3n/engine/graphic"
// "github.com/g3n/engine/gui"
// "github.com/g3n/engine/light"
// "github.com/g3n/engine/material"
// "github.com/g3n/engine/math32"
// "github.com/g3n/engine/renderer"
//
// "github.com/g3n/engine/util/helper"
// "github.com/g3n/engine/util/logger"
// "github.com/g3n/engine/window"
//
// "time"
//)
//
//var (
// // TODO uncomment and implement usage of the following flags
// //oFullScreen = flag.Bool("fullscreen", false, "Starts application with full screen")
// //oSwapInterval = flag.Int("swapinterval", -1, "Sets the swap buffers interval to this value")
// oHideFPS = flag.Bool("hidefps", false, "Do now show calculated FPS in the GUI")
// oUpdateFPS = flag.Uint("updatefps", 1000, "Time interval in milliseconds to update the FPS in the GUI")
// oTargetFPS = flag.Uint("targetfps", 60, "Sets the frame rate in frames per second")
// oNoglErrors = flag.Bool("noglerrors", false, "Do not check OpenGL errors at each call (may increase FPS)")
// oCpuProfile = flag.String("cpuprofile", "", "Activate cpu profiling writing profile to the specified file")
// oExecTrace = flag.String("exectrace", "", "Activate execution tracer writing data to the specified file")
// oNogui = flag.Bool("nogui", false, "Do not show the GUI, only the specified demo")
// oLogs = flag.String("logs", "", "Set log levels for packages. Ex: gui:debug,gls:info")
// oStats = flag.Bool("stats", false, "Shows statistics control panel in the GUI")
// oRenderStats = flag.Bool("renderstats", false, "Shows gui renderer statistics in the console")
//)
//
//var log *logger.Logger
//
///*
// Рисовать только тех что имеют достаточный уровень активность и окончательно не затухли
//*/
//func main() {
// // Create application and scene
// app := app.App()
// scene := core.NewNode()
//
// // Set the scene to be managed by the gui manager
// gui.Manager().Set(scene)
//
// // Create SynthBrain struct
// synB := new(baseStruct.SynthBrain)
// //frameRater := synB.FrameRater
//
// // Create perspective camera
// cam := camera.New(1)
// cam.SetPosition(0, 0, 3)
// scene.Add(cam)
//
// // Set up orbit control for the camera
// camera.NewOrbitControl(cam)
//
// // Set up callback to update viewport and camera aspect ratio when the window is resized
// onResize := func(evname string, ev interface{}) {
// // Get framebuffer size and update viewport accordingly
// width, height := app.GetSize()
// app.Gls().Viewport(0, 0, int32(width), int32(height))
// // Update the camera's aspect ratio
// cam.SetAspect(float32(width) / float32(height))
// }
// app.Subscribe(window.OnWindowSize, onResize)
// onResize("", nil)
//
// // Create a blue torus and add it to the scene
// //geom := geometry.NewTorus(1, .4, 12, 32, math32.Pi*2)
// geom := geometry.NewTorus(0, .4, 3, 3, math32.Pi*2)
// mat := material.NewStandard(math32.NewColor("DarkBlue"))
// mesh := graphic.NewMesh(geom, mat)
// scene.Add(mesh)
//
// // Create and add a button to the scene
// btn := gui.NewButton("Make Red")
// btn.SetPosition(30, 40)
// btn.SetSize(40, 40)
// btn.Subscribe(gui.OnClick, func(name string, ev interface{}) {
// mat.SetColor(math32.NewColor("DarkRed"))
// })
// scene.Add(btn)
// // Create and add a button to the scene
// btn1 := gui.NewButton("Make Blue")
// btn1.SetPosition(30, 90)
// btn1.SetSize(40, 40)
// btn1.Subscribe(gui.OnClick, func(name string, ev interface{}) {
// mat.SetColor(math32.NewColor("DarkBlue"))
// })
// scene.Add(btn1)
// // Create and add a button to the scene
// onOff := false
// chOnOffFlag := make(chan bool, 1)
// exit := myGui.Exit(30, 240, &onOff, app, chOnOffFlag)
// scene.Add(exit)
//
// // Create and add a button to the scene
// lbl := gui.NewLabel("FPS: ")
// lbl.SetPosition(10, 10)
// lbl.SetPaddings(2, 2, 2, 2)
// scene.Add(lbl)
//
// // Create and add lights to the scene
// scene.Add(light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.8))
// pointLight := light.NewPoint(&math32.Color{1, 1, 1}, 5.0)
// pointLight.SetPosition(1, 0, 2)
// scene.Add(pointLight)
//
// // Create and add an axis helper to the scene
// scene.Add(helper.NewAxes(0.5))
//
// // Set background color to gray
// app.Gls().ClearColor(0.5, 0.5, 0.5, 1.0)
//
// //synB.InitLevel(0)
//
// now := time.Now()
// newNow := time.Now()
// //log.Info("Starting Render Loop")
// //Run the application
// app.Run(func(renderer *renderer.Renderer, deltaTime time.Duration) {
// app.Gls().Clear(gls.DEPTH_BUFFER_BIT | gls.STENCIL_BUFFER_BIT | gls.COLOR_BUFFER_BIT)
// newNow = time.Now()
// timeDelta := now.Sub(newNow)
// now = newNow
// //fps, pfps, _ := frameRater.FPS(time.Duration(*oUpdateFPS) * time.Millisecond)
// //
// //lbl.SetText("FPS: ")
// //lbl.SetText("FPS: " + fmt.Sprintf("%3.1f / %3.1f", fps, pfps) )
// synB.Update(timeDelta.Seconds())
// renderer.Render(scene, cam)
// })
//
// // ABROAD**********************************************************************************************
//
// //// OpenGL functions must be executed in the same thread where
// //// the context was created (by window.New())
// //runtime.LockOSThread()
// //
// //// Parse command line flags
// //showLog := flag.Bool("debug", false, "display the debug log")
// //flag.Parse()
// //
// //// Create logger
// //log = logger.New("SynthBrain", nil)
// //log.AddWriter(logger.NewConsole(false))
// //log.SetFormat(logger.FTIME | logger.FMICROS)
// //if *showLog == true {
// // log.SetLevel(logger.DEBUG)
// //} else {
// // log.SetLevel(logger.INFO)
// //}
// //log.Info("Initializing SynthBrain")
// //
// //// Create SynthBrain struct
// //synB := new(baseStruct.SynthBrain)
// //
// //// Manually scan the $GOPATH directories to find the data directory
// //rawPaths := os.Getenv("GOPATH")
// //paths := strings.Split(rawPaths, ":")
// //for _, j := range paths {
// // // Checks data path
// // path := filepath.Join(j, "src", "github.com", "SynthBrain", "synthBrain")
// // if _, err := os.Stat(path); err == nil {
// // synB.DataDir = path
// // }
// //}
// //
// //// Get the window manager
// //var err error
// //synB.Wmgr, err = window.Manager("glfw")
// //if err != nil {
// // panic(err)
// //}
// //
// //// Create window and OpenGL context
// //synB.Win, err = synB.Wmgr.CreateWindow(900, 640, "SynthBrain", false)
// //if err != nil {
// // panic(err)
// //}
// //
// //// Create OpenGL state
// //synB.Gs, err | main | identifier_name | |
main.go | // app := app.App()
// scene := core.NewNode()
//
// // Set the scene to be managed by the gui manager
// gui.Manager().Set(scene)
//
// // Create SynthBrain struct
// synB := new(baseStruct.SynthBrain)
// //frameRater := synB.FrameRater
//
// // Create perspective camera
// cam := camera.New(1)
// cam.SetPosition(0, 0, 3)
// scene.Add(cam)
//
// // Set up orbit control for the camera
// camera.NewOrbitControl(cam)
//
// // Set up callback to update viewport and camera aspect ratio when the window is resized
// onResize := func(evname string, ev interface{}) {
// // Get framebuffer size and update viewport accordingly
// width, height := app.GetSize()
// app.Gls().Viewport(0, 0, int32(width), int32(height))
// // Update the camera's aspect ratio
// cam.SetAspect(float32(width) / float32(height))
// }
// app.Subscribe(window.OnWindowSize, onResize)
// onResize("", nil)
//
// // Create a blue torus and add it to the scene
// //geom := geometry.NewTorus(1, .4, 12, 32, math32.Pi*2)
// geom := geometry.NewTorus(0, .4, 3, 3, math32.Pi*2)
// mat := material.NewStandard(math32.NewColor("DarkBlue"))
// mesh := graphic.NewMesh(geom, mat)
// scene.Add(mesh)
//
// // Create and add a button to the scene
// btn := gui.NewButton("Make Red")
// btn.SetPosition(30, 40)
// btn.SetSize(40, 40)
// btn.Subscribe(gui.OnClick, func(name string, ev interface{}) {
// mat.SetColor(math32.NewColor("DarkRed"))
// })
// scene.Add(btn)
// // Create and add a button to the scene
// btn1 := gui.NewButton("Make Blue")
// btn1.SetPosition(30, 90)
// btn1.SetSize(40, 40)
// btn1.Subscribe(gui.OnClick, func(name string, ev interface{}) {
// mat.SetColor(math32.NewColor("DarkBlue"))
// })
// scene.Add(btn1)
// // Create and add a button to the scene
// onOff := false
// chOnOffFlag := make(chan bool, 1)
// exit := myGui.Exit(30, 240, &onOff, app, chOnOffFlag)
// scene.Add(exit)
//
// // Create and add a button to the scene
// lbl := gui.NewLabel("FPS: ")
// lbl.SetPosition(10, 10)
// lbl.SetPaddings(2, 2, 2, 2)
// scene.Add(lbl)
//
// // Create and add lights to the scene
// scene.Add(light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.8))
// pointLight := light.NewPoint(&math32.Color{1, 1, 1}, 5.0)
// pointLight.SetPosition(1, 0, 2)
// scene.Add(pointLight)
//
// // Create and add an axis helper to the scene
// scene.Add(helper.NewAxes(0.5))
//
// // Set background color to gray
// app.Gls().ClearColor(0.5, 0.5, 0.5, 1.0)
//
// //synB.InitLevel(0)
//
// now := time.Now()
// newNow := time.Now()
// //log.Info("Starting Render Loop")
// //Run the application
// app.Run(func(renderer *renderer.Renderer, deltaTime time.Duration) {
// app.Gls().Clear(gls.DEPTH_BUFFER_BIT | gls.STENCIL_BUFFER_BIT | gls.COLOR_BUFFER_BIT)
// newNow = time.Now()
// timeDelta := now.Sub(newNow)
// now = newNow
// //fps, pfps, _ := frameRater.FPS(time.Duration(*oUpdateFPS) * time.Millisecond)
// //
// //lbl.SetText("FPS: ")
// //lbl.SetText("FPS: " + fmt.Sprintf("%3.1f / %3.1f", fps, pfps) )
// synB.Update(timeDelta.Seconds())
// renderer.Render(scene, cam)
// })
//
// // ABROAD**********************************************************************************************
//
// //// OpenGL functions must be executed in the same thread where
// //// the context was created (by window.New())
// //runtime.LockOSThread()
// //
// //// Parse command line flags
// //showLog := flag.Bool("debug", false, "display the debug log")
// //flag.Parse()
// //
// //// Create logger
// //log = logger.New("SynthBrain", nil)
// //log.AddWriter(logger.NewConsole(false))
// //log.SetFormat(logger.FTIME | logger.FMICROS)
// //if *showLog == true {
// // log.SetLevel(logger.DEBUG)
// //} else {
// // log.SetLevel(logger.INFO)
// //}
// //log.Info("Initializing SynthBrain")
// //
// //// Create SynthBrain struct
// //synB := new(baseStruct.SynthBrain)
// //
// //// Manually scan the $GOPATH directories to find the data directory
// //rawPaths := os.Getenv("GOPATH")
// //paths := strings.Split(rawPaths, ":")
// //for _, j := range paths {
// // // Checks data path
// // path := filepath.Join(j, "src", "github.com", "SynthBrain", "synthBrain")
// // if _, err := os.Stat(path); err == nil {
// // synB.DataDir = path
// // }
// //}
// //
// //// Get the window manager
// //var err error
// //synB.Wmgr, err = window.Manager("glfw")
// //if err != nil {
// // panic(err)
// //}
// //
// //// Create window and OpenGL context
// //synB.Win, err = synB.Wmgr.CreateWindow(900, 640, "SynthBrain", false)
// //if err != nil {
// // panic(err)
// //}
// //
// //// Create OpenGL state
// //synB.Gs, err = gls.New()
// //if err != nil {
// // panic(err)
// //}
// //
// //// Speed up a bit by not checking OpenGL errors
// //synB.Gs.SetCheckErrors(false)
// //
// //// Sets window background color
// //synB.Gs.ClearColor(0, 0.2, 0.4, 1) //(0.1, 0.1, 0.1, 1.0)
// //
// //// Sets the OpenGL viewport size the same as the window size
// //// This normally should be updated if the window is resized.
// //width, height := synB.Win.Size()
// //synB.Gs.Viewport(0, 0, int32(width), int32(height))
// //
// //// Creates GUI root panel
// //synB.Root = gui.NewRoot(synB.Gs, synB.Win)
// //synB.Root.SetSize(float32(width), float32(height))
// //
// //// Update window if resize
// //synB.Win.Subscribe(window.OnWindowSize, func(evname string, ev interface{}) {
// // width, height := synB.Win.Size()
// // synB.Gs.Viewport(0, 0, int32(width), int32(height))
// // synB.Root.SetSize(float32(width), float32(height))
// // aspect := float32(width) / float32(height)
// // synB.Camera.SetAspect(aspect)
// //})
// //
// ////add GUI*********************************************************
// //// Create and add a label to the root panel
// //synB.LabelFps = myGui.LabelFps(10, 10, "240")
// //synB.Root.Add(synB.LabelFps)
// //
// //// Create and add button 1 to the root panel
// //onOff := false
// //chOnOffFlag := make(chan bool, 1)
// //synB.WebCam = myGui.WebCam(10, 40, &onOff, chOnOffFlag)
// //synB.Root.Add(synB.WebCam)
// //
// //// Create and add exit button to the root panel
// //synB.Exit = myGui.Exit(10, 70, &onOff, synB.Win, chOnOffFlag)
// //synB.Root.Add(synB.Exit)
// ////****************************************************************
// //
// //// Creates a renderer and adds default shaders
// //synB.Renderer = renderer.NewRenderer(syn | // // Create application and scene | random_line_split | |
main.go |
//package main
//
//
//import (
// "flag"
// "github.com/SynthBrain/synthBrain/baseStruct"
// "github.com/SynthBrain/synthBrain/myGui"
// "github.com/g3n/engine/app"
// "github.com/g3n/engine/camera"
// "github.com/g3n/engine/core"
// "github.com/g3n/engine/geometry"
// "github.com/g3n/engine/gls"
// "github.com/g3n/engine/graphic"
// "github.com/g3n/engine/gui"
// "github.com/g3n/engine/light"
// "github.com/g3n/engine/material"
// "github.com/g3n/engine/math32"
// "github.com/g3n/engine/renderer"
//
// "github.com/g3n/engine/util/helper"
// "github.com/g3n/engine/util/logger"
// "github.com/g3n/engine/window"
//
// "time"
//)
//
//var (
// // TODO uncomment and implement usage of the following flags
// //oFullScreen = flag.Bool("fullscreen", false, "Starts application with full screen")
// //oSwapInterval = flag.Int("swapinterval", -1, "Sets the swap buffers interval to this value")
// oHideFPS = flag.Bool("hidefps", false, "Do now show calculated FPS in the GUI")
// oUpdateFPS = flag.Uint("updatefps", 1000, "Time interval in milliseconds to update the FPS in the GUI")
// oTargetFPS = flag.Uint("targetfps", 60, "Sets the frame rate in frames per second")
// oNoglErrors = flag.Bool("noglerrors", false, "Do not check OpenGL errors at each call (may increase FPS)")
// oCpuProfile = flag.String("cpuprofile", "", "Activate cpu profiling writing profile to the specified file")
// oExecTrace = flag.String("exectrace", "", "Activate execution tracer writing data to the specified file")
// oNogui = flag.Bool("nogui", false, "Do not show the GUI, only the specified demo")
// oLogs = flag.String("logs", "", "Set log levels for packages. Ex: gui:debug,gls:info")
// oStats = flag.Bool("stats", false, "Shows statistics control panel in the GUI")
// oRenderStats = flag.Bool("renderstats", false, "Shows gui renderer statistics in the console")
//)
//
//var log *logger.Logger
//
///*
// Рисовать только тех что имеют достаточный уровень активность и окончательно не затухли
//*/
//func main() {
// // Create application and scene
// app := app.App()
// scene := core.NewNode()
//
// // Set the scene to be managed by the gui manager
// gui.Manager().Set(scene)
//
// // Create SynthBrain struct
// synB := new(baseStruct.SynthBrain)
// //frameRater := synB.FrameRater
//
// // Create perspective camera
// cam := camera.New(1)
// cam.SetPosition(0, 0, 3)
// scene.Add(cam)
//
// // Set up orbit control for the camera
// camera.NewOrbitControl(cam)
//
// // Set up callback to update viewport and camera aspect ratio when the window is resized
// onResize := func(evname string, ev interface{}) {
// // Get framebuffer size and update viewport accordingly
// width, height := app.GetSize()
// app.Gls().Viewport(0, 0, int32(width), int32(height))
// // Update the camera's aspect ratio
// cam.SetAspect(float32(width) / float32(height))
// }
// app.Subscribe(window.OnWindowSize, onResize)
// onResize("", nil)
//
// // Create a blue torus and add it to the scene
// //geom := geometry.NewTorus(1, .4, 12, 32, math32.Pi*2)
// geom := geometry.NewTorus(0, .4, 3, 3, math32.Pi*2)
// mat := material.NewStandard(math32.NewColor("DarkBlue"))
// mesh := graphic.NewMesh(geom, mat)
// scene.Add(mesh)
//
// // Create and add a button to the scene
// btn := gui.NewButton("Make Red")
// btn.SetPosition(30, 40)
// btn.SetSize(40, 40)
// btn.Subscribe(gui.OnClick, func(name string, ev interface{}) {
// mat.SetColor(math32.NewColor("DarkRed"))
// })
// scene.Add(btn)
// // Create and add a button to the scene
// btn1 := gui.NewButton("Make Blue")
// btn1.SetPosition(30, 90)
// btn1.SetSize(40, 40)
// btn1.Subscribe(gui.OnClick, func(name string, ev interface{}) {
// mat.SetColor(math32.NewColor("DarkBlue"))
// })
// scene.Add(btn1)
// // Create and add a button to the scene
// onOff := false
// chOnOffFlag := make(chan bool, 1)
// exit := myGui.Exit(30, 240, &onOff, app, chOnOffFlag)
// scene.Add(exit)
//
// // Create and add a button to the scene
// lbl := gui.NewLabel("FPS: ")
// lbl.SetPosition(10, 10)
// lbl.SetPaddings(2, 2, 2, 2)
// scene.Add(lbl)
//
// // Create and add lights to the scene
// scene.Add(light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.8))
// pointLight := light.NewPoint(&math32.Color{1, 1, 1}, 5.0)
// pointLight.SetPosition(1, 0, 2)
// scene.Add(pointLight)
//
// // Create and add an axis helper to the scene
// scene.Add(helper.NewAxes(0.5))
//
// // Set background color to gray
// app.Gls().ClearColor(0.5, 0.5, 0.5, 1.0)
//
// //synB.InitLevel(0)
//
// now := time.Now()
// newNow := time.Now()
// //log.Info("Starting Render Loop")
// //Run the application
// app.Run(func(renderer *renderer.Renderer, deltaTime time.Duration) {
// app.Gls().Clear(gls.DEPTH_BUFFER_BIT | gls.STENCIL_BUFFER_BIT | gls.COLOR_BUFFER_BIT)
// newNow = time.Now()
// timeDelta := now.Sub(newNow)
// now = newNow
// //fps, pfps, _ := frameRater.FPS(time.Duration(*oUpdateFPS) * time.Millisecond)
// //
// //lbl.SetText("FPS: ")
// //lbl.SetText("FPS: " + fmt.Sprintf("%3.1f / %3.1f", fps, pfps) )
// synB.Update(timeDelta.Seconds())
// renderer.Render(scene, cam)
// })
//
// // ABROAD**********************************************************************************************
//
// //// OpenGL functions must be executed in the same thread where
// //// the context was created (by window.New())
// //runtime.LockOSThread()
// //
// //// Parse command line flags
// //showLog := flag.Bool("debug", false, "display the debug log")
// //flag.Parse()
// //
// //// Create logger
// //log = logger.New("SynthBrain", nil)
// //log.AddWriter(logger.NewConsole(false))
// //log.SetFormat(logger.FTIME | logger.FMICROS)
// //if *showLog == true {
// // log.SetLevel(logger.DEBUG)
// //} else {
// // log.SetLevel(logger.INFO)
// //}
// //log.Info("Initializing SynthBrain")
// //
// //// Create SynthBrain struct
// //synB := new(baseStruct.SynthBrain)
// //
// //// Manually scan the $GOPATH directories to find the data directory
// //rawPaths := os.Getenv("GOPATH")
// //paths := strings.Split(rawPaths, ":")
// //for _, j := range paths {
// // // Checks data path
// // path := filepath.Join(j, "src", "github.com", "SynthBrain", "synthBrain")
// // if _, err := os.Stat(path); err == nil {
// // synB.DataDir = path
// // }
// //}
// //
// //// Get the window manager
// //var err error
// //synB.Wmgr, err = window.Manager("glfw")
// //if err != nil {
// // panic(err)
// //}
// //
// //// Create window and OpenGL context
// //synB.Win, err = synB.Wmgr.CreateWindow(900, 640, "SynthBrain", false)
// //if err != nil {
// // panic(err)
// //}
// //
// //// Create OpenGL state
// //synB.Gs, err = | {
// Create and run application
app.Create().Run()
} | identifier_body | |
thm.py | .levels = sorted(levels, reverse=True)
m = self._get_m(levels)
# pyre-fixme[4]: Attribute must be annotated.
self.m = m
# pyre-fixme[4]: Attribute must be annotated.
self.freq = {k: int(m / k) for k in self.levels}
self.baseModels = baseModels
# pyre-fixme[4]: Attribute must be annotated.
self.info_fcsts = {}
# pyre-fixme[4]: Attribute must be annotated.
self.info_residuals = {}
def _get_m(self, ks: List[int]) -> int:
"""Calculate m.
m is the minimum common multiple of all levels.
Args:
ks: the list of integers representing all the levels.
Returns:
An integer representing the minimum common multiple.
"""
base = 1
for c in ks:
base = base * c // gcd(base, c)
return base
def fit(self) -> None:
"""Fit all base models.
If base model only has residuals and forecasts, store the information.
"""
levels = self.levels
TSs = GetAggregateTS(self.data).aggregate(levels)
models = {}
residuals = {}
fcsts = {}
for bm in self.baseModels:
model_name = bm.model_name
if model_name is None: # only residuals and fcsts are provided
models[bm.level] = None
residuals[bm.level] = bm.residuals
fcsts[bm.level] = bm.fcsts
else:
m = BASE_MODELS[model_name](
data=TSs[bm.level],
params=bm.model_params,
)
m.fit()
models[bm.level] = m
self.models = models
self.info_fcsts = fcsts
self.info_residuals = residuals
def get_S(self) -> np.ndarray:
"""Calculate S matrix.
Returns:
A np.array representing the S matrix.
"""
ans = []
levels = self.levels
m = self.m
for k in levels:
for i in range(self.freq[k]):
tem = np.zeros(m)
tem[(i * k) : (i * k + k)] = 1.0
ans.append(tem)
return np.row_stack(ans)
def _aggregate_data(self, data: np.ndarray, k: int) -> np.ndarray:
"""Aggregate data according to level k."""
if k == 1:
return data
n = len(data)
h = n // k
return (data[: int(h * k)]).reshape(-1, k).sum(axis=1)
# pyre-fixme[24]: Generic type `Model` expects 1 type parameter.
def _get_residuals(self, model: Model) -> np.ndarray:
"""Calculate residuals of each base model.
Args:
model: a callable model object representing the trained base model.
Returns:
A np.ndarray of residuals.
"""
try:
# pyre-fixme[16]: `Model` has no attribute `model`.
return model.model.resid.values
except Exception:
fcst = model.predict(steps=1, freq="D", include_history=True)
# pyre-fixme[16]: `None` has no attribute `merge`.
# pyre-fixme[16]: `Optional` has no attribute `to_dataframe`.
merge = fcst.merge(model.data.to_dataframe(), on="time")
for col in merge.columns:
if col != "time" and ("fcst" not in col):
return merge[col].values - merge["fcst"].values
raise ValueError("Couldn't find residual or forecast values in model")
def _get_all_residuals(self) -> Dict[int, np.ndarray]:
"""
Calculate residuals for all base models.
Returns:
Dictionary for residuals, whose key is level and value is residual
array.
"""
residuals = self.residuals
# if residuals have not been calculated yet
if residuals is None:
levels = self.levels
models = self.models
residuals = {}
for k in levels:
# assert models is not None
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if models[k] is not None:
try:
vals = self._get_residuals(models[k])
except Exception as e:
msg = (
f"Failed to get residuals for level {k} with error "
f"message {e}."
)
raise _log_error(msg)
residuals[k] = vals
else:
residuals[k] = self.info_residuals[k]
self.residuals = residuals
return residuals
def _get_residual_matrix(self) -> np.ndarray:
"""
Reshape residuals into matrix format.
Returns:
Residual matrix.
"""
res_matrix = self.res_matrix
if res_matrix is None:
residuals = self._get_all_residuals()
ks = self.levels
freq = self.freq
h = np.min([len(residuals[k]) // freq[k] for k in ks])
res_matrix = []
for k in ks:
n = h * freq[k]
res_matrix.append(residuals[k][-n:].reshape(h, -1).T)
res_matrix = np.row_stack(res_matrix)
self.res_matrix = res_matrix
return res_matrix
def get_W(self, method: str = "struc", eps: float = 1e-5) -> np.ndarray:
| elif method == "svar":
residuals = self._get_all_residuals()
ans = []
for k in levels:
ans.extend([np.nanmean(np.square(residuals[k]))] * freq[k])
return np.array(ans) + eps
elif method == "hvar":
res_matrix = self._get_residual_matrix()
return np.nanvar(res_matrix, axis=1) + eps
elif method == "mint_shrink":
cov = np.cov(self._get_residual_matrix())
# get correlation matrix
sqrt = np.sqrt(np.diag(cov))
cor = (
(cov / sqrt).T
) / sqrt # due to symmetry, no need to transpose the matrix again.
mask = ~np.eye(cor.shape[0], dtype=bool)
cor = cor[mask]
lam = np.var(cor) / np.sum(cor**2)
lam = np.max([0, lam])
cov = np.diag(np.diag(cov)) * lam + (1.0 - lam) * cov
cov += np.eye(len(cov)) * eps
return cov
elif method == "mint_sample":
cov = np.cov(self._get_residual_matrix())
cov += np.eye(len(cov)) * eps
return cov
else:
raise _log_error(f"{method} is invalid for get_W() method.")
# pyre-fixme[2]: Parameter must be annotated.
def _predict_origin(self, steps: int, method="struc") -> Dict[int, np.ndarray]:
"""
Generate original forecasts from each base model (without time index).
Args:
steps: Number of forecasts for level 1.
methd: Reconciliation method.
Returns:
Dictionary of forecasts of each level, whose key is level and value
is forecast array.
"""
m = self.m
levels = self.levels
freq = self.freq
h = int(np.ceil(steps / m))
hf = steps // m
orig_fcst = {}
models = self.models
# generate forecasts for each level
for k in levels:
num = int(freq[k] * h)
# assert models is not None
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if models[k] is not None:
orig_fcst[k] = models[k].predict(steps=num, freq="D")["fcst"].values
else:
fcst_num = len(self.info_fcsts[k])
if fcst_num < num:
if fcst_num >= hf * freq[k]:
# since the final output only needs hf*freq[k] forecasts
# for level k, we pad the forecast array to desirable
# length. (note that the padding values would be ignored
# in the final output.)
orig_fcst[k] = np.concatenate(
[
self.info_fcsts[k],
[self.info_fcsts[k][-1]] * (num - fcst_num),
]
)
elif method == "bu" and k != 1:
# for 'bu' only level 1 is needed.
orig_fcst[k] = self.info_fcsts[k]
else:
msg = (
f | """
Calculate W matrix.
Args:
method: Reconciliation method for temporal hierarchical model. Valid
methods include 'struc', 'svar', 'hvar', 'mint_sample', and
'mint_shrink'.
eps: Epsilons added to W for numerical stability.
Returns:
W matrix. (If W is a diagnoal matrix, only returns its diagnoal elements).
"""
levels = self.levels
freq = self.freq
if method == "struc":
ans = []
for k in levels:
ans.extend([k] * freq[k])
return np.array(ans)
| identifier_body |
thm.py | "Base model should be a BaseTHModel object but is "
f"{type(basemodel)}."
)
raise _log_error(msg)
levels = [bm.level for bm in baseModels]
if 1 not in levels:
raise _log_error("Model of level 1 is missing.")
if len(levels) != len(set(levels)):
raise _log_error("One level cannot receive multiple models.")
# pyre-fixme[4]: Attribute must be annotated.
self.levels = sorted(levels, reverse=True)
m = self._get_m(levels)
# pyre-fixme[4]: Attribute must be annotated.
self.m = m
# pyre-fixme[4]: Attribute must be annotated.
self.freq = {k: int(m / k) for k in self.levels}
self.baseModels = baseModels
# pyre-fixme[4]: Attribute must be annotated.
self.info_fcsts = {}
# pyre-fixme[4]: Attribute must be annotated.
self.info_residuals = {}
def _get_m(self, ks: List[int]) -> int:
"""Calculate m.
m is the minimum common multiple of all levels.
Args:
ks: the list of integers representing all the levels.
Returns:
An integer representing the minimum common multiple.
"""
base = 1
for c in ks:
base = base * c // gcd(base, c)
return base
def fit(self) -> None:
"""Fit all base models.
If base model only has residuals and forecasts, store the information.
"""
levels = self.levels
TSs = GetAggregateTS(self.data).aggregate(levels)
models = {}
residuals = {}
fcsts = {}
for bm in self.baseModels:
model_name = bm.model_name
if model_name is None: # only residuals and fcsts are provided
models[bm.level] = None
residuals[bm.level] = bm.residuals
fcsts[bm.level] = bm.fcsts
else:
m = BASE_MODELS[model_name](
data=TSs[bm.level],
params=bm.model_params,
)
m.fit()
models[bm.level] = m
self.models = models
self.info_fcsts = fcsts
self.info_residuals = residuals
def get_S(self) -> np.ndarray:
"""Calculate S matrix.
Returns:
A np.array representing the S matrix.
"""
ans = []
levels = self.levels
m = self.m
for k in levels:
for i in range(self.freq[k]):
tem = np.zeros(m)
tem[(i * k) : (i * k + k)] = 1.0
ans.append(tem)
return np.row_stack(ans)
def _aggregate_data(self, data: np.ndarray, k: int) -> np.ndarray:
"""Aggregate data according to level k."""
if k == 1:
return data
n = len(data)
h = n // k
return (data[: int(h * k)]).reshape(-1, k).sum(axis=1)
# pyre-fixme[24]: Generic type `Model` expects 1 type parameter.
def _get_residuals(self, model: Model) -> np.ndarray:
"""Calculate residuals of each base model.
Args:
model: a callable model object representing the trained base model.
Returns:
A np.ndarray of residuals.
"""
try:
# pyre-fixme[16]: `Model` has no attribute `model`.
return model.model.resid.values
except Exception:
fcst = model.predict(steps=1, freq="D", include_history=True)
# pyre-fixme[16]: `None` has no attribute `merge`.
# pyre-fixme[16]: `Optional` has no attribute `to_dataframe`.
merge = fcst.merge(model.data.to_dataframe(), on="time")
for col in merge.columns:
if col != "time" and ("fcst" not in col):
return merge[col].values - merge["fcst"].values
raise ValueError("Couldn't find residual or forecast values in model")
def _get_all_residuals(self) -> Dict[int, np.ndarray]:
"""
Calculate residuals for all base models.
Returns:
Dictionary for residuals, whose key is level and value is residual
array.
"""
residuals = self.residuals
# if residuals have not been calculated yet
if residuals is None:
levels = self.levels
models = self.models
residuals = {}
for k in levels:
# assert models is not None
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if models[k] is not None:
try:
vals = self._get_residuals(models[k])
except Exception as e:
msg = (
f"Failed to get residuals for level {k} with error "
f"message {e}."
)
raise _log_error(msg)
residuals[k] = vals
else:
residuals[k] = self.info_residuals[k]
self.residuals = residuals
return residuals
def _get_residual_matrix(self) -> np.ndarray:
"""
Reshape residuals into matrix format.
Returns:
Residual matrix.
"""
res_matrix = self.res_matrix
if res_matrix is None:
residuals = self._get_all_residuals()
ks = self.levels
freq = self.freq
h = np.min([len(residuals[k]) // freq[k] for k in ks])
res_matrix = []
for k in ks:
n = h * freq[k]
res_matrix.append(residuals[k][-n:].reshape(h, -1).T)
res_matrix = np.row_stack(res_matrix)
self.res_matrix = res_matrix
return res_matrix
def get_W(self, method: str = "struc", eps: float = 1e-5) -> np.ndarray:
"""
Calculate W matrix.
Args:
method: Reconciliation method for temporal hierarchical model. Valid
methods include 'struc', 'svar', 'hvar', 'mint_sample', and
'mint_shrink'.
eps: Epsilons added to W for numerical stability.
Returns:
W matrix. (If W is a diagnoal matrix, only returns its diagnoal elements).
"""
levels = self.levels
freq = self.freq
if method == "struc":
ans = []
for k in levels:
ans.extend([k] * freq[k])
return np.array(ans)
elif method == "svar":
residuals = self._get_all_residuals()
ans = []
for k in levels:
ans.extend([np.nanmean(np.square(residuals[k]))] * freq[k])
return np.array(ans) + eps
elif method == "hvar":
res_matrix = self._get_residual_matrix()
return np.nanvar(res_matrix, axis=1) + eps
elif method == "mint_shrink":
cov = np.cov(self._get_residual_matrix())
# get correlation matrix
sqrt = np.sqrt(np.diag(cov))
cor = (
(cov / sqrt).T
) / sqrt # due to symmetry, no need to transpose the matrix again.
mask = ~np.eye(cor.shape[0], dtype=bool)
cor = cor[mask]
lam = np.var(cor) / np.sum(cor**2)
lam = np.max([0, lam])
cov = np.diag(np.diag(cov)) * lam + (1.0 - lam) * cov
cov += np.eye(len(cov)) * eps
return cov
elif method == "mint_sample":
cov = np.cov(self._get_residual_matrix())
cov += np.eye(len(cov)) * eps
return cov
else:
raise _log_error(f"{method} is invalid for get_W() method.")
# pyre-fixme[2]: Parameter must be annotated.
def _predict_origin(self, steps: int, method="struc") -> Dict[int, np.ndarray]:
"""
Generate original forecasts from each base model (without time index).
Args:
steps: Number of forecasts for level 1.
methd: Reconciliation method.
Returns:
Dictionary of forecasts of each level, whose key is level and value
is forecast array.
"""
m = self.m
levels = self.levels
freq = self.freq
h = int(np.ceil(steps / m))
hf = steps // m
orig_fcst = {}
models = self.models
# generate forecasts for each level
for k in levels:
num = int(freq[k] * h)
# assert models is not None
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if models[k] is not None:
orig_fcst[k] = models[k].predict(steps=num, freq="D")["fcst"].values
else:
fcst_num = len(self.info_fcsts[k])
if fcst_num < num:
if fcst_num >= hf * freq[k]:
# since the final output only needs hf*freq[k] forecasts
# for level k, we pad the forecast array to desirable
# length. (note | random_line_split | ||
thm.py | .levels = sorted(levels, reverse=True)
m = self._get_m(levels)
# pyre-fixme[4]: Attribute must be annotated.
self.m = m
# pyre-fixme[4]: Attribute must be annotated.
self.freq = {k: int(m / k) for k in self.levels}
self.baseModels = baseModels
# pyre-fixme[4]: Attribute must be annotated.
self.info_fcsts = {}
# pyre-fixme[4]: Attribute must be annotated.
self.info_residuals = {}
def _get_m(self, ks: List[int]) -> int:
"""Calculate m.
m is the minimum common multiple of all levels.
Args:
ks: the list of integers representing all the levels.
Returns:
An integer representing the minimum common multiple.
"""
base = 1
for c in ks:
base = base * c // gcd(base, c)
return base
def fit(self) -> None:
"""Fit all base models.
If base model only has residuals and forecasts, store the information.
"""
levels = self.levels
TSs = GetAggregateTS(self.data).aggregate(levels)
models = {}
residuals = {}
fcsts = {}
for bm in self.baseModels:
model_name = bm.model_name
if model_name is None: # only residuals and fcsts are provided
models[bm.level] = None
residuals[bm.level] = bm.residuals
fcsts[bm.level] = bm.fcsts
else:
m = BASE_MODELS[model_name](
data=TSs[bm.level],
params=bm.model_params,
)
m.fit()
models[bm.level] = m
self.models = models
self.info_fcsts = fcsts
self.info_residuals = residuals
def get_S(self) -> np.ndarray:
"""Calculate S matrix.
Returns:
A np.array representing the S matrix.
"""
ans = []
levels = self.levels
m = self.m
for k in levels:
for i in range(self.freq[k]):
tem = np.zeros(m)
tem[(i * k) : (i * k + k)] = 1.0
ans.append(tem)
return np.row_stack(ans)
def _aggregate_data(self, data: np.ndarray, k: int) -> np.ndarray:
"""Aggregate data according to level k."""
if k == 1:
return data
n = len(data)
h = n // k
return (data[: int(h * k)]).reshape(-1, k).sum(axis=1)
# pyre-fixme[24]: Generic type `Model` expects 1 type parameter.
def _get_residuals(self, model: Model) -> np.ndarray:
"""Calculate residuals of each base model.
Args:
model: a callable model object representing the trained base model.
Returns:
A np.ndarray of residuals.
"""
try:
# pyre-fixme[16]: `Model` has no attribute `model`.
return model.model.resid.values
except Exception:
fcst = model.predict(steps=1, freq="D", include_history=True)
# pyre-fixme[16]: `None` has no attribute `merge`.
# pyre-fixme[16]: `Optional` has no attribute `to_dataframe`.
merge = fcst.merge(model.data.to_dataframe(), on="time")
for col in merge.columns:
if col != "time" and ("fcst" not in col):
return merge[col].values - merge["fcst"].values
raise ValueError("Couldn't find residual or forecast values in model")
def _get_all_residuals(self) -> Dict[int, np.ndarray]:
"""
Calculate residuals for all base models.
Returns:
Dictionary for residuals, whose key is level and value is residual
array.
"""
residuals = self.residuals
# if residuals have not been calculated yet
if residuals is None:
levels = self.levels
models = self.models
residuals = {}
for k in levels:
# assert models is not None
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if models[k] is not None:
try:
vals = self._get_residuals(models[k])
except Exception as e:
msg = (
f"Failed to get residuals for level {k} with error "
f"message {e}."
)
raise _log_error(msg)
residuals[k] = vals
else:
residuals[k] = self.info_residuals[k]
self.residuals = residuals
return residuals
def _get_residual_matrix(self) -> np.ndarray:
"""
Reshape residuals into matrix format.
Returns:
Residual matrix.
"""
res_matrix = self.res_matrix
if res_matrix is None:
residuals = self._get_all_residuals()
ks = self.levels
freq = self.freq
h = np.min([len(residuals[k]) // freq[k] for k in ks])
res_matrix = []
for k in ks:
n = h * freq[k]
res_matrix.append(residuals[k][-n:].reshape(h, -1).T)
res_matrix = np.row_stack(res_matrix)
self.res_matrix = res_matrix
return res_matrix
def get_W(self, method: str = "struc", eps: float = 1e-5) -> np.ndarray:
"""
Calculate W matrix.
Args:
method: Reconciliation method for temporal hierarchical model. Valid
methods include 'struc', 'svar', 'hvar', 'mint_sample', and
'mint_shrink'.
eps: Epsilons added to W for numerical stability.
Returns:
W matrix. (If W is a diagnoal matrix, only returns its diagnoal elements).
"""
levels = self.levels
freq = self.freq
if method == "struc":
ans = []
for k in levels:
ans.extend([k] * freq[k])
return np.array(ans)
elif method == "svar":
residuals = self._get_all_residuals()
ans = []
for k in levels:
ans.extend([np.nanmean(np.square(residuals[k]))] * freq[k])
return np.array(ans) + eps
elif method == "hvar":
res_matrix = self._get_residual_matrix()
return np.nanvar(res_matrix, axis=1) + eps
elif method == "mint_shrink":
cov = np.cov(self._get_residual_matrix())
# get correlation matrix
sqrt = np.sqrt(np.diag(cov))
cor = (
(cov / sqrt).T
) / sqrt # due to symmetry, no need to transpose the matrix again.
mask = ~np.eye(cor.shape[0], dtype=bool)
cor = cor[mask]
lam = np.var(cor) / np.sum(cor**2)
lam = np.max([0, lam])
cov = np.diag(np.diag(cov)) * lam + (1.0 - lam) * cov
cov += np.eye(len(cov)) * eps
return cov
elif method == "mint_sample":
cov = np.cov(self._get_residual_matrix())
cov += np.eye(len(cov)) * eps
return cov
else:
raise _log_error(f"{method} is invalid for get_W() method.")
# pyre-fixme[2]: Parameter must be annotated.
def | (self, steps: int, method="struc") -> Dict[int, np.ndarray]:
"""
Generate original forecasts from each base model (without time index).
Args:
steps: Number of forecasts for level 1.
methd: Reconciliation method.
Returns:
Dictionary of forecasts of each level, whose key is level and value
is forecast array.
"""
m = self.m
levels = self.levels
freq = self.freq
h = int(np.ceil(steps / m))
hf = steps // m
orig_fcst = {}
models = self.models
# generate forecasts for each level
for k in levels:
num = int(freq[k] * h)
# assert models is not None
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if models[k] is not None:
orig_fcst[k] = models[k].predict(steps=num, freq="D")["fcst"].values
else:
fcst_num = len(self.info_fcsts[k])
if fcst_num < num:
if fcst_num >= hf * freq[k]:
# since the final output only needs hf*freq[k] forecasts
# for level k, we pad the forecast array to desirable
# length. (note that the padding values would be ignored
# in the final output.)
orig_fcst[k] = np.concatenate(
[
self.info_fcsts[k],
[self.info_fcsts[k][-1]] * (num - fcst_num),
]
)
elif method == "bu" and k != 1:
# for 'bu' only level 1 is needed.
orig_fcst[k] = self.info_fcsts[k]
else:
msg = (
f"{ | _predict_origin | identifier_name |
thm.py | .levels = sorted(levels, reverse=True)
m = self._get_m(levels)
# pyre-fixme[4]: Attribute must be annotated.
self.m = m
# pyre-fixme[4]: Attribute must be annotated.
self.freq = {k: int(m / k) for k in self.levels}
self.baseModels = baseModels
# pyre-fixme[4]: Attribute must be annotated.
self.info_fcsts = {}
# pyre-fixme[4]: Attribute must be annotated.
self.info_residuals = {}
def _get_m(self, ks: List[int]) -> int:
"""Calculate m.
m is the minimum common multiple of all levels.
Args:
ks: the list of integers representing all the levels.
Returns:
An integer representing the minimum common multiple.
"""
base = 1
for c in ks:
base = base * c // gcd(base, c)
return base
def fit(self) -> None:
"""Fit all base models.
If base model only has residuals and forecasts, store the information.
"""
levels = self.levels
TSs = GetAggregateTS(self.data).aggregate(levels)
models = {}
residuals = {}
fcsts = {}
for bm in self.baseModels:
model_name = bm.model_name
if model_name is None: # only residuals and fcsts are provided
models[bm.level] = None
residuals[bm.level] = bm.residuals
fcsts[bm.level] = bm.fcsts
else:
m = BASE_MODELS[model_name](
data=TSs[bm.level],
params=bm.model_params,
)
m.fit()
models[bm.level] = m
self.models = models
self.info_fcsts = fcsts
self.info_residuals = residuals
def get_S(self) -> np.ndarray:
"""Calculate S matrix.
Returns:
A np.array representing the S matrix.
"""
ans = []
levels = self.levels
m = self.m
for k in levels:
for i in range(self.freq[k]):
tem = np.zeros(m)
tem[(i * k) : (i * k + k)] = 1.0
ans.append(tem)
return np.row_stack(ans)
def _aggregate_data(self, data: np.ndarray, k: int) -> np.ndarray:
"""Aggregate data according to level k."""
if k == 1:
return data
n = len(data)
h = n // k
return (data[: int(h * k)]).reshape(-1, k).sum(axis=1)
# pyre-fixme[24]: Generic type `Model` expects 1 type parameter.
def _get_residuals(self, model: Model) -> np.ndarray:
"""Calculate residuals of each base model.
Args:
model: a callable model object representing the trained base model.
Returns:
A np.ndarray of residuals.
"""
try:
# pyre-fixme[16]: `Model` has no attribute `model`.
return model.model.resid.values
except Exception:
fcst = model.predict(steps=1, freq="D", include_history=True)
# pyre-fixme[16]: `None` has no attribute `merge`.
# pyre-fixme[16]: `Optional` has no attribute `to_dataframe`.
merge = fcst.merge(model.data.to_dataframe(), on="time")
for col in merge.columns:
if col != "time" and ("fcst" not in col):
return merge[col].values - merge["fcst"].values
raise ValueError("Couldn't find residual or forecast values in model")
def _get_all_residuals(self) -> Dict[int, np.ndarray]:
"""
Calculate residuals for all base models.
Returns:
Dictionary for residuals, whose key is level and value is residual
array.
"""
residuals = self.residuals
# if residuals have not been calculated yet
if residuals is None:
levels = self.levels
models = self.models
residuals = {}
for k in levels:
# assert models is not None
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if models[k] is not None:
try:
vals = self._get_residuals(models[k])
except Exception as e:
msg = (
f"Failed to get residuals for level {k} with error "
f"message {e}."
)
raise _log_error(msg)
residuals[k] = vals
else:
residuals[k] = self.info_residuals[k]
self.residuals = residuals
return residuals
def _get_residual_matrix(self) -> np.ndarray:
"""
Reshape residuals into matrix format.
Returns:
Residual matrix.
"""
res_matrix = self.res_matrix
if res_matrix is None:
residuals = self._get_all_residuals()
ks = self.levels
freq = self.freq
h = np.min([len(residuals[k]) // freq[k] for k in ks])
res_matrix = []
for k in ks:
n = h * freq[k]
res_matrix.append(residuals[k][-n:].reshape(h, -1).T)
res_matrix = np.row_stack(res_matrix)
self.res_matrix = res_matrix
return res_matrix
def get_W(self, method: str = "struc", eps: float = 1e-5) -> np.ndarray:
"""
Calculate W matrix.
Args:
method: Reconciliation method for temporal hierarchical model. Valid
methods include 'struc', 'svar', 'hvar', 'mint_sample', and
'mint_shrink'.
eps: Epsilons added to W for numerical stability.
Returns:
W matrix. (If W is a diagnoal matrix, only returns its diagnoal elements).
"""
levels = self.levels
freq = self.freq
if method == "struc":
ans = []
for k in levels:
ans.extend([k] * freq[k])
return np.array(ans)
elif method == "svar":
residuals = self._get_all_residuals()
ans = []
for k in levels:
ans.extend([np.nanmean(np.square(residuals[k]))] * freq[k])
return np.array(ans) + eps
elif method == "hvar":
|
elif method == "mint_shrink":
cov = np.cov(self._get_residual_matrix())
# get correlation matrix
sqrt = np.sqrt(np.diag(cov))
cor = (
(cov / sqrt).T
) / sqrt # due to symmetry, no need to transpose the matrix again.
mask = ~np.eye(cor.shape[0], dtype=bool)
cor = cor[mask]
lam = np.var(cor) / np.sum(cor**2)
lam = np.max([0, lam])
cov = np.diag(np.diag(cov)) * lam + (1.0 - lam) * cov
cov += np.eye(len(cov)) * eps
return cov
elif method == "mint_sample":
cov = np.cov(self._get_residual_matrix())
cov += np.eye(len(cov)) * eps
return cov
else:
raise _log_error(f"{method} is invalid for get_W() method.")
# pyre-fixme[2]: Parameter must be annotated.
def _predict_origin(self, steps: int, method="struc") -> Dict[int, np.ndarray]:
"""
Generate original forecasts from each base model (without time index).
Args:
steps: Number of forecasts for level 1.
methd: Reconciliation method.
Returns:
Dictionary of forecasts of each level, whose key is level and value
is forecast array.
"""
m = self.m
levels = self.levels
freq = self.freq
h = int(np.ceil(steps / m))
hf = steps // m
orig_fcst = {}
models = self.models
# generate forecasts for each level
for k in levels:
num = int(freq[k] * h)
# assert models is not None
# pyre-fixme[16]: `Optional` has no attribute `__getitem__`.
if models[k] is not None:
orig_fcst[k] = models[k].predict(steps=num, freq="D")["fcst"].values
else:
fcst_num = len(self.info_fcsts[k])
if fcst_num < num:
if fcst_num >= hf * freq[k]:
# since the final output only needs hf*freq[k] forecasts
# for level k, we pad the forecast array to desirable
# length. (note that the padding values would be ignored
# in the final output.)
orig_fcst[k] = np.concatenate(
[
self.info_fcsts[k],
[self.info_fcsts[k][-1]] * (num - fcst_num),
]
)
elif method == "bu" and k != 1:
# for 'bu' only level 1 is needed.
orig_fcst[k] = self.info_fcsts[k]
else:
msg = (
f"{ | res_matrix = self._get_residual_matrix()
return np.nanvar(res_matrix, axis=1) + eps | conditional_block |
Thyroid annotator.py | def get_file_list(root_dir):
file_list = []
counter = 1
for root, directories, filenames in os.walk(root_dir):
for filename in filenames:
if any(ext in filename for ext in extensions):
file_list.append(os.path.join(root, filename))
counter += 1
return sorted(file_list)
### Creating the side bar
add_proj_text = st.sidebar.write('Start new project')
add_textbox = st.sidebar.text_input('Project name')
add_foldbox = st.sidebar.text_input('Folder name' )
add_newproj_btn = st.sidebar.button('Create new project')
st.sidebar.write(' ')
add_proj_load = st.sidebar.write('Load project')
#proj_list =new_installation(proj_file)
add_csvbox = st.sidebar.selectbox('Pick your project',"exp1")
add_loadproj_btn = st.sidebar.button('Load project')
### store file names to a list and find the number of files in the list
#file_to_anot = get_file_list(source_dir)
#file_to_anot = get_file_list(source_dir)
#max_ind= len(file_to_anot) -1
### Creating a list to store the annotations
### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list
@st.cache(allow_output_mutation=True)
def init_anot(file_to_anot):
anot = [None]*(len(file_to_anot))
comp_list = [None]*(len(file_to_anot))
echo_list = [None]*(len(file_to_anot))
shape_list =[None]*(len(file_to_anot))
marg_list = [None]*(len(file_to_anot))
foci_list = [None]*(len(file_to_anot))
return anot,comp_list,echo_list,shape_list,marg_list,foci_list
### Creating a list to store just the file names
@st.cache(allow_output_mutation=True)
def init_base_f(file_to_anot):
base_file = [None]*(len(file_to_anot))
return base_file
#anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot)
#base_f = init_base_f(file_to_anot)
### Given an index this function converts path in the index to windows readable path
### then load the imaeg and returns the loaded image
def get_image(ind_no,file_to_anot):
file_name = file_to_anot[ind_no]
im_file =re.sub("\\\\","\\\\\\\\", file_name)
loaded_image = Image.open(im_file)
return loaded_image
### Get just the image file name from the complete path string
def extract_basename(path):
basename = re.search(r'[^\\/]+(?=[\\/]?$)', path)
if basename:
return basename.group(0)
def get_index(dta_ar, out_string):
for i in range(len(dta_ar)):
if dta_ar[i] == out_string:
in_dex = i
return in_dex
def main():
state = _get_state()
def set_index_in(in_num):
state.comp_list[in_num] = get_index(comp_options, composition)
state.echo_list[in_num] = get_index(echo_options, echo)
state.shape_list[in_num]= get_index(shape_options, shape)
state.marg_list[in_num] = get_index(margin_options, margin)
state.foci_list[in_num]= get_index(foci_options, echogenic_foci)
def update_choices(ind_num):
''' This function collects the values of lables/tags for the next or previous image,
then displays it in the user interface.
This function is called each time Next or Previous button is pressed.
'''
if state.comp_list[ind_num] != None:
state.comp = state.comp_list[ind_num]
else:
state.comp = 0
if state.echo_list[ind_num] != None:
state.echo = state.echo_list[ind_num]
else:
state.echo = 0
if state.shape_list[ind_num] !=None:
state.shape = state.shape_list[ind_num]
else:
state.shape = 0
if state.marg_list[ind_num] != None:
state.margin = state.marg_list[ind_num]
else:
state.margin = 0
if state.foci_list[ind_num] != None:
state.foci = state.foci_list[ind_num]
else:
state.foci = 0
#print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci)
# Initializing a state variable input
if state.input == None:
state.input = 0
state.last_anot =0
state.comp = 0
state.echo = 0
state.shape = 0
state.margin = 0
state.foci = 0
# Creating the UI
comp_options = ['cystic','spongiform', 'mixed cystic','solid']
echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic']
shape_options =['wider than tall','taller than wide']
margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete']
foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci']
with col2:
prev_button = st.button('Previous')
if state.active_project == True:
composition = st.radio('Composition',comp_options, state.comp)
echo = st.radio('Echogenicity',echo_options, state.echo)
shape = st.radio('Shape',shape_options, state.shape)
state.started = True
with col3:
next_button = st.button('Next')
if state.active_project == True:
margin = st.radio('Margin',margin_options, state.margin)
echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci)
with col1:
#if state.input ==0:
if next_button and state.active_project == True:
if state.input == state.max_ind:
|
else:
set_index_in(state.input)
#update_choices(state.input,comp_list)
state.input = state.input + 1
update_choices(state.input)
if state.input > state.last_anot:
state.last_anot = state.input
if prev_button and state.active_project == True:
if state.input == 0:
e =RuntimeError('Reached the first image in the folder')
st.exception(e)
else:
set_index_in(state.input)
#update_choices(state.input,state.comp_list)
state.input = state.input -1
update_choices(state.input)
if add_newproj_btn and add_foldbox != "":
state.file_to_anot = get_file_list(add_foldbox)
state.max_ind= len(state.file_to_anot) -1
### initializing variables
state.active_project = True
state.input = 0
state.last_anot =0
state.comp = 0
state.echo = 0
state.shape = 0
state.margin = 0
state.foci = 0
state.started = False
state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot)
state.base_f = init_base_f(state.file_to_anot)
if add_foldbox != "" and state.started == True:
st.image(get_image(state.input,state.file_to_anot),use_column_width=True)
desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot )
#print("anot list",state.anot_list)
state.anot_list[state.input] = lbl
state.base_f[state.input] = fln
col1.write( desc_nod)
### Save button ########################################################
save_button = st.button('Save')
if save_button:
set_index_in(state.input)
df = pd.DataFrame(list(zip(state.base_f, state.anot_list)), columns =["IM_FILENAME", "LABELS"])
cwd = os.getcwd()
csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv'
#print("printing curr file name")
#print(csv_to_log)
df.to_csv(csv_to_log)
#proj = pd.read_csv(proj_file)
#ind_pr= proj.index[proj['Project_name'] == curr_proj_name].tolist()
print(ind_pr)
state.sync()
def gen_desc_save(composition, echo, shape, margin, echogenic_foci, ind_no,file_to_anot):
comp = composition.capitalize()
if echogenic_foci =="none":
echo_foc = "no calcification or comet tail artiacts"
else:
echo_foc = echogenic_foci
desc = comp + " " + echo + " " + shape + " thyroid nodule with " + margin + " margin" + " and " + echo_foc + "."
file_name2 = file_to_anot[ind_no]
file_only = extract_basename(file_name2)
label_to_log = composition + "," | e =RuntimeError('Reached end of images in the folder')
st.exception(e) | conditional_block |
Thyroid annotator.py | def get_file_list(root_dir):
file_list = []
counter = 1
for root, directories, filenames in os.walk(root_dir):
for filename in filenames:
if any(ext in filename for ext in extensions):
file_list.append(os.path.join(root, filename))
counter += 1
return sorted(file_list)
### Creating the side bar
add_proj_text = st.sidebar.write('Start new project')
add_textbox = st.sidebar.text_input('Project name')
add_foldbox = st.sidebar.text_input('Folder name' )
add_newproj_btn = st.sidebar.button('Create new project')
st.sidebar.write(' ')
add_proj_load = st.sidebar.write('Load project')
#proj_list =new_installation(proj_file)
add_csvbox = st.sidebar.selectbox('Pick your project',"exp1")
add_loadproj_btn = st.sidebar.button('Load project')
### store file names to a list and find the number of files in the list
#file_to_anot = get_file_list(source_dir)
#file_to_anot = get_file_list(source_dir)
#max_ind= len(file_to_anot) -1
### Creating a list to store the annotations
### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list
@st.cache(allow_output_mutation=True)
def init_anot(file_to_anot):
anot = [None]*(len(file_to_anot))
comp_list = [None]*(len(file_to_anot))
echo_list = [None]*(len(file_to_anot))
shape_list =[None]*(len(file_to_anot))
marg_list = [None]*(len(file_to_anot))
foci_list = [None]*(len(file_to_anot))
return anot,comp_list,echo_list,shape_list,marg_list,foci_list
### Creating a list to store just the file names
@st.cache(allow_output_mutation=True)
def init_base_f(file_to_anot):
base_file = [None]*(len(file_to_anot))
return base_file
#anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot)
#base_f = init_base_f(file_to_anot)
### Given an index this function converts path in the index to windows readable path
### then load the imaeg and returns the loaded image
def | (ind_no,file_to_anot):
file_name = file_to_anot[ind_no]
im_file =re.sub("\\\\","\\\\\\\\", file_name)
loaded_image = Image.open(im_file)
return loaded_image
### Get just the image file name from the complete path string
def extract_basename(path):
basename = re.search(r'[^\\/]+(?=[\\/]?$)', path)
if basename:
return basename.group(0)
def get_index(dta_ar, out_string):
for i in range(len(dta_ar)):
if dta_ar[i] == out_string:
in_dex = i
return in_dex
def main():
state = _get_state()
def set_index_in(in_num):
state.comp_list[in_num] = get_index(comp_options, composition)
state.echo_list[in_num] = get_index(echo_options, echo)
state.shape_list[in_num]= get_index(shape_options, shape)
state.marg_list[in_num] = get_index(margin_options, margin)
state.foci_list[in_num]= get_index(foci_options, echogenic_foci)
def update_choices(ind_num):
''' This function collects the values of lables/tags for the next or previous image,
then displays it in the user interface.
This function is called each time Next or Previous button is pressed.
'''
if state.comp_list[ind_num] != None:
state.comp = state.comp_list[ind_num]
else:
state.comp = 0
if state.echo_list[ind_num] != None:
state.echo = state.echo_list[ind_num]
else:
state.echo = 0
if state.shape_list[ind_num] !=None:
state.shape = state.shape_list[ind_num]
else:
state.shape = 0
if state.marg_list[ind_num] != None:
state.margin = state.marg_list[ind_num]
else:
state.margin = 0
if state.foci_list[ind_num] != None:
state.foci = state.foci_list[ind_num]
else:
state.foci = 0
#print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci)
# Initializing a state variable input
if state.input == None:
state.input = 0
state.last_anot =0
state.comp = 0
state.echo = 0
state.shape = 0
state.margin = 0
state.foci = 0
# Creating the UI
comp_options = ['cystic','spongiform', 'mixed cystic','solid']
echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic']
shape_options =['wider than tall','taller than wide']
margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete']
foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci']
with col2:
prev_button = st.button('Previous')
if state.active_project == True:
composition = st.radio('Composition',comp_options, state.comp)
echo = st.radio('Echogenicity',echo_options, state.echo)
shape = st.radio('Shape',shape_options, state.shape)
state.started = True
with col3:
next_button = st.button('Next')
if state.active_project == True:
margin = st.radio('Margin',margin_options, state.margin)
echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci)
with col1:
#if state.input ==0:
if next_button and state.active_project == True:
if state.input == state.max_ind:
e =RuntimeError('Reached end of images in the folder')
st.exception(e)
else:
set_index_in(state.input)
#update_choices(state.input,comp_list)
state.input = state.input + 1
update_choices(state.input)
if state.input > state.last_anot:
state.last_anot = state.input
if prev_button and state.active_project == True:
if state.input == 0:
e =RuntimeError('Reached the first image in the folder')
st.exception(e)
else:
set_index_in(state.input)
#update_choices(state.input,state.comp_list)
state.input = state.input -1
update_choices(state.input)
if add_newproj_btn and add_foldbox != "":
state.file_to_anot = get_file_list(add_foldbox)
state.max_ind= len(state.file_to_anot) -1
### initializing variables
state.active_project = True
state.input = 0
state.last_anot =0
state.comp = 0
state.echo = 0
state.shape = 0
state.margin = 0
state.foci = 0
state.started = False
state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot)
state.base_f = init_base_f(state.file_to_anot)
if add_foldbox != "" and state.started == True:
st.image(get_image(state.input,state.file_to_anot),use_column_width=True)
desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot )
#print("anot list",state.anot_list)
state.anot_list[state.input] = lbl
state.base_f[state.input] = fln
col1.write( desc_nod)
### Save button ########################################################
save_button = st.button('Save')
if save_button:
set_index_in(state.input)
df = pd.DataFrame(list(zip(state.base_f, state.anot_list)), columns =["IM_FILENAME", "LABELS"])
cwd = os.getcwd()
csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv'
#print("printing curr file name")
#print(csv_to_log)
df.to_csv(csv_to_log)
#proj = pd.read_csv(proj_file)
#ind_pr= proj.index[proj['Project_name'] == curr_proj_name].tolist()
print(ind_pr)
state.sync()
def gen_desc_save(composition, echo, shape, margin, echogenic_foci, ind_no,file_to_anot):
comp = composition.capitalize()
if echogenic_foci =="none":
echo_foc = "no calcification or comet tail artiacts"
else:
echo_foc = echogenic_foci
desc = comp + " " + echo + " " + shape + " thyroid nodule with " + margin + " margin" + " and " + echo_foc + "."
file_name2 = file_to_anot[ind_no]
file_only = extract_basename(file_name2)
label_to_log = composition + "," | get_image | identifier_name |
Thyroid annotator.py | get_file_list(root_dir):
file_list = []
counter = 1
for root, directories, filenames in os.walk(root_dir):
for filename in filenames:
if any(ext in filename for ext in extensions):
file_list.append(os.path.join(root, filename))
counter += 1
return sorted(file_list)
### Creating the side bar
add_proj_text = st.sidebar.write('Start new project')
add_textbox = st.sidebar.text_input('Project name')
add_foldbox = st.sidebar.text_input('Folder name' )
add_newproj_btn = st.sidebar.button('Create new project')
st.sidebar.write(' ')
add_proj_load = st.sidebar.write('Load project')
#proj_list =new_installation(proj_file)
add_csvbox = st.sidebar.selectbox('Pick your project',"exp1")
add_loadproj_btn = st.sidebar.button('Load project')
### store file names to a list and find the number of files in the list
#file_to_anot = get_file_list(source_dir)
#file_to_anot = get_file_list(source_dir)
#max_ind= len(file_to_anot) -1
### Creating a list to store the annotations
### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list
@st.cache(allow_output_mutation=True)
def init_anot(file_to_anot):
anot = [None]*(len(file_to_anot))
comp_list = [None]*(len(file_to_anot))
echo_list = [None]*(len(file_to_anot))
shape_list =[None]*(len(file_to_anot))
marg_list = [None]*(len(file_to_anot))
foci_list = [None]*(len(file_to_anot))
return anot,comp_list,echo_list,shape_list,marg_list,foci_list
### Creating a list to store just the file names
@st.cache(allow_output_mutation=True)
def init_base_f(file_to_anot):
base_file = [None]*(len(file_to_anot))
return base_file
#anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot)
#base_f = init_base_f(file_to_anot)
### Given an index this function converts path in the index to windows readable path
### then load the imaeg and returns the loaded image
def get_image(ind_no,file_to_anot):
file_name = file_to_anot[ind_no]
im_file =re.sub("\\\\","\\\\\\\\", file_name)
loaded_image = Image.open(im_file)
return loaded_image
### Get just the image file name from the complete path string
def extract_basename(path):
basename = re.search(r'[^\\/]+(?=[\\/]?$)', path)
if basename:
return basename.group(0)
def get_index(dta_ar, out_string):
for i in range(len(dta_ar)):
if dta_ar[i] == out_string:
in_dex = i
return in_dex
def main():
|
if state.echo_list[ind_num] != None:
state.echo = state.echo_list[ind_num]
else:
state.echo = 0
if state.shape_list[ind_num] !=None:
state.shape = state.shape_list[ind_num]
else:
state.shape = 0
if state.marg_list[ind_num] != None:
state.margin = state.marg_list[ind_num]
else:
state.margin = 0
if state.foci_list[ind_num] != None:
state.foci = state.foci_list[ind_num]
else:
state.foci = 0
#print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci)
# Initializing a state variable input
if state.input == None:
state.input = 0
state.last_anot =0
state.comp = 0
state.echo = 0
state.shape = 0
state.margin = 0
state.foci = 0
# Creating the UI
comp_options = ['cystic','spongiform', 'mixed cystic','solid']
echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic']
shape_options =['wider than tall','taller than wide']
margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete']
foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci']
with col2:
prev_button = st.button('Previous')
if state.active_project == True:
composition = st.radio('Composition',comp_options, state.comp)
echo = st.radio('Echogenicity',echo_options, state.echo)
shape = st.radio('Shape',shape_options, state.shape)
state.started = True
with col3:
next_button = st.button('Next')
if state.active_project == True:
margin = st.radio('Margin',margin_options, state.margin)
echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci)
with col1:
#if state.input ==0:
if next_button and state.active_project == True:
if state.input == state.max_ind:
e =RuntimeError('Reached end of images in the folder')
st.exception(e)
else:
set_index_in(state.input)
#update_choices(state.input,comp_list)
state.input = state.input + 1
update_choices(state.input)
if state.input > state.last_anot:
state.last_anot = state.input
if prev_button and state.active_project == True:
if state.input == 0:
e =RuntimeError('Reached the first image in the folder')
st.exception(e)
else:
set_index_in(state.input)
#update_choices(state.input,state.comp_list)
state.input = state.input -1
update_choices(state.input)
if add_newproj_btn and add_foldbox != "":
state.file_to_anot = get_file_list(add_foldbox)
state.max_ind= len(state.file_to_anot) -1
### initializing variables
state.active_project = True
state.input = 0
state.last_anot =0
state.comp = 0
state.echo = 0
state.shape = 0
state.margin = 0
state.foci = 0
state.started = False
state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot)
state.base_f = init_base_f(state.file_to_anot)
if add_foldbox != "" and state.started == True:
st.image(get_image(state.input,state.file_to_anot),use_column_width=True)
desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot )
#print("anot list",state.anot_list)
state.anot_list[state.input] = lbl
state.base_f[state.input] = fln
col1.write( desc_nod)
### Save button ########################################################
save_button = st.button('Save')
if save_button:
set_index_in(state.input)
df = pd.DataFrame(list(zip(state.base_f, state.anot_list)), columns =["IM_FILENAME", "LABELS"])
cwd = os.getcwd()
csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv'
#print("printing curr file name")
#print(csv_to_log)
df.to_csv(csv_to_log)
#proj = pd.read_csv(proj_file)
#ind_pr= proj.index[proj['Project_name'] == curr_proj_name].tolist()
print(ind_pr)
state.sync()
def gen_desc_save(composition, echo, shape, margin, echogenic_foci, ind_no,file_to_anot):
comp = composition.capitalize()
if echogenic_foci =="none":
echo_foc = "no calcification or comet tail artiacts"
else:
echo_foc = echogenic_foci
desc = comp + " " + echo + " " + shape + " thyroid nodule with " + margin + " margin" + " and " + echo_foc + "."
file_name2 = file_to_anot[ind_no]
file_only = extract_basename(file_name2)
label_to_log = composition + "," | state = _get_state()
def set_index_in(in_num):
state.comp_list[in_num] = get_index(comp_options, composition)
state.echo_list[in_num] = get_index(echo_options, echo)
state.shape_list[in_num]= get_index(shape_options, shape)
state.marg_list[in_num] = get_index(margin_options, margin)
state.foci_list[in_num]= get_index(foci_options, echogenic_foci)
def update_choices(ind_num):
''' This function collects the values of lables/tags for the next or previous image,
then displays it in the user interface.
This function is called each time Next or Previous button is pressed.
'''
if state.comp_list[ind_num] != None:
state.comp = state.comp_list[ind_num]
else:
state.comp = 0
| identifier_body |
Thyroid annotator.py | from streamlit.hashing import _CodeHasher
from streamlit.report_thread import get_report_ctx
from streamlit.server.server import Server
import streamlit as st
from PIL import Image
import os
import pandas as pd
import re
### Creating a 3 column layout in streamlit
col1, col2, col3= st.beta_columns([3, 1,1])
### Folder where the image files are kept. This path is in windows format.
### If you are runnin it in Linux, chnage the path appropriately.
#source_dir = r'C:\Users\JOHNY\CV_recepies\cv\images'
source_dir = None
csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv'
proj_file = r'C:\Users\JOHNY\CV_recepies\St_annot.csv'
### Function to create a python list cotaning paths to image files in a specific folder
### This function is decorated with @st.cache to avoid rerunning
extensions = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG']
@st.cache(allow_output_mutation=True)
def get_file_list(root_dir):
file_list = []
counter = 1
for root, directories, filenames in os.walk(root_dir):
for filename in filenames:
if any(ext in filename for ext in extensions):
file_list.append(os.path.join(root, filename))
counter += 1
return sorted(file_list)
### Creating the side bar
add_proj_text = st.sidebar.write('Start new project')
add_textbox = st.sidebar.text_input('Project name')
add_foldbox = st.sidebar.text_input('Folder name' )
add_newproj_btn = st.sidebar.button('Create new project')
st.sidebar.write(' ')
add_proj_load = st.sidebar.write('Load project')
#proj_list =new_installation(proj_file)
add_csvbox = st.sidebar.selectbox('Pick your project',"exp1")
add_loadproj_btn = st.sidebar.button('Load project')
### store file names to a list and find the number of files in the list
#file_to_anot = get_file_list(source_dir)
#file_to_anot = get_file_list(source_dir)
#max_ind= len(file_to_anot) -1
### Creating a list to store the annotations
### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list
@st.cache(allow_output_mutation=True)
def init_anot(file_to_anot):
anot = [None]*(len(file_to_anot))
comp_list = [None]*(len(file_to_anot))
echo_list = [None]*(len(file_to_anot))
shape_list =[None]*(len(file_to_anot))
marg_list = [None]*(len(file_to_anot))
foci_list = [None]*(len(file_to_anot))
return anot,comp_list,echo_list,shape_list,marg_list,foci_list
### Creating a list to store just the file names
@st.cache(allow_output_mutation=True)
def init_base_f(file_to_anot):
base_file = [None]*(len(file_to_anot))
return base_file
#anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot)
#base_f = init_base_f(file_to_anot)
### Given an index this function converts path in the index to windows readable path
### then load the imaeg and returns the loaded image
def get_image(ind_no,file_to_anot):
file_name = file_to_anot[ind_no]
im_file =re.sub("\\\\","\\\\\\\\", file_name)
loaded_image = Image.open(im_file)
return loaded_image
### Get just the image file name from the complete path string
def extract_basename(path):
basename = re.search(r'[^\\/]+(?=[\\/]?$)', path)
if basename:
return basename.group(0)
def get_index(dta_ar, out_string):
for i in range(len(dta_ar)):
if dta_ar[i] == out_string:
in_dex = i
return in_dex
def main():
state = _get_state()
def set_index_in(in_num):
state.comp_list[in_num] = get_index(comp_options, composition)
state.echo_list[in_num] = get_index(echo_options, echo)
state.shape_list[in_num]= get_index(shape_options, shape)
state.marg_list[in_num] = get_index(margin_options, margin)
state.foci_list[in_num]= get_index(foci_options, echogenic_foci)
def update_choices(ind_num):
''' This function collects the values of lables/tags for the next or previous image,
then displays it in the user interface.
This function is called each time Next or Previous button is pressed.
'''
if state.comp_list[ind_num] != None:
state.comp = state.comp_list[ind_num]
else:
state.comp = 0
if state.echo_list[ind_num] != None:
state.echo = state.echo_list[ind_num]
else:
state.echo = 0
if state.shape_list[ind_num] !=None:
state.shape = state.shape_list[ind_num]
else:
state.shape = 0
if state.marg_list[ind_num] != None:
state.margin = state.marg_list[ind_num]
else:
state.margin = 0
if state.foci_list[ind_num] != None:
state.foci = state.foci_list[ind_num]
else:
state.foci = 0
#print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci)
# Initializing a state variable input
if state.input == None:
state.input = 0
state.last_anot =0
state.comp = 0
state.echo = 0
state.shape = 0
state.margin = 0
state.foci = 0
# Creating the UI
comp_options = ['cystic','spongiform', 'mixed cystic','solid']
echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic']
shape_options =['wider than tall','taller than wide']
margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete']
foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci']
with col2:
prev_button = st.button('Previous')
if state.active_project == True:
composition = st.radio('Composition',comp_options, state.comp)
echo = st.radio('Echogenicity',echo_options, state.echo)
shape = st.radio('Shape',shape_options, state.shape)
state.started = True
with col3:
next_button = st.button('Next')
if state.active_project == True:
margin = st.radio('Margin',margin_options, state.margin)
echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci)
with col1:
#if state.input ==0:
if next_button and state.active_project == True:
if state.input == state.max_ind:
e =RuntimeError('Reached end of images in the folder')
st.exception(e)
else:
set_index_in(state.input)
#update_choices(state.input,comp_list)
state.input = state.input + 1
update_choices(state.input)
if state.input > state.last_anot:
state.last_anot = state.input
if prev_button and state.active_project == True:
if state.input == 0:
e =RuntimeError('Reached the first image in the folder')
st.exception(e)
else:
set_index_in(state.input)
#update_choices(state.input,state.comp_list)
state.input = state.input -1
update_choices(state.input)
if add_newproj_btn and add_foldbox != "":
state.file_to_anot = get_file_list(add_foldbox)
state.max_ind= len(state.file_to_anot) -1
### initializing variables
state.active_project = True
state.input = 0
state.last_anot =0
state.comp = 0
state.echo = 0
state.shape = 0
state.margin = 0
state.foci = 0
state.started = False
state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot)
state.base_f = init_base_f(state.file_to_anot)
if add_foldbox != "" and state.started == True:
st.image(get_image(state.input,state.file_to_anot),use_column_width=True)
desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot )
#print("anot list",state.anot_list)
state.anot_list[state.input] = lbl
state.base_f[state.input] = fln
col1.write( desc_nod)
### Save button ########################################################
save_button = st.button('Save')
if save_button:
set_index_in(state.input |
### Importing libraries
| random_line_split | |
gardenView.js | idth, length) {
// From user dimensions, we calculate grid features, like the size in pixels of a meter
this.grid = {
userDimensions: {
width: width,
length: length
},
sizeMeter: ($(`#${this.containerSelector}`).width() - SCROLLBAR_WIDTH) / width,
horizontalLines: [],
verticalLines: []
};
$(`#${this.containerSelector}`).empty().append(`
<div class="row">
<div class="col-md-12">
<div style="height:400px; overflow: auto;">
<canvas
id="canvas-garden"
width=${this.grid.sizeMeter * width}
height=${this.grid.sizeMeter * length}>
</canvas>
</div>
</div>
</div>
`);
let self = this;
this.canvas = new fabric.Canvas('canvas-garden');
let canvasContainer = $(`#${this.containerSelector}`).parent()[0];
// drag and drop events dont work right with JQuery on container...
// so for canvas container, use native JS methods
// On drag over
canvasContainer.addEventListener('dragover', (event) => {
if (event.preventDefault) {
event.preventDefault();
}
event.dataTransfer.dropEffect = 'copy';
return false;
}, false);
// On drop
canvasContainer.addEventListener('drop', (event) => {
event.preventDefault();
if (event.stopPropagation) {
event.stopPropagation();
}
const idPlant = $('#plant-selectize').val();
const position = {
x: event.layerX,
y: event.layerY
};
self.putPlant($('#image-selected img.img-dragging')[0], idPlant, position);
return false;
}, false);
// On selection of an object
this.canvas.on('object:selected', (event) => {
this.selectPlant(event.target);
});
// On click on grid, but not on a object
this.canvas.on('before:selection:cleared', (event) => {
this.unselectPlant();
});
// On image moving
this.canvas.on('object:moving', (event) => {
var obj = event.target;
if (typeof(obj) === 'undefined' || obj === null || typeof(obj.canvas) === 'undefined') {
return;
}
// Below is code to be sure we can't drag a plant outside of the visible grid
// if object is too big ignore
if(obj.currentHeight > obj.canvas.height || obj.currentWidth > obj.canvas.width){
return;
}
obj.setCoords();
const imagePlant = obj._objects.filter(o => o.isType('image'))[0];
const boundingRect = {
left: obj.left + obj.width / 2 - imagePlant.width / 2,
top: obj.top + obj.height / 2 - imagePlant.height / 2,
width: imagePlant.width,
height: imagePlant.height
};
// top-left corner
if(boundingRect.top < 0 || boundingRect.left < 0){
obj.top = Math.max(obj.top, obj.top-boundingRect.top);
obj.left = Math.max(obj.left, obj.left-boundingRect.left);
}
// bot-right corner
if(boundingRect.top+boundingRect.height > obj.canvas.height || boundingRect.left+boundingRect.width > obj.canvas.width){
obj.top = Math.min(obj.top, obj.canvas.height-boundingRect.height+obj.top-boundingRect.top);
obj.left = Math.min(obj.left, obj.canvas.width-boundingRect.width+obj.left-boundingRect.left);
}
// On moving, notify state panel that we made a change
this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
});
this.refreshGrid();
// Register listeners on some actions
this.actionDispatcher.register('unselectPlant', actions.UNSELECT_PLANT, () => this.canvas.trigger('before:selection:cleared'));
this.actionDispatcher.register('removePlant', actions.REMOVE_PLANT, () => this.removePlant());
this.actionDispatcher.register('showAreas', actions.SHOW_AREAS, (areaType) => this.showAreas(areaType));
this.actionDispatcher.register('hideAreas', actions.HIDE_AREAS, (areaType) => this.hideAreas(areaType));
/*this.actionDispatcher.register('showAreaSeeding', actions.SHOW_AREA_SEEDING, () => this.showAreas('seeding'));
this.actionDispatcher.register('showAreaSize', actions.SHOW_AREA_SIZE, () => this.showAreas('size'));
this.actionDispatcher.register('showAreaHeight', actions.SHOW_AREA_HEIGHT, () => this.showAreas('height'));
this.actionDispatcher.register('showAreaSun', actions.SHOW_AREA_SUN, () => this.showAreas('sun'));
this.actionDispatcher.register('hideArea', actions.HIDE_AREAS, () => this.hideAreas());*/
this.actionDispatcher.register('showMonthlyTask', actions.SHOW_TASK_MONTH, (data) => this.showMonthlyTask(data));
this.actionDispatcher.register('prepareSave', actions.PREPARE_SAVE, (data) => this.prepareSave(data));
this.actionDispatcher.register('prepareScore', actions.PREPARE_SCORE, (data) => this.prepareScoring(data));
this.actionDispatcher.register('showScorePlants', actions.SHOW_SCORE_PLANTS, (data) => this.showScoreSelection(data));
this.actionDispatcher.register('hideScorePlants', actions.HIDE_SCORE_PLANTS, (data) => this.hideScoreSelection(data));
// Unregister listeners on garden creation / loading
this.actionDispatcher.unregister('generateGarden');
this.actionDispatcher.unregister('loadGarden');
}
/* Get some datas about plants in garden, for saving */
prepareSave(data) {
let plants = [];
for (const id in this.imagesMapping) {
plants.push(this.imagesMapping[id].toJSON());
}
// Call save process by dispatching save event with plants data
this.actionDispatcher.dispatch({type: actions.SAVE, data: {
id: data.id,
garden: {
plants: plants,
userDimensions: this.grid.userDimensions
}
}});
}
/* Get some datas about plants in garden, to run scoring */
prepareScoring() {
let plants = [], plantModels = {};
for (const id in this.imagesMapping) {
const plantView = this.imagesMapping[id];
const plant = plantView.getPlant();
plants.push(plantView.toJSON());
if (!(plant.id in plantModels)) {
plantModels[plant.id] = plant;
}
}
const scoreInput = new ScoreInput(plants, plantModels, {
sizeMeter: this.grid.sizeMeter
});
// Call score process by dispatching save event with plants data
this.actionDispatcher.dispatch({type: actions.SCORE, data: {
input: scoreInput,
}});
}
/*
Add a plant on grid, by putting image in a fabricjs group
and instanciating a plantView object
*/
addPlantOnGrid(img, idPlant, width, height, position) {
img.set({
width: width,
height: height,
left: position.x,
top: position.y,
hasRotatingPoint: false,
lockRotation: true,
lockScalingFlip : true,
lockScalingX: true,
lockScalingY: true
});
const plant = this.plantFactory.buildPlant(idPlant);
let plantView = new PlantView(img, plant);
this.imagesMapping[img.id] = plantView;
this.canvas.add(plantView.getGroup());
}
/* Populate garden with plants from imported data */
load(data) {
// By default, if no user dimensions saved, we generate a 6mx4m garden
const {width, length} = (typeof(data.garden.userDimensions) !== 'undefined')
? data.garden.userDimensions
: {width: DEFAULT_USER_WIDTH, length: DEFAULT_USER_LENGTH};
this.generate(width, length);
data.garden.plants.map(jsonPlant => {
const idImage = this.idGardenCounter;
this.idGardenCounter = this.idGardenCounter + 1;
const img = cst.PLANTS_IMAGES[jsonPlant.idPlant] || cst.DEFAULT_PLANT_IMAGE;
fabric.Image.fromURL(`${cst.URL_IMAGES}/${img}`, oImg => {
oImg.set({
id: idImage
});
this.addPlantOnGrid(oImg, jsonPlant.idPlant, DEFAULT_IMAGE_WIDTH, DEFAULT_IMAGE_HEIGHT, jsonPlant.position);
});
});
}
/* Put a plant into the garden, from dragged image */
putPlant(img, idPlant, position) {
const idImage = this.idGardenCounter;
this.idGardenCounter = this.idGardenCounter + 1;
img = new fabric.Image(img, {
id: idImage
});
this.addPlantOnGrid(img, idPlant, img.width, img.height, position);
this.showMonthlyTask(this.monthSelected);
this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
}
/* Remove selected plant */
removePlant() {
if (this.idImageSelected === null) {
return;
}
// We keep id in another variable to keep a reference for deleting from imagesMapping
const id = this.idImageSelected;
let imageGroupToRemove = this.imagesMapping[this.idImageSelected].getGroup();
this.canvas.remove(imageGroupToRemove);
delete this | nerate(w | identifier_name | |
gardenView.js | this.grid = {
userDimensions: {
width: width,
length: length
},
sizeMeter: ($(`#${this.containerSelector}`).width() - SCROLLBAR_WIDTH) / width,
horizontalLines: [],
verticalLines: []
}; |
$(`#${this.containerSelector}`).empty().append(`
<div class="row">
<div class="col-md-12">
<div style="height:400px; overflow: auto;">
<canvas
id="canvas-garden"
width=${this.grid.sizeMeter * width}
height=${this.grid.sizeMeter * length}>
</canvas>
</div>
</div>
</div>
`);
let self = this;
this.canvas = new fabric.Canvas('canvas-garden');
let canvasContainer = $(`#${this.containerSelector}`).parent()[0];
// drag and drop events dont work right with JQuery on container...
// so for canvas container, use native JS methods
// On drag over
canvasContainer.addEventListener('dragover', (event) => {
if (event.preventDefault) {
event.preventDefault();
}
event.dataTransfer.dropEffect = 'copy';
return false;
}, false);
// On drop
canvasContainer.addEventListener('drop', (event) => {
event.preventDefault();
if (event.stopPropagation) {
event.stopPropagation();
}
const idPlant = $('#plant-selectize').val();
const position = {
x: event.layerX,
y: event.layerY
};
self.putPlant($('#image-selected img.img-dragging')[0], idPlant, position);
return false;
}, false);
// On selection of an object
this.canvas.on('object:selected', (event) => {
this.selectPlant(event.target);
});
// On click on grid, but not on a object
this.canvas.on('before:selection:cleared', (event) => {
this.unselectPlant();
});
// On image moving
this.canvas.on('object:moving', (event) => {
var obj = event.target;
if (typeof(obj) === 'undefined' || obj === null || typeof(obj.canvas) === 'undefined') {
return;
}
// Below is code to be sure we can't drag a plant outside of the visible grid
// if object is too big ignore
if(obj.currentHeight > obj.canvas.height || obj.currentWidth > obj.canvas.width){
return;
}
obj.setCoords();
const imagePlant = obj._objects.filter(o => o.isType('image'))[0];
const boundingRect = {
left: obj.left + obj.width / 2 - imagePlant.width / 2,
top: obj.top + obj.height / 2 - imagePlant.height / 2,
width: imagePlant.width,
height: imagePlant.height
};
// top-left corner
if(boundingRect.top < 0 || boundingRect.left < 0){
obj.top = Math.max(obj.top, obj.top-boundingRect.top);
obj.left = Math.max(obj.left, obj.left-boundingRect.left);
}
// bot-right corner
if(boundingRect.top+boundingRect.height > obj.canvas.height || boundingRect.left+boundingRect.width > obj.canvas.width){
obj.top = Math.min(obj.top, obj.canvas.height-boundingRect.height+obj.top-boundingRect.top);
obj.left = Math.min(obj.left, obj.canvas.width-boundingRect.width+obj.left-boundingRect.left);
}
// On moving, notify state panel that we made a change
this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
});
this.refreshGrid();
// Register listeners on some actions
this.actionDispatcher.register('unselectPlant', actions.UNSELECT_PLANT, () => this.canvas.trigger('before:selection:cleared'));
this.actionDispatcher.register('removePlant', actions.REMOVE_PLANT, () => this.removePlant());
this.actionDispatcher.register('showAreas', actions.SHOW_AREAS, (areaType) => this.showAreas(areaType));
this.actionDispatcher.register('hideAreas', actions.HIDE_AREAS, (areaType) => this.hideAreas(areaType));
/*this.actionDispatcher.register('showAreaSeeding', actions.SHOW_AREA_SEEDING, () => this.showAreas('seeding'));
this.actionDispatcher.register('showAreaSize', actions.SHOW_AREA_SIZE, () => this.showAreas('size'));
this.actionDispatcher.register('showAreaHeight', actions.SHOW_AREA_HEIGHT, () => this.showAreas('height'));
this.actionDispatcher.register('showAreaSun', actions.SHOW_AREA_SUN, () => this.showAreas('sun'));
this.actionDispatcher.register('hideArea', actions.HIDE_AREAS, () => this.hideAreas());*/
this.actionDispatcher.register('showMonthlyTask', actions.SHOW_TASK_MONTH, (data) => this.showMonthlyTask(data));
this.actionDispatcher.register('prepareSave', actions.PREPARE_SAVE, (data) => this.prepareSave(data));
this.actionDispatcher.register('prepareScore', actions.PREPARE_SCORE, (data) => this.prepareScoring(data));
this.actionDispatcher.register('showScorePlants', actions.SHOW_SCORE_PLANTS, (data) => this.showScoreSelection(data));
this.actionDispatcher.register('hideScorePlants', actions.HIDE_SCORE_PLANTS, (data) => this.hideScoreSelection(data));
// Unregister listeners on garden creation / loading
this.actionDispatcher.unregister('generateGarden');
this.actionDispatcher.unregister('loadGarden');
}
/* Get some datas about plants in garden, for saving */
prepareSave(data) {
let plants = [];
for (const id in this.imagesMapping) {
plants.push(this.imagesMapping[id].toJSON());
}
// Call save process by dispatching save event with plants data
this.actionDispatcher.dispatch({type: actions.SAVE, data: {
id: data.id,
garden: {
plants: plants,
userDimensions: this.grid.userDimensions
}
}});
}
/* Get some datas about plants in garden, to run scoring */
prepareScoring() {
let plants = [], plantModels = {};
for (const id in this.imagesMapping) {
const plantView = this.imagesMapping[id];
const plant = plantView.getPlant();
plants.push(plantView.toJSON());
if (!(plant.id in plantModels)) {
plantModels[plant.id] = plant;
}
}
const scoreInput = new ScoreInput(plants, plantModels, {
sizeMeter: this.grid.sizeMeter
});
// Call score process by dispatching save event with plants data
this.actionDispatcher.dispatch({type: actions.SCORE, data: {
input: scoreInput,
}});
}
/*
Add a plant on grid, by putting image in a fabricjs group
and instanciating a plantView object
*/
addPlantOnGrid(img, idPlant, width, height, position) {
img.set({
width: width,
height: height,
left: position.x,
top: position.y,
hasRotatingPoint: false,
lockRotation: true,
lockScalingFlip : true,
lockScalingX: true,
lockScalingY: true
});
const plant = this.plantFactory.buildPlant(idPlant);
let plantView = new PlantView(img, plant);
this.imagesMapping[img.id] = plantView;
this.canvas.add(plantView.getGroup());
}
/* Populate garden with plants from imported data */
load(data) {
// By default, if no user dimensions saved, we generate a 6mx4m garden
const {width, length} = (typeof(data.garden.userDimensions) !== 'undefined')
? data.garden.userDimensions
: {width: DEFAULT_USER_WIDTH, length: DEFAULT_USER_LENGTH};
this.generate(width, length);
data.garden.plants.map(jsonPlant => {
const idImage = this.idGardenCounter;
this.idGardenCounter = this.idGardenCounter + 1;
const img = cst.PLANTS_IMAGES[jsonPlant.idPlant] || cst.DEFAULT_PLANT_IMAGE;
fabric.Image.fromURL(`${cst.URL_IMAGES}/${img}`, oImg => {
oImg.set({
id: idImage
});
this.addPlantOnGrid(oImg, jsonPlant.idPlant, DEFAULT_IMAGE_WIDTH, DEFAULT_IMAGE_HEIGHT, jsonPlant.position);
});
});
}
/* Put a plant into the garden, from dragged image */
putPlant(img, idPlant, position) {
const idImage = this.idGardenCounter;
this.idGardenCounter = this.idGardenCounter + 1;
img = new fabric.Image(img, {
id: idImage
});
this.addPlantOnGrid(img, idPlant, img.width, img.height, position);
this.showMonthlyTask(this.monthSelected);
this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
}
/* Remove selected plant */
removePlant() {
if (this.idImageSelected === null) {
return;
}
// We keep id in another variable to keep a reference for deleting from imagesMapping
const id = this.idImageSelected;
let imageGroupToRemove = this.imagesMapping[this.idImageSelected].getGroup();
this.canvas.remove(imageGroupToRemove);
delete this.imagesMapping[id];
this.actionDispatcher.dispatch({type: actions.HIDE_CARD});
this.actionDispatcher.dispatch({type: actions.NOTIFY | random_line_split | |
gardenView.js | this.grid = {
userDimensions: {
width: width,
length: length
},
sizeMeter: ($(`#${this.containerSelector}`).width() - SCROLLBAR_WIDTH) / width,
horizontalLines: [],
verticalLines: []
};
$(`#${this.containerSelector}`).empty().append(`
<div class="row">
<div class="col-md-12">
<div style="height:400px; overflow: auto;">
<canvas
id="canvas-garden"
width=${this.grid.sizeMeter * width}
height=${this.grid.sizeMeter * length}>
</canvas>
</div>
</div>
</div>
`);
let self = this;
this.canvas = new fabric.Canvas('canvas-garden');
let canvasContainer = $(`#${this.containerSelector}`).parent()[0];
// drag and drop events dont work right with JQuery on container...
// so for canvas container, use native JS methods
// On drag over
canvasContainer.addEventListener('dragover', (event) => {
if (event.preventDefault) {
event.preventDefault();
}
event.dataTransfer.dropEffect = 'copy';
return false;
}, false);
// On drop
canvasContainer.addEventListener('drop', (event) => {
event.preventDefault();
if (event.stopPropagation) {
event.stopPropagation();
}
const idPlant = $('#plant-selectize').val();
const position = {
x: event.layerX,
y: event.layerY
};
self.putPlant($('#image-selected img.img-dragging')[0], idPlant, position);
return false;
}, false);
// On selection of an object
this.canvas.on('object:selected', (event) => {
this.selectPlant(event.target);
});
// On click on grid, but not on a object
this.canvas.on('before:selection:cleared', (event) => {
this.unselectPlant();
});
// On image moving
this.canvas.on('object:moving', (event) => {
var obj = event.target;
if (typeof(obj) === 'undefined' || obj === null || typeof(obj.canvas) === 'undefined') {
return;
}
// Below is code to be sure we can't drag a plant outside of the visible grid
// if object is too big ignore
if(obj.currentHeight > obj.canvas.height || obj.currentWidth > obj.canvas.width){
return;
}
obj.setCoords();
const imagePlant = obj._objects.filter(o => o.isType('image'))[0];
const boundingRect = {
left: obj.left + obj.width / 2 - imagePlant.width / 2,
top: obj.top + obj.height / 2 - imagePlant.height / 2,
width: imagePlant.width,
height: imagePlant.height
};
// top-left corner
if(boundingRect.top < 0 || boundingRect.left < 0){
obj.top = Math.max(obj.top, obj.top-boundingRect.top);
obj.left = Math.max(obj.left, obj.left-boundingRect.left);
}
// bot-right corner
if(boundingRect.top+boundingRect.height > obj.canvas.height || boundingRect.left+boundingRect.width > obj.canvas.width){
obj.top = Math.min(obj.top, obj.canvas.height-boundingRect.height+obj.top-boundingRect.top);
obj.left = Math.min(obj.left, obj.canvas.width-boundingRect.width+obj.left-boundingRect.left);
}
// On moving, notify state panel that we made a change
this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
});
this.refreshGrid();
// Register listeners on some actions
this.actionDispatcher.register('unselectPlant', actions.UNSELECT_PLANT, () => this.canvas.trigger('before:selection:cleared'));
this.actionDispatcher.register('removePlant', actions.REMOVE_PLANT, () => this.removePlant());
this.actionDispatcher.register('showAreas', actions.SHOW_AREAS, (areaType) => this.showAreas(areaType));
this.actionDispatcher.register('hideAreas', actions.HIDE_AREAS, (areaType) => this.hideAreas(areaType));
/*this.actionDispatcher.register('showAreaSeeding', actions.SHOW_AREA_SEEDING, () => this.showAreas('seeding'));
this.actionDispatcher.register('showAreaSize', actions.SHOW_AREA_SIZE, () => this.showAreas('size'));
this.actionDispatcher.register('showAreaHeight', actions.SHOW_AREA_HEIGHT, () => this.showAreas('height'));
this.actionDispatcher.register('showAreaSun', actions.SHOW_AREA_SUN, () => this.showAreas('sun'));
this.actionDispatcher.register('hideArea', actions.HIDE_AREAS, () => this.hideAreas());*/
this.actionDispatcher.register('showMonthlyTask', actions.SHOW_TASK_MONTH, (data) => this.showMonthlyTask(data));
this.actionDispatcher.register('prepareSave', actions.PREPARE_SAVE, (data) => this.prepareSave(data));
this.actionDispatcher.register('prepareScore', actions.PREPARE_SCORE, (data) => this.prepareScoring(data));
this.actionDispatcher.register('showScorePlants', actions.SHOW_SCORE_PLANTS, (data) => this.showScoreSelection(data));
this.actionDispatcher.register('hideScorePlants', actions.HIDE_SCORE_PLANTS, (data) => this.hideScoreSelection(data));
// Unregister listeners on garden creation / loading
this.actionDispatcher.unregister('generateGarden');
this.actionDispatcher.unregister('loadGarden');
}
/* Get some datas about plants in garden, for saving */
prepareSave(data) {
let plants = [];
for (const id in this.imagesMapping) {
plants.push(this.imagesMapping[id].toJSON());
}
// Call save process by dispatching save event with plants data
this.actionDispatcher.dispatch({type: actions.SAVE, data: {
id: data.id,
garden: {
plants: plants,
userDimensions: this.grid.userDimensions
}
}});
}
/* Get some datas about plants in garden, to run scoring */
prepareScoring() {
let plants = [], plantModels = {};
for (const id in this.imagesMapping) {
const plantView = this.imagesMapping[id];
const plant = plantView.getPlant();
plants.push(plantView.toJSON());
if (!(plant.id in plantModels)) {
plantModels[plant.id] = plant;
}
}
const scoreInput = new ScoreInput(plants, plantModels, {
sizeMeter: this.grid.sizeMeter
});
// Call score process by dispatching save event with plants data
this.actionDispatcher.dispatch({type: actions.SCORE, data: {
input: scoreInput,
}});
}
/*
Add a plant on grid, by putting image in a fabricjs group
and instanciating a plantView object
*/
addPlantOnGrid(img, idPlant, width, height, position) {
| /* Populate garden with plants from imported data */
load(data) {
// By default, if no user dimensions saved, we generate a 6mx4m garden
const {width, length} = (typeof(data.garden.userDimensions) !== 'undefined')
? data.garden.userDimensions
: {width: DEFAULT_USER_WIDTH, length: DEFAULT_USER_LENGTH};
this.generate(width, length);
data.garden.plants.map(jsonPlant => {
const idImage = this.idGardenCounter;
this.idGardenCounter = this.idGardenCounter + 1;
const img = cst.PLANTS_IMAGES[jsonPlant.idPlant] || cst.DEFAULT_PLANT_IMAGE;
fabric.Image.fromURL(`${cst.URL_IMAGES}/${img}`, oImg => {
oImg.set({
id: idImage
});
this.addPlantOnGrid(oImg, jsonPlant.idPlant, DEFAULT_IMAGE_WIDTH, DEFAULT_IMAGE_HEIGHT, jsonPlant.position);
});
});
}
/* Put a plant into the garden, from dragged image */
putPlant(img, idPlant, position) {
const idImage = this.idGardenCounter;
this.idGardenCounter = this.idGardenCounter + 1;
img = new fabric.Image(img, {
id: idImage
});
this.addPlantOnGrid(img, idPlant, img.width, img.height, position);
this.showMonthlyTask(this.monthSelected);
this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
}
/* Remove selected plant */
removePlant() {
if (this.idImageSelected === null) {
return;
}
// We keep id in another variable to keep a reference for deleting from imagesMapping
const id = this.idImageSelected;
let imageGroupToRemove = this.imagesMapping[this.idImageSelected].getGroup();
this.canvas.remove(imageGroupToRemove);
delete this.imagesMapping[id];
this.actionDispatcher.dispatch({type: actions.HIDE_CARD});
this.actionDispatcher.dispatch({type: actions.NOTIFY | img.set({
width: width,
height: height,
left: position.x,
top: position.y,
hasRotatingPoint: false,
lockRotation: true,
lockScalingFlip : true,
lockScalingX: true,
lockScalingY: true
});
const plant = this.plantFactory.buildPlant(idPlant);
let plantView = new PlantView(img, plant);
this.imagesMapping[img.id] = plantView;
this.canvas.add(plantView.getGroup());
}
| identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.