file_name
large_stringlengths
4
140
prefix
large_stringlengths
0
39k
suffix
large_stringlengths
0
36.1k
middle
large_stringlengths
0
29.4k
fim_type
large_stringclasses
4 values
chart.js
google.charts.load('current', { packages: ['corechart'] }); //global variable for storing the ids of articles currently shown in author analytics page var articlesShown = []; var allArticleTitles = []; var allArticletitlesWithRevisions = []; //chart otpions var options = { 'fontName':'Avenir', 'backgroundColor': { fill:'#F3F3F3', strokeWidth:10, stroke:'#CE953F' }, 'bar': {groupWidth: "70%"}, 'width': '100%', 'height': 500, 'hAxis':{ showTextEvery:1, maxAlternation:1, minTextSpacing:1, textStyle:{ fontSize:11, bold:true, }, }, 'legend': { position: 'top', alignment:'center' }, 'vAxis': { viewWindowMode: 'pretty', viewWindow: { min: 0, }, gridlines: { count: 9, } } }; var pieData var barData //Onload function window.onload = function () { getOverviewPage(); //loads overview page by default //when menu links are clicked, other pages can be loaded $('#ArticleAnalytics').click(function () { resetMenuBar(); $('#ArticleAnalytics').addClass("active"); getArticleAnalyticsPage(); }) $('#AuthorAnalytics').click(function () { resetMenuBar(); $('#AuthorAnalytics').addClass("active"); getAuthorAnalyticsPage(); }) $('#Overview').click(function () { resetMenuBar(); $('#Overview').addClass("active"); getOverviewPage(); }) } /******************************** FUNCTIONS FOR LOADING MAIN PAGES ********************************/ //Replaces entire page with article analytics function getArticleAnalyticsPage() { $('#main').empty(); $('#main').load('views/articleAnalytics.html', null, function () { fillAutocomplete(); $('#articleSearchButton').click(function () { getIndividualArticleStats(); }) }) } function getOverviewPage() { $('#main').empty(); //Clear page $('#main').load('views/overview.html', null, function () { //load overview page //Intial Data load getTopRevs(); getBotRevs(); getOldestArticles(); getNewestArticles(); getTitleLargestRegUser(); getTitleLeastRegUser(); //get chart data $.getJSON('/pieData', null, function (rdata) { pieData = rdata } ); $.getJSON('/barData', null, 
function (rdata) { barData = rdata drawBar('#myChart'); } ); //Update based on user input $('[name=topBotRevUpdate]').click(function () { getTopRevs(); getBotRevs(); }); $('[name=chartUpdate]').click(function () { var whichChart = $('[name=chartSelector]').val(); if (whichChart == "In Total") { drawPie('#myChart'); } else { drawBar('#myChart'); } }); }); } function getAuthorAnalyticsPage() { $('#main').empty(); $('#main').load('views/authorAnalytics.html', null, function () { $('#authorSearchButton').click(function () { getAuthorArticleList(); }) }); } //clears the .active class from the menu bar function resetMenuBar() { $('#Overview').removeClass("active"); $('#ArticleAnalytics').removeClass("active"); $('#AuthorAnalytics').removeClass("active"); } /****************** LOAD THE CHART DATA *******************/ function drawPie(where) { console.log(where) graphData = new google.visualization.DataTable(); graphData.addColumn('string', 'Element'); graphData.addColumn('number', 'Percentage'); $.each(pieData, function (key, val) { graphData.addRow([key, val]); }) var chart = new google.visualization.PieChart($(where)[0]); chart.draw(graphData, options); } function drawBar(where) { graphData = new google.visualization.DataTable(); graphData.addColumn('string', 'Year'); graphData.addColumn('number', 'RegularUsers'); graphData.addColumn('number', 'Bots'); graphData.addColumn('number', 'Admins'); graphData.addColumn('number', 'Anon'); var test = []; for (var i in barData) { test.push(barData[i]) } // console.log(test); for (var x = 0; x < test.length; x++) { graphData.addRow([test[x].Year, test[x].RegularUsers, test[x].Bots, test[x].Admins, test[x].Anon]); } var chart = new google.visualization.ColumnChart($(where)[0]); chart.draw(graphData, options); } function drawBarSpecificUser(where, dataToUse) { graphData = new google.visualization.DataTable(); graphData.addColumn('string', 'Year'); graphData.addColumn('number', 'Revisions'); var test = []; for (var i in dataToUse) { 
test.push(dataToUse[i]) } // console.log(test); for (var x = 0; x < test.length; x++) { graphData.addRow([test[x].Year, test[x].Revisions]); } var chart = new google.visualization.BarChart($(where)[0]); chart.draw(graphData, options); } /******************************************* FUNCTIONS FOR LOADING REGULAR DATA INTO HTML ********************************************/ function getTopRevs() { var quantity = $('[name=quantity]').val(); var destination = 'getTopRevs?quantity=' + quantity; $.get(destination, quantity, function (data) { $('#topRev').empty(); for (var x = 0; x < data.length; x++) { var num = x + 1; num = num + '. '; var appendMe = $('<li>' + num + data[x]._id + '</li>'); $('#topRev').append(appendMe); } }) } function getBotRevs() { var quantity = $('[name=quantity]').val(); var destination = 'getBotRevs?quantity=' + quantity; $.get(destination, quantity, function (data) { $('#botRev').empty(); for (var x = 0; x < data.length; x++) { var num = x + 1; num = num + '. '; var appendMe = $('<li>' + num + data[x]._id + '</li>'); $('#botRev').append(appendMe); } }) } function getOldestArticles() { var destination = 'getOldestArticles'; $.get(destination, null, function (data) { console.log(data); $('#oldestArticles').empty(); for (var x = 0; x < data.length; x++) { var num = x + 1; num = num + '. '; var appendMe = $('<li>' + num + data[x]._id + '</li>'); $('#oldestArticles').append(appendMe); } }) } function
() { var destination = 'getNewestArticles'; console.log('here'); $.get(destination, null, function (data) { console.log(data); $('#newestArticles').empty(); for (var x = 0; x < data.length; x++) { var num = x + 1; num = num + '. '; var appendMe = $('<li>' + num + data[x]._id + '</li>'); $('#newestArticles').append(appendMe); } }) } function getTitleLargestRegUser(){ var destination = 'getLeastRegoUser'; $.get(destination, null, function (data) { console.log(data); $('#mostUsers').empty(); $('#mostUsers').text(data); }) } function getTitleLeastRegUser(){ var destination = 'getLargestRegoUser'; $.get(destination, null, function (data) { console.log(data); $('#leastUsers').empty(); $('#leastUsers').text(data); }) } function getAuthorArticleList() { var authorName = $('#authorEntryBox').val(); console.log(authorName) var destination = 'getAuthorArticleList?authorName=' + authorName; var putListHere = $('#articleList'); $.get(destination, null, function (data) { console.log('Here is the user list ') console.log(data) if (data.length == 0) { alert("Could not find any users with names matching that query"); } else { // var heading = $('<thead><tr>' + '<th>' + 'Article Name' + '</th>' + '<th>' + 'Number of Revisions' + '</th>' + '</tr></thead><tbody>') // $('#articleList').append(heading); // for (var x = 0; x < data.length; x++) { // var test = "<tr onclick='getTimestamps()' id= '" + "ArticleNameIs" + data[x]._id + "'>" + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>'; // var appendMe = $("<tr onclick='getTimestamps()' id= '" + "ArticleNameIs" + data[x]._id + "'>" + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>'); // console.log(test) // $('#articleList').append(appendMe); // } // var ending = $('</tbody>'); // $('#articleList').append(ending); putListHere.empty(); //Add headers var theader = $("<thead><tr><th>User Name</th><th>Article Name</th><th>Number of Revisions</th></tr></thead>") 
$('#articleList').append(theader); //Create data table for (var x = 0; x < data.length; x++) { var appnedMe = $("<tr class='articleEntry' id= '" + "entryID" + x + "'>" + '<td>' + data[x].user + '</td>' + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>'); $('#articleList').append(appnedMe); var temp = '#entryID' + x; // $(temp).click(function(x){ //Get timestamps // console.log(x) // }) } //Create event handler seperately function handleEvent(idVal){ var elementGetter = '#entryID' + idVal; $(elementGetter).click(function(){ $(".timestamp").remove(); console.log(elementGetter) var newdestination = 'getTimestamps?authorName=' + data[idVal]._id + "&title=" + data[idVal].user; $.get(newdestination, null, function (newdata) { console.log(newdata) for(var z = 0; z < newdata.length; z++){ var myDate = new Date(newdata[z].timestamp) console.log(myDate) $('<tr><td class="timestamp">' + " " + myDate.toUTCString() + '</td></tr>').insertAfter(elementGetter); } }) }) } for(var x = 0; x < data.length; x++){ handleEvent(x) } } }) } function fillAutocomplete() { var destination = 'getAllArticleTitles' $.get(destination, null, function (data) { $('#articleEntryList').empty(); for (var x = 0; x < data.length; x++) { console.log(data[x]) var appendMe = $('<option>' + data[x]._id + " [revisions: " + data[x].count + ']</option>') $('#articleEntryList').append(appendMe); allArticleTitles[x] = data[x]._id; allArticletitlesWithRevisions[x] = data[x]._id + ' [Total Revisions: ' + data[x].count + ']'; } }) } function getIndividualArticleStats() { //Get article name var searchedArticle = $('#articleEntryBox').val(); var temp = searchedArticle.split("["); temp = temp[0]; if (temp.substring(temp.length - 1, temp.length) == " ") { temp = temp.substring(0, temp.length - 1); } searchedArticle = temp; var validTitle = false; //check if this is a valid article for (var n = 0; n < allArticleTitles.length; n++) { if (searchedArticle == allArticleTitles[n]) { validTitle = 
true; } } //Convert article string to be used with wiki link searchQuery = searchedArticle.replace(/\s+/g, '_'); //Retrieve last timestamp from DB var sendTitle = 'getLastTime?title=' + searchedArticle $.ajax({ url: sendTitle, type: 'GET', global: false, dataType: 'json', success: function(data) { lastTS = data; //AJAX within an AJAX var wikiEndpoint = "https://en.wikipedia.org/w/api.php", parameters = [ "action=query", "format=json", "formatversion=2", "prop=revisions", "titles="+searchQuery, "rvstart="+lastTS, "rvdir=newer", "order=desc", "rvlimit=max", //Only querying for timestamps "rvprop=timestamp", "origin=*", "callback=?"] var url = wikiEndpoint + "?" + parameters.join("&"); console.log(url); //Get data from MediaWiki API $.ajax({ url: url, type: 'GET', dataType: 'jsonp', contentType: "application/json; charset=utf-8", success: function(data, jqXHR) { page=data.query.pages; revs = page[Object.keys(page)[0]].revisions //Retrieve latest timestamp from the array latestTS = revs[Object.keys(revs).length-1].timestamp //Moment.js library to compare dates var diff = moment.duration(moment(latestTS).diff(moment(lastTS))); //Date conversions var days = parseInt(diff.asDays()); var hours = parseInt(diff.asHours()); adjustedHours = hours - days*24; var minutes = parseInt(diff.asMinutes()); minutes = minutes - (days*24*60 + adjustedHours*60); console.log(searchQuery+": "+days+" days "+adjustedHours+" hours " + minutes +" minutes"); console.log("Latest: "+latestTS); console.log("Database: "+lastTS); //Displays number of revisions if available if (hours<24) { $("#wasIUpdated").empty(); $("#wasIUpdated").text("No new revisions!"); } else { $("#wasIUpdated").empty(); $("#wasIUpdated").text("Update available: " +revs.length + " revisions"); } } }) } }) if (!validTitle) { alert("There are no articles with this title (titles are case sensitive)"); } else { //check if any data is being returned //Add if statement with alert in case of an incorrect title //Display article name 
$("#putTitleHere").empty(); var newTitle = searchedArticle.trim(); for(var c = 0; c < allArticleTitles.length; c++){ var toTest = allArticleTitles[c].trim(); if(newTitle == toTest){ newTitle = allArticletitlesWithRevisions[c]; } } $("#putTitleHere").text(newTitle); //Add in title and class of table $("putTopRegUsersHeading").empty(); $("#putTopRegUsersHeading").text("Top Users For " + searchedArticle); $("#putClassHere").addClass("longtable"); //Add in charts $("#putChartTitleHere").text("Graphs"); //Get chart data and draw charts var destination2 = 'pieDataIndividualArticle?title=' + searchedArticle $.getJSON(destination2, null, function (rdata) { pieData = rdata } ); var destination3 = 'barDataIndividualArticle?title=' + searchedArticle $.getJSON(destination3, null, function (rdata) { barData = rdata drawBar('#IndividualArticleChart'); //default chart } ); //Draws the chart controls and add the top 5 users to the table var chartControls = $.get('views/chartControls.html', null, function (data) { appendMe = $(data) $('#putChartControlsHere').empty(); $('#putChartControlsHere').append(appendMe); //Add in top 5 users to both the chart above and the select list var destination = 'getTopUsersForArticle?title=' + searchedArticle $.get(destination, null, function (data) { console.log(data) $('#putTopRegUsers').empty(); for (var x = 0; x < data.length; x++) { var ranking = x + 1; ranking = ranking + '. 
' var appendMe = $('<li>' + ranking + data[x]._id + ' (revisions: ' + data[x].count + ')' + '</li>'); $('#putTopRegUsers').append(appendMe); appendMe = $('<option value=' + data[x]._id + '>' + data[x]._id + '</option>'); $('#putUsersHere').append(appendMe) } }) //Registers event handler for update button $('#chartSwitcherIndividualArticle').click(function () { //get value from select box var specificUser = $('#chartSelectorIndividualArticles').val(); if (specificUser == "In Total") { drawPie('#IndividualArticleChart'); } else if (specificUser == "Over Time") { drawBar('#IndividualArticleChart'); } else { var destination4 = 'barDataSpecificUser?user=' + specificUser + '&title=' + searchedArticle $.getJSON(destination4, null, function (rdata) { console.log('here it is...') console.log(rdata) drawBarSpecificUser('#IndividualArticleChart', rdata); } ); } }) }) } }
getNewestArticles
identifier_name
chart.js
google.charts.load('current', { packages: ['corechart'] }); //global variable for storing the ids of articles currently shown in author analytics page var articlesShown = []; var allArticleTitles = []; var allArticletitlesWithRevisions = []; //chart otpions var options = { 'fontName':'Avenir', 'backgroundColor': { fill:'#F3F3F3', strokeWidth:10, stroke:'#CE953F' }, 'bar': {groupWidth: "70%"}, 'width': '100%', 'height': 500, 'hAxis':{ showTextEvery:1, maxAlternation:1, minTextSpacing:1, textStyle:{ fontSize:11, bold:true, }, }, 'legend': { position: 'top', alignment:'center' }, 'vAxis': { viewWindowMode: 'pretty', viewWindow: { min: 0, }, gridlines: { count: 9, } } }; var pieData var barData //Onload function window.onload = function () { getOverviewPage(); //loads overview page by default //when menu links are clicked, other pages can be loaded $('#ArticleAnalytics').click(function () { resetMenuBar(); $('#ArticleAnalytics').addClass("active"); getArticleAnalyticsPage(); }) $('#AuthorAnalytics').click(function () { resetMenuBar(); $('#AuthorAnalytics').addClass("active"); getAuthorAnalyticsPage(); }) $('#Overview').click(function () { resetMenuBar(); $('#Overview').addClass("active"); getOverviewPage(); }) } /******************************** FUNCTIONS FOR LOADING MAIN PAGES ********************************/ //Replaces entire page with article analytics function getArticleAnalyticsPage() { $('#main').empty(); $('#main').load('views/articleAnalytics.html', null, function () { fillAutocomplete(); $('#articleSearchButton').click(function () { getIndividualArticleStats(); }) }) } function getOverviewPage() { $('#main').empty(); //Clear page $('#main').load('views/overview.html', null, function () { //load overview page //Intial Data load getTopRevs(); getBotRevs(); getOldestArticles(); getNewestArticles(); getTitleLargestRegUser(); getTitleLeastRegUser(); //get chart data $.getJSON('/pieData', null, function (rdata) { pieData = rdata } ); $.getJSON('/barData', null, 
function (rdata) { barData = rdata drawBar('#myChart'); } ); //Update based on user input $('[name=topBotRevUpdate]').click(function () { getTopRevs(); getBotRevs(); }); $('[name=chartUpdate]').click(function () { var whichChart = $('[name=chartSelector]').val(); if (whichChart == "In Total") { drawPie('#myChart'); } else { drawBar('#myChart'); } }); }); } function getAuthorAnalyticsPage() { $('#main').empty(); $('#main').load('views/authorAnalytics.html', null, function () { $('#authorSearchButton').click(function () { getAuthorArticleList(); }) }); } //clears the .active class from the menu bar function resetMenuBar() { $('#Overview').removeClass("active"); $('#ArticleAnalytics').removeClass("active"); $('#AuthorAnalytics').removeClass("active"); } /****************** LOAD THE CHART DATA *******************/ function drawPie(where) { console.log(where) graphData = new google.visualization.DataTable(); graphData.addColumn('string', 'Element'); graphData.addColumn('number', 'Percentage'); $.each(pieData, function (key, val) { graphData.addRow([key, val]); }) var chart = new google.visualization.PieChart($(where)[0]); chart.draw(graphData, options); } function drawBar(where) { graphData = new google.visualization.DataTable(); graphData.addColumn('string', 'Year'); graphData.addColumn('number', 'RegularUsers'); graphData.addColumn('number', 'Bots'); graphData.addColumn('number', 'Admins'); graphData.addColumn('number', 'Anon'); var test = []; for (var i in barData) { test.push(barData[i]) } // console.log(test); for (var x = 0; x < test.length; x++) { graphData.addRow([test[x].Year, test[x].RegularUsers, test[x].Bots, test[x].Admins, test[x].Anon]); } var chart = new google.visualization.ColumnChart($(where)[0]); chart.draw(graphData, options); } function drawBarSpecificUser(where, dataToUse) { graphData = new google.visualization.DataTable(); graphData.addColumn('string', 'Year'); graphData.addColumn('number', 'Revisions'); var test = []; for (var i in dataToUse) { 
test.push(dataToUse[i]) } // console.log(test); for (var x = 0; x < test.length; x++) { graphData.addRow([test[x].Year, test[x].Revisions]); } var chart = new google.visualization.BarChart($(where)[0]); chart.draw(graphData, options); } /******************************************* FUNCTIONS FOR LOADING REGULAR DATA INTO HTML ********************************************/ function getTopRevs() { var quantity = $('[name=quantity]').val(); var destination = 'getTopRevs?quantity=' + quantity; $.get(destination, quantity, function (data) { $('#topRev').empty(); for (var x = 0; x < data.length; x++) { var num = x + 1; num = num + '. '; var appendMe = $('<li>' + num + data[x]._id + '</li>'); $('#topRev').append(appendMe); } }) } function getBotRevs() { var quantity = $('[name=quantity]').val(); var destination = 'getBotRevs?quantity=' + quantity; $.get(destination, quantity, function (data) { $('#botRev').empty(); for (var x = 0; x < data.length; x++) { var num = x + 1; num = num + '. '; var appendMe = $('<li>' + num + data[x]._id + '</li>'); $('#botRev').append(appendMe); } }) } function getOldestArticles() { var destination = 'getOldestArticles'; $.get(destination, null, function (data) { console.log(data); $('#oldestArticles').empty(); for (var x = 0; x < data.length; x++) { var num = x + 1; num = num + '. '; var appendMe = $('<li>' + num + data[x]._id + '</li>'); $('#oldestArticles').append(appendMe); } }) } function getNewestArticles() { var destination = 'getNewestArticles'; console.log('here'); $.get(destination, null, function (data) { console.log(data); $('#newestArticles').empty(); for (var x = 0; x < data.length; x++) { var num = x + 1; num = num + '. 
'; var appendMe = $('<li>' + num + data[x]._id + '</li>'); $('#newestArticles').append(appendMe); } }) } function getTitleLargestRegUser(){ var destination = 'getLeastRegoUser'; $.get(destination, null, function (data) { console.log(data); $('#mostUsers').empty(); $('#mostUsers').text(data); }) } function getTitleLeastRegUser(){ var destination = 'getLargestRegoUser'; $.get(destination, null, function (data) { console.log(data); $('#leastUsers').empty(); $('#leastUsers').text(data); }) } function getAuthorArticleList() { var authorName = $('#authorEntryBox').val(); console.log(authorName) var destination = 'getAuthorArticleList?authorName=' + authorName; var putListHere = $('#articleList'); $.get(destination, null, function (data) { console.log('Here is the user list ') console.log(data) if (data.length == 0) { alert("Could not find any users with names matching that query"); } else { // var heading = $('<thead><tr>' + '<th>' + 'Article Name' + '</th>' + '<th>' + 'Number of Revisions' + '</th>' + '</tr></thead><tbody>') // $('#articleList').append(heading); // for (var x = 0; x < data.length; x++) { // var test = "<tr onclick='getTimestamps()' id= '" + "ArticleNameIs" + data[x]._id + "'>" + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>'; // var appendMe = $("<tr onclick='getTimestamps()' id= '" + "ArticleNameIs" + data[x]._id + "'>" + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>'); // console.log(test) // $('#articleList').append(appendMe); // } // var ending = $('</tbody>'); // $('#articleList').append(ending); putListHere.empty(); //Add headers var theader = $("<thead><tr><th>User Name</th><th>Article Name</th><th>Number of Revisions</th></tr></thead>") $('#articleList').append(theader); //Create data table for (var x = 0; x < data.length; x++)
//Create event handler seperately function handleEvent(idVal){ var elementGetter = '#entryID' + idVal; $(elementGetter).click(function(){ $(".timestamp").remove(); console.log(elementGetter) var newdestination = 'getTimestamps?authorName=' + data[idVal]._id + "&title=" + data[idVal].user; $.get(newdestination, null, function (newdata) { console.log(newdata) for(var z = 0; z < newdata.length; z++){ var myDate = new Date(newdata[z].timestamp) console.log(myDate) $('<tr><td class="timestamp">' + " " + myDate.toUTCString() + '</td></tr>').insertAfter(elementGetter); } }) }) } for(var x = 0; x < data.length; x++){ handleEvent(x) } } }) } function fillAutocomplete() { var destination = 'getAllArticleTitles' $.get(destination, null, function (data) { $('#articleEntryList').empty(); for (var x = 0; x < data.length; x++) { console.log(data[x]) var appendMe = $('<option>' + data[x]._id + " [revisions: " + data[x].count + ']</option>') $('#articleEntryList').append(appendMe); allArticleTitles[x] = data[x]._id; allArticletitlesWithRevisions[x] = data[x]._id + ' [Total Revisions: ' + data[x].count + ']'; } }) } function getIndividualArticleStats() { //Get article name var searchedArticle = $('#articleEntryBox').val(); var temp = searchedArticle.split("["); temp = temp[0]; if (temp.substring(temp.length - 1, temp.length) == " ") { temp = temp.substring(0, temp.length - 1); } searchedArticle = temp; var validTitle = false; //check if this is a valid article for (var n = 0; n < allArticleTitles.length; n++) { if (searchedArticle == allArticleTitles[n]) { validTitle = true; } } //Convert article string to be used with wiki link searchQuery = searchedArticle.replace(/\s+/g, '_'); //Retrieve last timestamp from DB var sendTitle = 'getLastTime?title=' + searchedArticle $.ajax({ url: sendTitle, type: 'GET', global: false, dataType: 'json', success: function(data) { lastTS = data; //AJAX within an AJAX var wikiEndpoint = "https://en.wikipedia.org/w/api.php", parameters = [ 
"action=query", "format=json", "formatversion=2", "prop=revisions", "titles="+searchQuery, "rvstart="+lastTS, "rvdir=newer", "order=desc", "rvlimit=max", //Only querying for timestamps "rvprop=timestamp", "origin=*", "callback=?"] var url = wikiEndpoint + "?" + parameters.join("&"); console.log(url); //Get data from MediaWiki API $.ajax({ url: url, type: 'GET', dataType: 'jsonp', contentType: "application/json; charset=utf-8", success: function(data, jqXHR) { page=data.query.pages; revs = page[Object.keys(page)[0]].revisions //Retrieve latest timestamp from the array latestTS = revs[Object.keys(revs).length-1].timestamp //Moment.js library to compare dates var diff = moment.duration(moment(latestTS).diff(moment(lastTS))); //Date conversions var days = parseInt(diff.asDays()); var hours = parseInt(diff.asHours()); adjustedHours = hours - days*24; var minutes = parseInt(diff.asMinutes()); minutes = minutes - (days*24*60 + adjustedHours*60); console.log(searchQuery+": "+days+" days "+adjustedHours+" hours " + minutes +" minutes"); console.log("Latest: "+latestTS); console.log("Database: "+lastTS); //Displays number of revisions if available if (hours<24) { $("#wasIUpdated").empty(); $("#wasIUpdated").text("No new revisions!"); } else { $("#wasIUpdated").empty(); $("#wasIUpdated").text("Update available: " +revs.length + " revisions"); } } }) } }) if (!validTitle) { alert("There are no articles with this title (titles are case sensitive)"); } else { //check if any data is being returned //Add if statement with alert in case of an incorrect title //Display article name $("#putTitleHere").empty(); var newTitle = searchedArticle.trim(); for(var c = 0; c < allArticleTitles.length; c++){ var toTest = allArticleTitles[c].trim(); if(newTitle == toTest){ newTitle = allArticletitlesWithRevisions[c]; } } $("#putTitleHere").text(newTitle); //Add in title and class of table $("putTopRegUsersHeading").empty(); $("#putTopRegUsersHeading").text("Top Users For " + searchedArticle); 
$("#putClassHere").addClass("longtable"); //Add in charts $("#putChartTitleHere").text("Graphs"); //Get chart data and draw charts var destination2 = 'pieDataIndividualArticle?title=' + searchedArticle $.getJSON(destination2, null, function (rdata) { pieData = rdata } ); var destination3 = 'barDataIndividualArticle?title=' + searchedArticle $.getJSON(destination3, null, function (rdata) { barData = rdata drawBar('#IndividualArticleChart'); //default chart } ); //Draws the chart controls and add the top 5 users to the table var chartControls = $.get('views/chartControls.html', null, function (data) { appendMe = $(data) $('#putChartControlsHere').empty(); $('#putChartControlsHere').append(appendMe); //Add in top 5 users to both the chart above and the select list var destination = 'getTopUsersForArticle?title=' + searchedArticle $.get(destination, null, function (data) { console.log(data) $('#putTopRegUsers').empty(); for (var x = 0; x < data.length; x++) { var ranking = x + 1; ranking = ranking + '. ' var appendMe = $('<li>' + ranking + data[x]._id + ' (revisions: ' + data[x].count + ')' + '</li>'); $('#putTopRegUsers').append(appendMe); appendMe = $('<option value=' + data[x]._id + '>' + data[x]._id + '</option>'); $('#putUsersHere').append(appendMe) } }) //Registers event handler for update button $('#chartSwitcherIndividualArticle').click(function () { //get value from select box var specificUser = $('#chartSelectorIndividualArticles').val(); if (specificUser == "In Total") { drawPie('#IndividualArticleChart'); } else if (specificUser == "Over Time") { drawBar('#IndividualArticleChart'); } else { var destination4 = 'barDataSpecificUser?user=' + specificUser + '&title=' + searchedArticle $.getJSON(destination4, null, function (rdata) { console.log('here it is...') console.log(rdata) drawBarSpecificUser('#IndividualArticleChart', rdata); } ); } }) }) } }
{ var appnedMe = $("<tr class='articleEntry' id= '" + "entryID" + x + "'>" + '<td>' + data[x].user + '</td>' + "<td>" + data[x]._id + "</td>" + '<td>' + data[x].count + '</td>' + '</tr>'); $('#articleList').append(appnedMe); var temp = '#entryID' + x; // $(temp).click(function(x){ //Get timestamps // console.log(x) // }) }
conditional_block
start.go
/* Copyright SecureKey Technologies Inc. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package startcmd import ( "crypto/tls" "crypto/x509" "database/sql" "errors" "fmt" "net/http" "net/url" "strconv" "strings" "time" "github.com/cenkalti/backoff" "github.com/gorilla/mux" "github.com/hyperledger/aries-framework-go/pkg/client/didexchange" "github.com/hyperledger/aries-framework-go/pkg/client/presentproof" arieshttp "github.com/hyperledger/aries-framework-go/pkg/didcomm/transport/http" "github.com/hyperledger/aries-framework-go/pkg/framework/aries" "github.com/hyperledger/aries-framework-go/pkg/framework/aries/defaults" ariesctx "github.com/hyperledger/aries-framework-go/pkg/framework/context" "github.com/rs/cors" "github.com/spf13/cobra" "github.com/trustbloc/edge-core/pkg/log" "github.com/trustbloc/edge-core/pkg/storage/memstore" cmdutils "github.com/trustbloc/edge-core/pkg/utils/cmd" tlsutils "github.com/trustbloc/edge-core/pkg/utils/tls" "github.com/xo/dburl" ariespai "github.com/trustbloc/edge-adapter/pkg/aries" "github.com/trustbloc/edge-adapter/pkg/did" "github.com/trustbloc/edge-adapter/pkg/hydra" "github.com/trustbloc/edge-adapter/pkg/presentationex" "github.com/trustbloc/edge-adapter/pkg/restapi/healthcheck" "github.com/trustbloc/edge-adapter/pkg/restapi/issuer" issuerops "github.com/trustbloc/edge-adapter/pkg/restapi/issuer/operation" "github.com/trustbloc/edge-adapter/pkg/restapi/rp" rpops "github.com/trustbloc/edge-adapter/pkg/restapi/rp/operation" ) var logger = log.New("edge-adapter") const ( hostURLFlagName = "host-url" hostURLFlagShorthand = "u" hostURLFlagUsage = "URL to run the adapter-rest instance on. Format: HostName:Port." hostURLEnvKey = "ADAPTER_REST_HOST_URL" datasourceNameFlagName = "dsn" datasourceNameFlagUsage = "Datasource Name with credentials if required," + " eg. 
mysql://root:secret@localhost:3306/adapter" + "Alternatively, this can be set with the following environment variable: " + datasourceNameEnvKey datasourceNameEnvKey = "ADAPTER_REST_DSN" oidcProviderURLFlagName = "op-url" oidcProviderURLFlagUsage = "URL for the OIDC provider." + "Alternatively, this can be set with the following environment variable: " + oidcProviderEnvKey oidcProviderEnvKey = "ADAPTER_REST_OP_URL" staticFilesPathFlagName = "static-path" staticFilesPathFlagUsage = "Path to the folder where the static files are to be hosted under " + uiEndpoint + "." + "Alternatively, this can be set with the following environment variable: " + staticFilesPathEnvKey staticFilesPathEnvKey = "ADAPTER_REST_STATIC_FILES" tlsSystemCertPoolFlagName = "tls-systemcertpool" tlsSystemCertPoolFlagUsage = "Use system certificate pool." + " Possible values [true] [false]. Defaults to false if not set." + " Alternatively, this can be set with the following environment variable: " + tlsSystemCertPoolEnvKey tlsSystemCertPoolEnvKey = "ADAPTER_REST_TLS_SYSTEMCERTPOOL" tlsCACertsFlagName = "tls-cacerts" tlsCACertsFlagUsage = "Comma-Separated list of ca certs path." + " Alternatively, this can be set with the following environment variable: " + tlsCACertsEnvKey tlsCACertsEnvKey = "ADAPTER_REST_TLS_CACERTS" presentationDefinitionsFlagName = "presentation-definitions-file" presentationDefinitionsFlagUsage = "Path to presentation definitions file with input_descriptors." presentationDefinitionsEnvKey = "ADAPTER_REST_PRESENTATION_DEFINITIONS_FILE" hydraURLFlagName = "hydra-url" hydraURLFlagUsage = "Base URL to the hydra service." + "Alternatively, this can be set with the following environment variable: " + hydraURLEnvKey hydraURLEnvKey = "ADAPTER_REST_HYDRA_URL" modeFlagName = "mode" modeFlagUsage = "Mode in which the edge-adapter service will run. Possible values: " + "['issuer', 'rp']." 
modeEnvKey = "ADAPTER_REST_MODE" // inbound host url flag didCommInboundHostFlagName = "didcomm-inbound-host" didCommInboundHostEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST" didCommInboundHostFlagUsage = "Inbound Host Name:Port. This is used internally to start the didcomm server." + " Alternatively, this can be set with the following environment variable: " + didCommInboundHostEnvKey // inbound host external url flag didCommInboundHostExternalFlagName = "didcomm-inbound-host-external" didCommInboundHostExternalEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST_EXTERNAL" didCommInboundHostExternalFlagUsage = "Inbound Host External Name:Port." + " This is the URL for the inbound server as seen externally." + " If not provided, then the internal inbound host will be used here." + " Alternatively, this can be set with the following environment variable: " + didCommInboundHostExternalEnvKey // db path didCommDBPathFlagName = "didcomm-db-path" didCommDBPathEnvKey = "ADAPTER_REST_DIDCOMM_DB_PATH" didCommDBPathFlagUsage = "Path to database." + " Alternatively, this can be set with the following environment variable: " + didCommDBPathEnvKey trustblocDomainFlagName = "dids-trustbloc-domain" trustblocDomainEnvKey = "ADAPTER_REST_TRUSTBLOC_DOMAIN" trustblocDomainFlagUsage = "URL to the did:trustbloc consortium's domain." + " Alternatively, this can be set with the following environment variable: " + trustblocDomainEnvKey ) // API endpoints. 
const ( uiEndpoint = "/ui" // modes issuerMode = "issuer" rpMode = "rp" ) type didCommParameters struct { inboundHostInternal string inboundHostExternal string dbPath string } type adapterRestParameters struct { hostURL string tlsSystemCertPool bool tlsCACerts []string dsn string oidcProviderURL string staticFiles string presentationDefinitionsFile string // TODO assuming same base path for all hydra endpoints for now hydraURL string mode string didCommParameters *didCommParameters // didcomm trustblocDomain string } type server interface { ListenAndServe(host string, router http.Handler) error } // HTTPServer represents an actual HTTP server implementation. type HTTPServer struct{} // ListenAndServe starts the server using the standard Go HTTP server implementation. func (s *HTTPServer) ListenAndServe(host string, router http.Handler) error { return http.ListenAndServe(host, router) } // GetStartCmd returns the Cobra start command. func GetStartCmd(srv server) *cobra.Command { startCmd := createStartCmd(srv) createFlags(startCmd) return startCmd } func createStartCmd(srv server) *cobra.Command { return &cobra.Command{ Use: "start", Short: "Start adapter-rest", Long: "Start adapter-rest inside the edge-adapter", RunE: func(cmd *cobra.Command, args []string) error { parameters, err := getAdapterRestParameters(cmd) if err != nil { return err } return startAdapterService(parameters, srv) }, } } //nolint:funlen,gocyclo func getAdapterRestParameters(cmd *cobra.Command) (*adapterRestParameters, error) { hostURL, err := cmdutils.GetUserSetVarFromString(cmd, hostURLFlagName, hostURLEnvKey, false) if err != nil { return nil, err } tlsSystemCertPool, tlsCACerts, err := getTLS(cmd) if err != nil { return nil, err } dsn, err := cmdutils.GetUserSetVarFromString(cmd, datasourceNameFlagName, datasourceNameEnvKey, true) if err != nil { return nil, err } oidcURL, err := cmdutils.GetUserSetVarFromString(cmd, oidcProviderURLFlagName, oidcProviderEnvKey, true) if err != nil { return 
nil, err } staticFiles, err := cmdutils.GetUserSetVarFromString(cmd, staticFilesPathFlagName, staticFilesPathEnvKey, true) if err != nil { return nil, err } mode, err := cmdutils.GetUserSetVarFromString(cmd, modeFlagName, modeEnvKey, true) if err != nil { return nil, err } presentationDefinitionsFile, err := cmdutils.GetUserSetVarFromString(cmd, presentationDefinitionsFlagName, presentationDefinitionsEnvKey, mode != rpMode) if err != nil { return nil, err } hydraURL, err := cmdutils.GetUserSetVarFromString(cmd, hydraURLFlagName, hydraURLEnvKey, true) if err != nil { return nil, err } // didcomm didCommParameters, err := getDIDCommParams(cmd) if err != nil { return nil, err } trustblocDomain, err := cmdutils.GetUserSetVarFromString(cmd, trustblocDomainFlagName, trustblocDomainEnvKey, true) if err != nil { return nil, err } return &adapterRestParameters{ hostURL: hostURL, tlsSystemCertPool: tlsSystemCertPool, tlsCACerts: tlsCACerts, dsn: dsn, oidcProviderURL: oidcURL, staticFiles: staticFiles, presentationDefinitionsFile: presentationDefinitionsFile, hydraURL: hydraURL, mode: mode, didCommParameters: didCommParameters, trustblocDomain: trustblocDomain, }, nil } func getDIDCommParams(cmd *cobra.Command) (*didCommParameters, error) { inboundHostInternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostFlagName, didCommInboundHostEnvKey, true) if err != nil { return nil, err } inboundHostExternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostExternalFlagName, didCommInboundHostExternalEnvKey, true) if err != nil { return nil, err } dbPath, err := cmdutils.GetUserSetVarFromString(cmd, didCommDBPathFlagName, didCommDBPathEnvKey, true) if err != nil { return nil, err } return &didCommParameters{ inboundHostInternal: inboundHostInternal, inboundHostExternal: inboundHostExternal, dbPath: dbPath, }, nil } func getTLS(cmd *cobra.Command) (bool, []string, error)
func createFlags(startCmd *cobra.Command) { startCmd.Flags().StringP(hostURLFlagName, hostURLFlagShorthand, "", hostURLFlagUsage) startCmd.Flags().StringP(tlsSystemCertPoolFlagName, "", "", tlsSystemCertPoolFlagUsage) startCmd.Flags().StringArrayP(tlsCACertsFlagName, "", []string{}, tlsCACertsFlagUsage) startCmd.Flags().StringP(oidcProviderURLFlagName, "", "", oidcProviderURLFlagUsage) startCmd.Flags().StringP(datasourceNameFlagName, "", "", datasourceNameFlagUsage) startCmd.Flags().StringP(staticFilesPathFlagName, "", "", staticFilesPathFlagUsage) startCmd.Flags().StringP(presentationDefinitionsFlagName, "", "", presentationDefinitionsFlagUsage) startCmd.Flags().StringP(hydraURLFlagName, "", "", hydraURLFlagUsage) startCmd.Flags().StringP(modeFlagName, "", "", modeFlagUsage) // didcomm startCmd.Flags().StringP(didCommInboundHostFlagName, "", "", didCommInboundHostFlagUsage) startCmd.Flags().StringP(didCommInboundHostExternalFlagName, "", "", didCommInboundHostExternalFlagUsage) startCmd.Flags().StringP(didCommDBPathFlagName, "", "", didCommDBPathFlagUsage) startCmd.Flags().StringP(trustblocDomainFlagName, "", "", trustblocDomainFlagUsage) } func startAdapterService(parameters *adapterRestParameters, srv server) error { rootCAs, err := tlsutils.GetCertPool(parameters.tlsSystemCertPool, parameters.tlsCACerts) if err != nil { return err } logger.Debugf("root ca's %v", rootCAs) router := mux.NewRouter() // add health check endpoint healthCheckService := healthcheck.New() healthCheckHandlers := healthCheckService.GetOperations() for _, handler := range healthCheckHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } ariesCtx, err := createAriesAgent(parameters, &tls.Config{RootCAs: rootCAs}) if err != nil { return err } // add endpoints switch parameters.mode { case rpMode: err = addRPHandlers(parameters, ariesCtx, router, rootCAs) if err != nil { return nil } case issuerMode: err = addIssuerHandlers(parameters, ariesCtx, router) if 
err != nil { return nil } default: return fmt.Errorf("invalid mode : %s", parameters.mode) } logger.Infof("starting %s adapter rest server on host %s", parameters.mode, parameters.hostURL) return srv.ListenAndServe(parameters.hostURL, constructCORSHandler(router)) } func addRPHandlers( parameters *adapterRestParameters, ctx ariespai.CtxProvider, router *mux.Router, rootCAs *x509.CertPool) error { presentationExProvider, err := presentationex.New(parameters.presentationDefinitionsFile) if err != nil { return err } hydraURL, err := url.Parse(parameters.hydraURL) if err != nil { return err } didClient, err := didexchange.New(ctx) if err != nil { return fmt.Errorf("failed to initialized didexchange client : %w", err) } presentProofClient, err := presentproof.New(ctx) if err != nil { return err } // TODO init OIDC stuff in iteration 2 - https://github.com/trustbloc/edge-adapter/issues/24 // add rp endpoints rpService, err := rp.New(&rpops.Config{ PresentationExProvider: presentationExProvider, Hydra: hydra.NewClient(hydraURL, rootCAs), UIEndpoint: uiEndpoint, DIDExchClient: didClient, Store: memstore.NewProvider(), PublicDIDCreator: did.NewTrustblocDIDCreator( parameters.trustblocDomain, parameters.didCommParameters.inboundHostExternal, ctx.KMS(), rootCAs), AriesStorageProvider: ctx, PresentProofClient: presentProofClient, }) if err != nil { return err } rpHandlers := rpService.GetOperations() for _, handler := range rpHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } // static frontend router.PathPrefix(uiEndpoint). Subrouter(). Methods(http.MethodGet). 
HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile)) return nil } func addIssuerHandlers(parameters *adapterRestParameters, ariesCtx ariespai.CtxProvider, router *mux.Router) error { // add issuer endpoints issuerService, err := issuer.New(&issuerops.Config{ AriesCtx: ariesCtx, UIEndpoint: uiEndpoint, // TODO https://github.com/trustbloc/edge-adapter/issues/42 use sql store StoreProvider: memstore.NewProvider(), }) if err != nil { return err } rpHandlers := issuerService.GetOperations() for _, handler := range rpHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } // static frontend router.PathPrefix(uiEndpoint). Subrouter(). Methods(http.MethodGet). HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile)) return nil } func uiHandler( basePath string, fileServer func(http.ResponseWriter, *http.Request, string)) func(http.ResponseWriter, *http.Request) { return func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == uiEndpoint { fileServer(w, r, strings.ReplaceAll(basePath+"/index.html", "//", "/")) return } fileServer(w, r, strings.ReplaceAll(basePath+"/"+r.URL.Path[len(uiEndpoint):], "//", "/")) } } func constructCORSHandler(handler http.Handler) http.Handler { return cors.New( cors.Options{ AllowedMethods: []string{http.MethodGet, http.MethodPost}, AllowedHeaders: []string{"Origin", "Accept", "Content-Type", "X-Requested-With", "Authorization"}, }, ).Handler(handler) } //nolint:deadcode,unused func initDB(dsn string) (*sql.DB, error) { const ( sleep = 1 * time.Second numRetries = 30 ) var dbms *sql.DB err := backoff.RetryNotify( func() error { var openErr error dbms, openErr = dburl.Open(dsn) return openErr }, backoff.WithMaxRetries(backoff.NewConstantBackOff(sleep), numRetries), func(retryErr error, t time.Duration) { logger.Warnf( "failed to connect to database, will sleep for %d before trying again : %s\n", t, retryErr) }, ) if err != nil { return nil, fmt.Errorf("failed to connect to database at %s 
: %w", dsn, err) } return dbms, nil } func createAriesAgent(parameters *adapterRestParameters, tlsConfig *tls.Config) (*ariesctx.Provider, error) { var opts []aries.Option if parameters.didCommParameters.inboundHostInternal == "" { return nil, errors.New("didcomm inbound host is mandatory") } if parameters.didCommParameters.dbPath != "" { opts = append(opts, defaults.WithStorePath(parameters.didCommParameters.dbPath)) } inboundTransportOpt := defaults.WithInboundHTTPAddr(parameters.didCommParameters.inboundHostInternal, parameters.didCommParameters.inboundHostExternal) opts = append(opts, inboundTransportOpt) outbound, err := arieshttp.NewOutbound(arieshttp.WithOutboundTLSConfig(tlsConfig)) if err != nil { return nil, fmt.Errorf("aries-framework - failed to create outbound tranpsort opts : %w", err) } opts = append(opts, aries.WithOutboundTransports(outbound)) framework, err := aries.New(opts...) if err != nil { return nil, fmt.Errorf("aries-framework - failed to initialize framework : %w", err) } ctx, err := framework.Context() if err != nil { return nil, fmt.Errorf("aries-framework - failed to get aries context : %w", err) } return ctx, nil }
{ tlsSystemCertPoolString, err := cmdutils.GetUserSetVarFromString(cmd, tlsSystemCertPoolFlagName, tlsSystemCertPoolEnvKey, true) if err != nil { return false, nil, err } tlsSystemCertPool := false if tlsSystemCertPoolString != "" { tlsSystemCertPool, err = strconv.ParseBool(tlsSystemCertPoolString) if err != nil { return false, nil, err } } tlsCACerts, err := cmdutils.GetUserSetVarFromArrayString(cmd, tlsCACertsFlagName, tlsCACertsEnvKey, true) if err != nil { return false, nil, err } return tlsSystemCertPool, tlsCACerts, nil }
identifier_body
start.go
/* Copyright SecureKey Technologies Inc. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package startcmd import ( "crypto/tls" "crypto/x509" "database/sql" "errors" "fmt" "net/http" "net/url" "strconv" "strings" "time" "github.com/cenkalti/backoff" "github.com/gorilla/mux" "github.com/hyperledger/aries-framework-go/pkg/client/didexchange" "github.com/hyperledger/aries-framework-go/pkg/client/presentproof" arieshttp "github.com/hyperledger/aries-framework-go/pkg/didcomm/transport/http" "github.com/hyperledger/aries-framework-go/pkg/framework/aries" "github.com/hyperledger/aries-framework-go/pkg/framework/aries/defaults" ariesctx "github.com/hyperledger/aries-framework-go/pkg/framework/context" "github.com/rs/cors" "github.com/spf13/cobra" "github.com/trustbloc/edge-core/pkg/log" "github.com/trustbloc/edge-core/pkg/storage/memstore" cmdutils "github.com/trustbloc/edge-core/pkg/utils/cmd" tlsutils "github.com/trustbloc/edge-core/pkg/utils/tls" "github.com/xo/dburl" ariespai "github.com/trustbloc/edge-adapter/pkg/aries" "github.com/trustbloc/edge-adapter/pkg/did" "github.com/trustbloc/edge-adapter/pkg/hydra" "github.com/trustbloc/edge-adapter/pkg/presentationex" "github.com/trustbloc/edge-adapter/pkg/restapi/healthcheck" "github.com/trustbloc/edge-adapter/pkg/restapi/issuer" issuerops "github.com/trustbloc/edge-adapter/pkg/restapi/issuer/operation" "github.com/trustbloc/edge-adapter/pkg/restapi/rp" rpops "github.com/trustbloc/edge-adapter/pkg/restapi/rp/operation" ) var logger = log.New("edge-adapter") const ( hostURLFlagName = "host-url" hostURLFlagShorthand = "u" hostURLFlagUsage = "URL to run the adapter-rest instance on. Format: HostName:Port." hostURLEnvKey = "ADAPTER_REST_HOST_URL" datasourceNameFlagName = "dsn" datasourceNameFlagUsage = "Datasource Name with credentials if required," + " eg. 
mysql://root:secret@localhost:3306/adapter" + "Alternatively, this can be set with the following environment variable: " + datasourceNameEnvKey datasourceNameEnvKey = "ADAPTER_REST_DSN" oidcProviderURLFlagName = "op-url" oidcProviderURLFlagUsage = "URL for the OIDC provider." + "Alternatively, this can be set with the following environment variable: " + oidcProviderEnvKey oidcProviderEnvKey = "ADAPTER_REST_OP_URL" staticFilesPathFlagName = "static-path" staticFilesPathFlagUsage = "Path to the folder where the static files are to be hosted under " + uiEndpoint + "." + "Alternatively, this can be set with the following environment variable: " + staticFilesPathEnvKey staticFilesPathEnvKey = "ADAPTER_REST_STATIC_FILES" tlsSystemCertPoolFlagName = "tls-systemcertpool" tlsSystemCertPoolFlagUsage = "Use system certificate pool." + " Possible values [true] [false]. Defaults to false if not set." + " Alternatively, this can be set with the following environment variable: " + tlsSystemCertPoolEnvKey tlsSystemCertPoolEnvKey = "ADAPTER_REST_TLS_SYSTEMCERTPOOL" tlsCACertsFlagName = "tls-cacerts" tlsCACertsFlagUsage = "Comma-Separated list of ca certs path." + " Alternatively, this can be set with the following environment variable: " + tlsCACertsEnvKey tlsCACertsEnvKey = "ADAPTER_REST_TLS_CACERTS" presentationDefinitionsFlagName = "presentation-definitions-file" presentationDefinitionsFlagUsage = "Path to presentation definitions file with input_descriptors." presentationDefinitionsEnvKey = "ADAPTER_REST_PRESENTATION_DEFINITIONS_FILE" hydraURLFlagName = "hydra-url" hydraURLFlagUsage = "Base URL to the hydra service." + "Alternatively, this can be set with the following environment variable: " + hydraURLEnvKey hydraURLEnvKey = "ADAPTER_REST_HYDRA_URL" modeFlagName = "mode" modeFlagUsage = "Mode in which the edge-adapter service will run. Possible values: " + "['issuer', 'rp']." 
modeEnvKey = "ADAPTER_REST_MODE" // inbound host url flag didCommInboundHostFlagName = "didcomm-inbound-host" didCommInboundHostEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST" didCommInboundHostFlagUsage = "Inbound Host Name:Port. This is used internally to start the didcomm server." + " Alternatively, this can be set with the following environment variable: " + didCommInboundHostEnvKey // inbound host external url flag didCommInboundHostExternalFlagName = "didcomm-inbound-host-external" didCommInboundHostExternalEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST_EXTERNAL" didCommInboundHostExternalFlagUsage = "Inbound Host External Name:Port." + " This is the URL for the inbound server as seen externally." + " If not provided, then the internal inbound host will be used here." + " Alternatively, this can be set with the following environment variable: " + didCommInboundHostExternalEnvKey // db path didCommDBPathFlagName = "didcomm-db-path" didCommDBPathEnvKey = "ADAPTER_REST_DIDCOMM_DB_PATH" didCommDBPathFlagUsage = "Path to database." + " Alternatively, this can be set with the following environment variable: " + didCommDBPathEnvKey trustblocDomainFlagName = "dids-trustbloc-domain" trustblocDomainEnvKey = "ADAPTER_REST_TRUSTBLOC_DOMAIN" trustblocDomainFlagUsage = "URL to the did:trustbloc consortium's domain." + " Alternatively, this can be set with the following environment variable: " + trustblocDomainEnvKey ) // API endpoints. 
const ( uiEndpoint = "/ui" // modes issuerMode = "issuer" rpMode = "rp" ) type didCommParameters struct { inboundHostInternal string inboundHostExternal string dbPath string } type adapterRestParameters struct { hostURL string tlsSystemCertPool bool tlsCACerts []string dsn string oidcProviderURL string staticFiles string presentationDefinitionsFile string // TODO assuming same base path for all hydra endpoints for now hydraURL string mode string didCommParameters *didCommParameters // didcomm trustblocDomain string } type server interface { ListenAndServe(host string, router http.Handler) error } // HTTPServer represents an actual HTTP server implementation. type HTTPServer struct{} // ListenAndServe starts the server using the standard Go HTTP server implementation. func (s *HTTPServer) ListenAndServe(host string, router http.Handler) error { return http.ListenAndServe(host, router) } // GetStartCmd returns the Cobra start command. func GetStartCmd(srv server) *cobra.Command { startCmd := createStartCmd(srv) createFlags(startCmd) return startCmd } func createStartCmd(srv server) *cobra.Command { return &cobra.Command{ Use: "start", Short: "Start adapter-rest", Long: "Start adapter-rest inside the edge-adapter", RunE: func(cmd *cobra.Command, args []string) error { parameters, err := getAdapterRestParameters(cmd) if err != nil { return err } return startAdapterService(parameters, srv) }, } } //nolint:funlen,gocyclo func getAdapterRestParameters(cmd *cobra.Command) (*adapterRestParameters, error) { hostURL, err := cmdutils.GetUserSetVarFromString(cmd, hostURLFlagName, hostURLEnvKey, false) if err != nil { return nil, err } tlsSystemCertPool, tlsCACerts, err := getTLS(cmd) if err != nil { return nil, err } dsn, err := cmdutils.GetUserSetVarFromString(cmd, datasourceNameFlagName, datasourceNameEnvKey, true) if err != nil { return nil, err } oidcURL, err := cmdutils.GetUserSetVarFromString(cmd, oidcProviderURLFlagName, oidcProviderEnvKey, true) if err != nil { return 
nil, err } staticFiles, err := cmdutils.GetUserSetVarFromString(cmd, staticFilesPathFlagName, staticFilesPathEnvKey, true) if err != nil { return nil, err } mode, err := cmdutils.GetUserSetVarFromString(cmd, modeFlagName, modeEnvKey, true) if err != nil { return nil, err } presentationDefinitionsFile, err := cmdutils.GetUserSetVarFromString(cmd, presentationDefinitionsFlagName, presentationDefinitionsEnvKey, mode != rpMode) if err != nil { return nil, err } hydraURL, err := cmdutils.GetUserSetVarFromString(cmd, hydraURLFlagName, hydraURLEnvKey, true) if err != nil { return nil, err } // didcomm didCommParameters, err := getDIDCommParams(cmd) if err != nil { return nil, err } trustblocDomain, err := cmdutils.GetUserSetVarFromString(cmd, trustblocDomainFlagName, trustblocDomainEnvKey, true) if err != nil { return nil, err } return &adapterRestParameters{ hostURL: hostURL, tlsSystemCertPool: tlsSystemCertPool, tlsCACerts: tlsCACerts, dsn: dsn, oidcProviderURL: oidcURL, staticFiles: staticFiles, presentationDefinitionsFile: presentationDefinitionsFile, hydraURL: hydraURL, mode: mode, didCommParameters: didCommParameters, trustblocDomain: trustblocDomain, }, nil } func getDIDCommParams(cmd *cobra.Command) (*didCommParameters, error) { inboundHostInternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostFlagName, didCommInboundHostEnvKey, true) if err != nil { return nil, err } inboundHostExternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostExternalFlagName, didCommInboundHostExternalEnvKey, true) if err != nil { return nil, err } dbPath, err := cmdutils.GetUserSetVarFromString(cmd, didCommDBPathFlagName, didCommDBPathEnvKey, true) if err != nil { return nil, err } return &didCommParameters{ inboundHostInternal: inboundHostInternal, inboundHostExternal: inboundHostExternal, dbPath: dbPath, }, nil } func getTLS(cmd *cobra.Command) (bool, []string, error) { tlsSystemCertPoolString, err := cmdutils.GetUserSetVarFromString(cmd, 
tlsSystemCertPoolFlagName, tlsSystemCertPoolEnvKey, true) if err != nil { return false, nil, err } tlsSystemCertPool := false if tlsSystemCertPoolString != "" { tlsSystemCertPool, err = strconv.ParseBool(tlsSystemCertPoolString) if err != nil { return false, nil, err } } tlsCACerts, err := cmdutils.GetUserSetVarFromArrayString(cmd, tlsCACertsFlagName, tlsCACertsEnvKey, true) if err != nil { return false, nil, err } return tlsSystemCertPool, tlsCACerts, nil } func createFlags(startCmd *cobra.Command) { startCmd.Flags().StringP(hostURLFlagName, hostURLFlagShorthand, "", hostURLFlagUsage) startCmd.Flags().StringP(tlsSystemCertPoolFlagName, "", "", tlsSystemCertPoolFlagUsage) startCmd.Flags().StringArrayP(tlsCACertsFlagName, "", []string{}, tlsCACertsFlagUsage) startCmd.Flags().StringP(oidcProviderURLFlagName, "", "", oidcProviderURLFlagUsage) startCmd.Flags().StringP(datasourceNameFlagName, "", "", datasourceNameFlagUsage) startCmd.Flags().StringP(staticFilesPathFlagName, "", "", staticFilesPathFlagUsage) startCmd.Flags().StringP(presentationDefinitionsFlagName, "", "", presentationDefinitionsFlagUsage) startCmd.Flags().StringP(hydraURLFlagName, "", "", hydraURLFlagUsage) startCmd.Flags().StringP(modeFlagName, "", "", modeFlagUsage) // didcomm startCmd.Flags().StringP(didCommInboundHostFlagName, "", "", didCommInboundHostFlagUsage) startCmd.Flags().StringP(didCommInboundHostExternalFlagName, "", "", didCommInboundHostExternalFlagUsage) startCmd.Flags().StringP(didCommDBPathFlagName, "", "", didCommDBPathFlagUsage) startCmd.Flags().StringP(trustblocDomainFlagName, "", "", trustblocDomainFlagUsage) } func startAdapterService(parameters *adapterRestParameters, srv server) error { rootCAs, err := tlsutils.GetCertPool(parameters.tlsSystemCertPool, parameters.tlsCACerts) if err != nil { return err } logger.Debugf("root ca's %v", rootCAs) router := mux.NewRouter() // add health check endpoint healthCheckService := healthcheck.New() healthCheckHandlers := 
healthCheckService.GetOperations() for _, handler := range healthCheckHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } ariesCtx, err := createAriesAgent(parameters, &tls.Config{RootCAs: rootCAs}) if err != nil { return err } // add endpoints switch parameters.mode { case rpMode: err = addRPHandlers(parameters, ariesCtx, router, rootCAs) if err != nil { return nil } case issuerMode: err = addIssuerHandlers(parameters, ariesCtx, router) if err != nil { return nil } default: return fmt.Errorf("invalid mode : %s", parameters.mode) } logger.Infof("starting %s adapter rest server on host %s", parameters.mode, parameters.hostURL) return srv.ListenAndServe(parameters.hostURL, constructCORSHandler(router)) } func addRPHandlers( parameters *adapterRestParameters, ctx ariespai.CtxProvider, router *mux.Router, rootCAs *x509.CertPool) error { presentationExProvider, err := presentationex.New(parameters.presentationDefinitionsFile) if err != nil { return err } hydraURL, err := url.Parse(parameters.hydraURL) if err != nil { return err } didClient, err := didexchange.New(ctx) if err != nil { return fmt.Errorf("failed to initialized didexchange client : %w", err) } presentProofClient, err := presentproof.New(ctx) if err != nil { return err } // TODO init OIDC stuff in iteration 2 - https://github.com/trustbloc/edge-adapter/issues/24 // add rp endpoints rpService, err := rp.New(&rpops.Config{ PresentationExProvider: presentationExProvider, Hydra: hydra.NewClient(hydraURL, rootCAs), UIEndpoint: uiEndpoint, DIDExchClient: didClient, Store: memstore.NewProvider(), PublicDIDCreator: did.NewTrustblocDIDCreator( parameters.trustblocDomain, parameters.didCommParameters.inboundHostExternal, ctx.KMS(), rootCAs), AriesStorageProvider: ctx, PresentProofClient: presentProofClient, }) if err != nil { return err } rpHandlers := rpService.GetOperations() for _, handler := range rpHandlers { router.HandleFunc(handler.Path(), 
handler.Handle()).Methods(handler.Method()) } // static frontend router.PathPrefix(uiEndpoint). Subrouter(). Methods(http.MethodGet). HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile)) return nil } func addIssuerHandlers(parameters *adapterRestParameters, ariesCtx ariespai.CtxProvider, router *mux.Router) error { // add issuer endpoints issuerService, err := issuer.New(&issuerops.Config{ AriesCtx: ariesCtx, UIEndpoint: uiEndpoint, // TODO https://github.com/trustbloc/edge-adapter/issues/42 use sql store StoreProvider: memstore.NewProvider(), }) if err != nil { return err } rpHandlers := issuerService.GetOperations() for _, handler := range rpHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } // static frontend router.PathPrefix(uiEndpoint). Subrouter(). Methods(http.MethodGet). HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile)) return nil } func uiHandler( basePath string, fileServer func(http.ResponseWriter, *http.Request, string)) func(http.ResponseWriter, *http.Request) { return func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == uiEndpoint { fileServer(w, r, strings.ReplaceAll(basePath+"/index.html", "//", "/")) return } fileServer(w, r, strings.ReplaceAll(basePath+"/"+r.URL.Path[len(uiEndpoint):], "//", "/")) } } func constructCORSHandler(handler http.Handler) http.Handler { return cors.New( cors.Options{ AllowedMethods: []string{http.MethodGet, http.MethodPost}, AllowedHeaders: []string{"Origin", "Accept", "Content-Type", "X-Requested-With", "Authorization"}, }, ).Handler(handler) } //nolint:deadcode,unused func initDB(dsn string) (*sql.DB, error) { const ( sleep = 1 * time.Second numRetries = 30 ) var dbms *sql.DB err := backoff.RetryNotify( func() error { var openErr error dbms, openErr = dburl.Open(dsn) return openErr }, backoff.WithMaxRetries(backoff.NewConstantBackOff(sleep), numRetries), func(retryErr error, t time.Duration) { logger.Warnf( "failed to connect to database, will 
sleep for %d before trying again : %s\n", t, retryErr) }, ) if err != nil { return nil, fmt.Errorf("failed to connect to database at %s : %w", dsn, err) } return dbms, nil } func createAriesAgent(parameters *adapterRestParameters, tlsConfig *tls.Config) (*ariesctx.Provider, error) { var opts []aries.Option if parameters.didCommParameters.inboundHostInternal == "" { return nil, errors.New("didcomm inbound host is mandatory") } if parameters.didCommParameters.dbPath != "" { opts = append(opts, defaults.WithStorePath(parameters.didCommParameters.dbPath)) } inboundTransportOpt := defaults.WithInboundHTTPAddr(parameters.didCommParameters.inboundHostInternal, parameters.didCommParameters.inboundHostExternal) opts = append(opts, inboundTransportOpt) outbound, err := arieshttp.NewOutbound(arieshttp.WithOutboundTLSConfig(tlsConfig)) if err != nil { return nil, fmt.Errorf("aries-framework - failed to create outbound tranpsort opts : %w", err) } opts = append(opts, aries.WithOutboundTransports(outbound)) framework, err := aries.New(opts...) if err != nil { return nil, fmt.Errorf("aries-framework - failed to initialize framework : %w", err) } ctx, err := framework.Context() if err != nil
return ctx, nil }
{ return nil, fmt.Errorf("aries-framework - failed to get aries context : %w", err) }
conditional_block
start.go
/* Copyright SecureKey Technologies Inc. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package startcmd import ( "crypto/tls" "crypto/x509" "database/sql" "errors" "fmt" "net/http" "net/url" "strconv" "strings" "time" "github.com/cenkalti/backoff" "github.com/gorilla/mux" "github.com/hyperledger/aries-framework-go/pkg/client/didexchange" "github.com/hyperledger/aries-framework-go/pkg/client/presentproof" arieshttp "github.com/hyperledger/aries-framework-go/pkg/didcomm/transport/http" "github.com/hyperledger/aries-framework-go/pkg/framework/aries" "github.com/hyperledger/aries-framework-go/pkg/framework/aries/defaults" ariesctx "github.com/hyperledger/aries-framework-go/pkg/framework/context" "github.com/rs/cors" "github.com/spf13/cobra" "github.com/trustbloc/edge-core/pkg/log" "github.com/trustbloc/edge-core/pkg/storage/memstore" cmdutils "github.com/trustbloc/edge-core/pkg/utils/cmd" tlsutils "github.com/trustbloc/edge-core/pkg/utils/tls" "github.com/xo/dburl" ariespai "github.com/trustbloc/edge-adapter/pkg/aries" "github.com/trustbloc/edge-adapter/pkg/did" "github.com/trustbloc/edge-adapter/pkg/hydra" "github.com/trustbloc/edge-adapter/pkg/presentationex" "github.com/trustbloc/edge-adapter/pkg/restapi/healthcheck" "github.com/trustbloc/edge-adapter/pkg/restapi/issuer" issuerops "github.com/trustbloc/edge-adapter/pkg/restapi/issuer/operation" "github.com/trustbloc/edge-adapter/pkg/restapi/rp" rpops "github.com/trustbloc/edge-adapter/pkg/restapi/rp/operation" ) var logger = log.New("edge-adapter") const ( hostURLFlagName = "host-url" hostURLFlagShorthand = "u" hostURLFlagUsage = "URL to run the adapter-rest instance on. Format: HostName:Port." hostURLEnvKey = "ADAPTER_REST_HOST_URL" datasourceNameFlagName = "dsn" datasourceNameFlagUsage = "Datasource Name with credentials if required," + " eg. 
mysql://root:secret@localhost:3306/adapter" + "Alternatively, this can be set with the following environment variable: " + datasourceNameEnvKey datasourceNameEnvKey = "ADAPTER_REST_DSN" oidcProviderURLFlagName = "op-url" oidcProviderURLFlagUsage = "URL for the OIDC provider." + "Alternatively, this can be set with the following environment variable: " + oidcProviderEnvKey oidcProviderEnvKey = "ADAPTER_REST_OP_URL" staticFilesPathFlagName = "static-path" staticFilesPathFlagUsage = "Path to the folder where the static files are to be hosted under " + uiEndpoint + "." + "Alternatively, this can be set with the following environment variable: " + staticFilesPathEnvKey staticFilesPathEnvKey = "ADAPTER_REST_STATIC_FILES" tlsSystemCertPoolFlagName = "tls-systemcertpool" tlsSystemCertPoolFlagUsage = "Use system certificate pool." + " Possible values [true] [false]. Defaults to false if not set." + " Alternatively, this can be set with the following environment variable: " + tlsSystemCertPoolEnvKey tlsSystemCertPoolEnvKey = "ADAPTER_REST_TLS_SYSTEMCERTPOOL" tlsCACertsFlagName = "tls-cacerts" tlsCACertsFlagUsage = "Comma-Separated list of ca certs path." + " Alternatively, this can be set with the following environment variable: " + tlsCACertsEnvKey tlsCACertsEnvKey = "ADAPTER_REST_TLS_CACERTS" presentationDefinitionsFlagName = "presentation-definitions-file" presentationDefinitionsFlagUsage = "Path to presentation definitions file with input_descriptors." presentationDefinitionsEnvKey = "ADAPTER_REST_PRESENTATION_DEFINITIONS_FILE" hydraURLFlagName = "hydra-url" hydraURLFlagUsage = "Base URL to the hydra service." + "Alternatively, this can be set with the following environment variable: " + hydraURLEnvKey hydraURLEnvKey = "ADAPTER_REST_HYDRA_URL" modeFlagName = "mode" modeFlagUsage = "Mode in which the edge-adapter service will run. Possible values: " + "['issuer', 'rp']." 
modeEnvKey = "ADAPTER_REST_MODE" // inbound host url flag didCommInboundHostFlagName = "didcomm-inbound-host" didCommInboundHostEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST" didCommInboundHostFlagUsage = "Inbound Host Name:Port. This is used internally to start the didcomm server." + " Alternatively, this can be set with the following environment variable: " + didCommInboundHostEnvKey // inbound host external url flag didCommInboundHostExternalFlagName = "didcomm-inbound-host-external" didCommInboundHostExternalEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST_EXTERNAL" didCommInboundHostExternalFlagUsage = "Inbound Host External Name:Port." + " This is the URL for the inbound server as seen externally." + " If not provided, then the internal inbound host will be used here." + " Alternatively, this can be set with the following environment variable: " + didCommInboundHostExternalEnvKey // db path didCommDBPathFlagName = "didcomm-db-path" didCommDBPathEnvKey = "ADAPTER_REST_DIDCOMM_DB_PATH" didCommDBPathFlagUsage = "Path to database." + " Alternatively, this can be set with the following environment variable: " + didCommDBPathEnvKey trustblocDomainFlagName = "dids-trustbloc-domain" trustblocDomainEnvKey = "ADAPTER_REST_TRUSTBLOC_DOMAIN" trustblocDomainFlagUsage = "URL to the did:trustbloc consortium's domain." + " Alternatively, this can be set with the following environment variable: " + trustblocDomainEnvKey ) // API endpoints. 
const ( uiEndpoint = "/ui" // modes issuerMode = "issuer" rpMode = "rp" ) type didCommParameters struct { inboundHostInternal string inboundHostExternal string dbPath string } type adapterRestParameters struct { hostURL string tlsSystemCertPool bool tlsCACerts []string dsn string oidcProviderURL string staticFiles string presentationDefinitionsFile string // TODO assuming same base path for all hydra endpoints for now hydraURL string mode string didCommParameters *didCommParameters // didcomm trustblocDomain string } type server interface { ListenAndServe(host string, router http.Handler) error } // HTTPServer represents an actual HTTP server implementation. type HTTPServer struct{} // ListenAndServe starts the server using the standard Go HTTP server implementation. func (s *HTTPServer) ListenAndServe(host string, router http.Handler) error { return http.ListenAndServe(host, router) } // GetStartCmd returns the Cobra start command. func GetStartCmd(srv server) *cobra.Command { startCmd := createStartCmd(srv) createFlags(startCmd) return startCmd } func createStartCmd(srv server) *cobra.Command { return &cobra.Command{ Use: "start", Short: "Start adapter-rest", Long: "Start adapter-rest inside the edge-adapter", RunE: func(cmd *cobra.Command, args []string) error { parameters, err := getAdapterRestParameters(cmd) if err != nil { return err } return startAdapterService(parameters, srv) }, } } //nolint:funlen,gocyclo func getAdapterRestParameters(cmd *cobra.Command) (*adapterRestParameters, error) { hostURL, err := cmdutils.GetUserSetVarFromString(cmd, hostURLFlagName, hostURLEnvKey, false) if err != nil { return nil, err } tlsSystemCertPool, tlsCACerts, err := getTLS(cmd) if err != nil { return nil, err } dsn, err := cmdutils.GetUserSetVarFromString(cmd, datasourceNameFlagName, datasourceNameEnvKey, true) if err != nil { return nil, err } oidcURL, err := cmdutils.GetUserSetVarFromString(cmd, oidcProviderURLFlagName, oidcProviderEnvKey, true) if err != nil { return 
nil, err } staticFiles, err := cmdutils.GetUserSetVarFromString(cmd, staticFilesPathFlagName, staticFilesPathEnvKey, true) if err != nil { return nil, err } mode, err := cmdutils.GetUserSetVarFromString(cmd, modeFlagName, modeEnvKey, true) if err != nil { return nil, err } presentationDefinitionsFile, err := cmdutils.GetUserSetVarFromString(cmd, presentationDefinitionsFlagName, presentationDefinitionsEnvKey, mode != rpMode) if err != nil { return nil, err } hydraURL, err := cmdutils.GetUserSetVarFromString(cmd, hydraURLFlagName, hydraURLEnvKey, true) if err != nil { return nil, err } // didcomm didCommParameters, err := getDIDCommParams(cmd) if err != nil { return nil, err } trustblocDomain, err := cmdutils.GetUserSetVarFromString(cmd, trustblocDomainFlagName, trustblocDomainEnvKey, true) if err != nil { return nil, err } return &adapterRestParameters{ hostURL: hostURL, tlsSystemCertPool: tlsSystemCertPool, tlsCACerts: tlsCACerts, dsn: dsn, oidcProviderURL: oidcURL, staticFiles: staticFiles, presentationDefinitionsFile: presentationDefinitionsFile, hydraURL: hydraURL, mode: mode, didCommParameters: didCommParameters, trustblocDomain: trustblocDomain, }, nil } func getDIDCommParams(cmd *cobra.Command) (*didCommParameters, error) { inboundHostInternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostFlagName, didCommInboundHostEnvKey, true) if err != nil { return nil, err } inboundHostExternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostExternalFlagName, didCommInboundHostExternalEnvKey, true) if err != nil { return nil, err } dbPath, err := cmdutils.GetUserSetVarFromString(cmd, didCommDBPathFlagName, didCommDBPathEnvKey, true) if err != nil { return nil, err } return &didCommParameters{ inboundHostInternal: inboundHostInternal, inboundHostExternal: inboundHostExternal, dbPath: dbPath, }, nil } func getTLS(cmd *cobra.Command) (bool, []string, error) { tlsSystemCertPoolString, err := cmdutils.GetUserSetVarFromString(cmd, 
tlsSystemCertPoolFlagName, tlsSystemCertPoolEnvKey, true) if err != nil { return false, nil, err } tlsSystemCertPool := false if tlsSystemCertPoolString != "" { tlsSystemCertPool, err = strconv.ParseBool(tlsSystemCertPoolString) if err != nil { return false, nil, err } } tlsCACerts, err := cmdutils.GetUserSetVarFromArrayString(cmd, tlsCACertsFlagName, tlsCACertsEnvKey, true) if err != nil { return false, nil, err } return tlsSystemCertPool, tlsCACerts, nil } func createFlags(startCmd *cobra.Command) { startCmd.Flags().StringP(hostURLFlagName, hostURLFlagShorthand, "", hostURLFlagUsage) startCmd.Flags().StringP(tlsSystemCertPoolFlagName, "", "", tlsSystemCertPoolFlagUsage) startCmd.Flags().StringArrayP(tlsCACertsFlagName, "", []string{}, tlsCACertsFlagUsage) startCmd.Flags().StringP(oidcProviderURLFlagName, "", "", oidcProviderURLFlagUsage) startCmd.Flags().StringP(datasourceNameFlagName, "", "", datasourceNameFlagUsage) startCmd.Flags().StringP(staticFilesPathFlagName, "", "", staticFilesPathFlagUsage) startCmd.Flags().StringP(presentationDefinitionsFlagName, "", "", presentationDefinitionsFlagUsage) startCmd.Flags().StringP(hydraURLFlagName, "", "", hydraURLFlagUsage) startCmd.Flags().StringP(modeFlagName, "", "", modeFlagUsage) // didcomm startCmd.Flags().StringP(didCommInboundHostFlagName, "", "", didCommInboundHostFlagUsage) startCmd.Flags().StringP(didCommInboundHostExternalFlagName, "", "", didCommInboundHostExternalFlagUsage) startCmd.Flags().StringP(didCommDBPathFlagName, "", "", didCommDBPathFlagUsage) startCmd.Flags().StringP(trustblocDomainFlagName, "", "", trustblocDomainFlagUsage) } func startAdapterService(parameters *adapterRestParameters, srv server) error { rootCAs, err := tlsutils.GetCertPool(parameters.tlsSystemCertPool, parameters.tlsCACerts) if err != nil { return err } logger.Debugf("root ca's %v", rootCAs) router := mux.NewRouter() // add health check endpoint healthCheckService := healthcheck.New() healthCheckHandlers := 
healthCheckService.GetOperations() for _, handler := range healthCheckHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } ariesCtx, err := createAriesAgent(parameters, &tls.Config{RootCAs: rootCAs}) if err != nil { return err } // add endpoints switch parameters.mode { case rpMode: err = addRPHandlers(parameters, ariesCtx, router, rootCAs) if err != nil { return nil } case issuerMode: err = addIssuerHandlers(parameters, ariesCtx, router) if err != nil { return nil } default: return fmt.Errorf("invalid mode : %s", parameters.mode) } logger.Infof("starting %s adapter rest server on host %s", parameters.mode, parameters.hostURL) return srv.ListenAndServe(parameters.hostURL, constructCORSHandler(router)) } func addRPHandlers( parameters *adapterRestParameters, ctx ariespai.CtxProvider, router *mux.Router, rootCAs *x509.CertPool) error { presentationExProvider, err := presentationex.New(parameters.presentationDefinitionsFile) if err != nil { return err } hydraURL, err := url.Parse(parameters.hydraURL) if err != nil { return err } didClient, err := didexchange.New(ctx) if err != nil { return fmt.Errorf("failed to initialized didexchange client : %w", err) } presentProofClient, err := presentproof.New(ctx) if err != nil { return err } // TODO init OIDC stuff in iteration 2 - https://github.com/trustbloc/edge-adapter/issues/24 // add rp endpoints rpService, err := rp.New(&rpops.Config{ PresentationExProvider: presentationExProvider, Hydra: hydra.NewClient(hydraURL, rootCAs), UIEndpoint: uiEndpoint, DIDExchClient: didClient, Store: memstore.NewProvider(), PublicDIDCreator: did.NewTrustblocDIDCreator( parameters.trustblocDomain, parameters.didCommParameters.inboundHostExternal, ctx.KMS(), rootCAs), AriesStorageProvider: ctx, PresentProofClient: presentProofClient, }) if err != nil { return err } rpHandlers := rpService.GetOperations() for _, handler := range rpHandlers { router.HandleFunc(handler.Path(), 
handler.Handle()).Methods(handler.Method()) } // static frontend router.PathPrefix(uiEndpoint). Subrouter(). Methods(http.MethodGet). HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile)) return nil } func addIssuerHandlers(parameters *adapterRestParameters, ariesCtx ariespai.CtxProvider, router *mux.Router) error { // add issuer endpoints issuerService, err := issuer.New(&issuerops.Config{ AriesCtx: ariesCtx, UIEndpoint: uiEndpoint, // TODO https://github.com/trustbloc/edge-adapter/issues/42 use sql store StoreProvider: memstore.NewProvider(), }) if err != nil { return err } rpHandlers := issuerService.GetOperations() for _, handler := range rpHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } // static frontend router.PathPrefix(uiEndpoint). Subrouter(). Methods(http.MethodGet). HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile)) return nil } func uiHandler( basePath string, fileServer func(http.ResponseWriter, *http.Request, string)) func(http.ResponseWriter, *http.Request) { return func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == uiEndpoint { fileServer(w, r, strings.ReplaceAll(basePath+"/index.html", "//", "/")) return } fileServer(w, r, strings.ReplaceAll(basePath+"/"+r.URL.Path[len(uiEndpoint):], "//", "/")) } } func
(handler http.Handler) http.Handler { return cors.New( cors.Options{ AllowedMethods: []string{http.MethodGet, http.MethodPost}, AllowedHeaders: []string{"Origin", "Accept", "Content-Type", "X-Requested-With", "Authorization"}, }, ).Handler(handler) } //nolint:deadcode,unused func initDB(dsn string) (*sql.DB, error) { const ( sleep = 1 * time.Second numRetries = 30 ) var dbms *sql.DB err := backoff.RetryNotify( func() error { var openErr error dbms, openErr = dburl.Open(dsn) return openErr }, backoff.WithMaxRetries(backoff.NewConstantBackOff(sleep), numRetries), func(retryErr error, t time.Duration) { logger.Warnf( "failed to connect to database, will sleep for %d before trying again : %s\n", t, retryErr) }, ) if err != nil { return nil, fmt.Errorf("failed to connect to database at %s : %w", dsn, err) } return dbms, nil } func createAriesAgent(parameters *adapterRestParameters, tlsConfig *tls.Config) (*ariesctx.Provider, error) { var opts []aries.Option if parameters.didCommParameters.inboundHostInternal == "" { return nil, errors.New("didcomm inbound host is mandatory") } if parameters.didCommParameters.dbPath != "" { opts = append(opts, defaults.WithStorePath(parameters.didCommParameters.dbPath)) } inboundTransportOpt := defaults.WithInboundHTTPAddr(parameters.didCommParameters.inboundHostInternal, parameters.didCommParameters.inboundHostExternal) opts = append(opts, inboundTransportOpt) outbound, err := arieshttp.NewOutbound(arieshttp.WithOutboundTLSConfig(tlsConfig)) if err != nil { return nil, fmt.Errorf("aries-framework - failed to create outbound tranpsort opts : %w", err) } opts = append(opts, aries.WithOutboundTransports(outbound)) framework, err := aries.New(opts...) if err != nil { return nil, fmt.Errorf("aries-framework - failed to initialize framework : %w", err) } ctx, err := framework.Context() if err != nil { return nil, fmt.Errorf("aries-framework - failed to get aries context : %w", err) } return ctx, nil }
constructCORSHandler
identifier_name
start.go
/* Copyright SecureKey Technologies Inc. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package startcmd import ( "crypto/tls" "crypto/x509" "database/sql" "errors" "fmt" "net/http" "net/url" "strconv" "strings" "time" "github.com/cenkalti/backoff" "github.com/gorilla/mux" "github.com/hyperledger/aries-framework-go/pkg/client/didexchange" "github.com/hyperledger/aries-framework-go/pkg/client/presentproof" arieshttp "github.com/hyperledger/aries-framework-go/pkg/didcomm/transport/http" "github.com/hyperledger/aries-framework-go/pkg/framework/aries" "github.com/hyperledger/aries-framework-go/pkg/framework/aries/defaults" ariesctx "github.com/hyperledger/aries-framework-go/pkg/framework/context" "github.com/rs/cors" "github.com/spf13/cobra" "github.com/trustbloc/edge-core/pkg/log" "github.com/trustbloc/edge-core/pkg/storage/memstore" cmdutils "github.com/trustbloc/edge-core/pkg/utils/cmd" tlsutils "github.com/trustbloc/edge-core/pkg/utils/tls" "github.com/xo/dburl" ariespai "github.com/trustbloc/edge-adapter/pkg/aries" "github.com/trustbloc/edge-adapter/pkg/did" "github.com/trustbloc/edge-adapter/pkg/hydra" "github.com/trustbloc/edge-adapter/pkg/presentationex" "github.com/trustbloc/edge-adapter/pkg/restapi/healthcheck" "github.com/trustbloc/edge-adapter/pkg/restapi/issuer" issuerops "github.com/trustbloc/edge-adapter/pkg/restapi/issuer/operation" "github.com/trustbloc/edge-adapter/pkg/restapi/rp" rpops "github.com/trustbloc/edge-adapter/pkg/restapi/rp/operation" ) var logger = log.New("edge-adapter") const ( hostURLFlagName = "host-url" hostURLFlagShorthand = "u" hostURLFlagUsage = "URL to run the adapter-rest instance on. Format: HostName:Port." hostURLEnvKey = "ADAPTER_REST_HOST_URL" datasourceNameFlagName = "dsn" datasourceNameFlagUsage = "Datasource Name with credentials if required," + " eg. 
mysql://root:secret@localhost:3306/adapter" + "Alternatively, this can be set with the following environment variable: " + datasourceNameEnvKey datasourceNameEnvKey = "ADAPTER_REST_DSN" oidcProviderURLFlagName = "op-url" oidcProviderURLFlagUsage = "URL for the OIDC provider." + "Alternatively, this can be set with the following environment variable: " + oidcProviderEnvKey oidcProviderEnvKey = "ADAPTER_REST_OP_URL" staticFilesPathFlagName = "static-path" staticFilesPathFlagUsage = "Path to the folder where the static files are to be hosted under " + uiEndpoint + "." + "Alternatively, this can be set with the following environment variable: " + staticFilesPathEnvKey staticFilesPathEnvKey = "ADAPTER_REST_STATIC_FILES" tlsSystemCertPoolFlagName = "tls-systemcertpool" tlsSystemCertPoolFlagUsage = "Use system certificate pool." + " Possible values [true] [false]. Defaults to false if not set." + " Alternatively, this can be set with the following environment variable: " + tlsSystemCertPoolEnvKey tlsSystemCertPoolEnvKey = "ADAPTER_REST_TLS_SYSTEMCERTPOOL" tlsCACertsFlagName = "tls-cacerts" tlsCACertsFlagUsage = "Comma-Separated list of ca certs path." + " Alternatively, this can be set with the following environment variable: " + tlsCACertsEnvKey tlsCACertsEnvKey = "ADAPTER_REST_TLS_CACERTS" presentationDefinitionsFlagName = "presentation-definitions-file" presentationDefinitionsFlagUsage = "Path to presentation definitions file with input_descriptors." presentationDefinitionsEnvKey = "ADAPTER_REST_PRESENTATION_DEFINITIONS_FILE" hydraURLFlagName = "hydra-url" hydraURLFlagUsage = "Base URL to the hydra service." + "Alternatively, this can be set with the following environment variable: " + hydraURLEnvKey hydraURLEnvKey = "ADAPTER_REST_HYDRA_URL" modeFlagName = "mode" modeFlagUsage = "Mode in which the edge-adapter service will run. Possible values: " + "['issuer', 'rp']." 
modeEnvKey = "ADAPTER_REST_MODE" // inbound host url flag didCommInboundHostFlagName = "didcomm-inbound-host" didCommInboundHostEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST" didCommInboundHostFlagUsage = "Inbound Host Name:Port. This is used internally to start the didcomm server." + " Alternatively, this can be set with the following environment variable: " + didCommInboundHostEnvKey // inbound host external url flag didCommInboundHostExternalFlagName = "didcomm-inbound-host-external" didCommInboundHostExternalEnvKey = "ADAPTER_REST_DIDCOMM_INBOUND_HOST_EXTERNAL" didCommInboundHostExternalFlagUsage = "Inbound Host External Name:Port." + " This is the URL for the inbound server as seen externally." + " If not provided, then the internal inbound host will be used here." + " Alternatively, this can be set with the following environment variable: " + didCommInboundHostExternalEnvKey // db path didCommDBPathFlagName = "didcomm-db-path" didCommDBPathEnvKey = "ADAPTER_REST_DIDCOMM_DB_PATH" didCommDBPathFlagUsage = "Path to database." + " Alternatively, this can be set with the following environment variable: " + didCommDBPathEnvKey trustblocDomainFlagName = "dids-trustbloc-domain" trustblocDomainEnvKey = "ADAPTER_REST_TRUSTBLOC_DOMAIN" trustblocDomainFlagUsage = "URL to the did:trustbloc consortium's domain." + " Alternatively, this can be set with the following environment variable: " + trustblocDomainEnvKey ) // API endpoints. 
const ( uiEndpoint = "/ui" // modes issuerMode = "issuer" rpMode = "rp" ) type didCommParameters struct { inboundHostInternal string inboundHostExternal string dbPath string } type adapterRestParameters struct { hostURL string tlsSystemCertPool bool tlsCACerts []string dsn string oidcProviderURL string staticFiles string presentationDefinitionsFile string // TODO assuming same base path for all hydra endpoints for now hydraURL string mode string didCommParameters *didCommParameters // didcomm trustblocDomain string } type server interface { ListenAndServe(host string, router http.Handler) error } // HTTPServer represents an actual HTTP server implementation. type HTTPServer struct{} // ListenAndServe starts the server using the standard Go HTTP server implementation. func (s *HTTPServer) ListenAndServe(host string, router http.Handler) error { return http.ListenAndServe(host, router) } // GetStartCmd returns the Cobra start command. func GetStartCmd(srv server) *cobra.Command { startCmd := createStartCmd(srv) createFlags(startCmd) return startCmd } func createStartCmd(srv server) *cobra.Command { return &cobra.Command{ Use: "start", Short: "Start adapter-rest", Long: "Start adapter-rest inside the edge-adapter", RunE: func(cmd *cobra.Command, args []string) error { parameters, err := getAdapterRestParameters(cmd) if err != nil { return err } return startAdapterService(parameters, srv) }, } } //nolint:funlen,gocyclo func getAdapterRestParameters(cmd *cobra.Command) (*adapterRestParameters, error) { hostURL, err := cmdutils.GetUserSetVarFromString(cmd, hostURLFlagName, hostURLEnvKey, false) if err != nil { return nil, err } tlsSystemCertPool, tlsCACerts, err := getTLS(cmd) if err != nil { return nil, err } dsn, err := cmdutils.GetUserSetVarFromString(cmd, datasourceNameFlagName, datasourceNameEnvKey, true) if err != nil { return nil, err } oidcURL, err := cmdutils.GetUserSetVarFromString(cmd, oidcProviderURLFlagName, oidcProviderEnvKey, true) if err != nil { return 
nil, err } staticFiles, err := cmdutils.GetUserSetVarFromString(cmd, staticFilesPathFlagName, staticFilesPathEnvKey, true) if err != nil { return nil, err } mode, err := cmdutils.GetUserSetVarFromString(cmd, modeFlagName, modeEnvKey, true) if err != nil { return nil, err } presentationDefinitionsFile, err := cmdutils.GetUserSetVarFromString(cmd, presentationDefinitionsFlagName, presentationDefinitionsEnvKey, mode != rpMode) if err != nil { return nil, err } hydraURL, err := cmdutils.GetUserSetVarFromString(cmd, hydraURLFlagName, hydraURLEnvKey, true) if err != nil { return nil, err } // didcomm didCommParameters, err := getDIDCommParams(cmd) if err != nil { return nil, err } trustblocDomain, err := cmdutils.GetUserSetVarFromString(cmd, trustblocDomainFlagName, trustblocDomainEnvKey, true) if err != nil { return nil, err } return &adapterRestParameters{ hostURL: hostURL, tlsSystemCertPool: tlsSystemCertPool, tlsCACerts: tlsCACerts, dsn: dsn, oidcProviderURL: oidcURL, staticFiles: staticFiles, presentationDefinitionsFile: presentationDefinitionsFile, hydraURL: hydraURL, mode: mode, didCommParameters: didCommParameters, trustblocDomain: trustblocDomain, }, nil } func getDIDCommParams(cmd *cobra.Command) (*didCommParameters, error) { inboundHostInternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostFlagName, didCommInboundHostEnvKey, true) if err != nil { return nil, err } inboundHostExternal, err := cmdutils.GetUserSetVarFromString(cmd, didCommInboundHostExternalFlagName, didCommInboundHostExternalEnvKey, true) if err != nil { return nil, err } dbPath, err := cmdutils.GetUserSetVarFromString(cmd, didCommDBPathFlagName, didCommDBPathEnvKey, true) if err != nil { return nil, err } return &didCommParameters{ inboundHostInternal: inboundHostInternal, inboundHostExternal: inboundHostExternal, dbPath: dbPath, }, nil } func getTLS(cmd *cobra.Command) (bool, []string, error) { tlsSystemCertPoolString, err := cmdutils.GetUserSetVarFromString(cmd, 
tlsSystemCertPoolFlagName, tlsSystemCertPoolEnvKey, true) if err != nil { return false, nil, err } tlsSystemCertPool := false if tlsSystemCertPoolString != "" { tlsSystemCertPool, err = strconv.ParseBool(tlsSystemCertPoolString) if err != nil { return false, nil, err } } tlsCACerts, err := cmdutils.GetUserSetVarFromArrayString(cmd, tlsCACertsFlagName, tlsCACertsEnvKey, true) if err != nil { return false, nil, err } return tlsSystemCertPool, tlsCACerts, nil } func createFlags(startCmd *cobra.Command) { startCmd.Flags().StringP(hostURLFlagName, hostURLFlagShorthand, "", hostURLFlagUsage) startCmd.Flags().StringP(tlsSystemCertPoolFlagName, "", "", tlsSystemCertPoolFlagUsage) startCmd.Flags().StringArrayP(tlsCACertsFlagName, "", []string{}, tlsCACertsFlagUsage) startCmd.Flags().StringP(oidcProviderURLFlagName, "", "", oidcProviderURLFlagUsage) startCmd.Flags().StringP(datasourceNameFlagName, "", "", datasourceNameFlagUsage) startCmd.Flags().StringP(staticFilesPathFlagName, "", "", staticFilesPathFlagUsage) startCmd.Flags().StringP(presentationDefinitionsFlagName, "", "", presentationDefinitionsFlagUsage) startCmd.Flags().StringP(hydraURLFlagName, "", "", hydraURLFlagUsage) startCmd.Flags().StringP(modeFlagName, "", "", modeFlagUsage) // didcomm startCmd.Flags().StringP(didCommInboundHostFlagName, "", "", didCommInboundHostFlagUsage) startCmd.Flags().StringP(didCommInboundHostExternalFlagName, "", "", didCommInboundHostExternalFlagUsage) startCmd.Flags().StringP(didCommDBPathFlagName, "", "", didCommDBPathFlagUsage) startCmd.Flags().StringP(trustblocDomainFlagName, "", "", trustblocDomainFlagUsage) } func startAdapterService(parameters *adapterRestParameters, srv server) error { rootCAs, err := tlsutils.GetCertPool(parameters.tlsSystemCertPool, parameters.tlsCACerts) if err != nil { return err } logger.Debugf("root ca's %v", rootCAs) router := mux.NewRouter() // add health check endpoint healthCheckService := healthcheck.New() healthCheckHandlers := 
healthCheckService.GetOperations() for _, handler := range healthCheckHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } ariesCtx, err := createAriesAgent(parameters, &tls.Config{RootCAs: rootCAs}) if err != nil { return err } // add endpoints switch parameters.mode { case rpMode: err = addRPHandlers(parameters, ariesCtx, router, rootCAs) if err != nil { return nil } case issuerMode: err = addIssuerHandlers(parameters, ariesCtx, router) if err != nil { return nil } default: return fmt.Errorf("invalid mode : %s", parameters.mode) } logger.Infof("starting %s adapter rest server on host %s", parameters.mode, parameters.hostURL) return srv.ListenAndServe(parameters.hostURL, constructCORSHandler(router)) } func addRPHandlers( parameters *adapterRestParameters, ctx ariespai.CtxProvider, router *mux.Router, rootCAs *x509.CertPool) error { presentationExProvider, err := presentationex.New(parameters.presentationDefinitionsFile) if err != nil { return err } hydraURL, err := url.Parse(parameters.hydraURL) if err != nil { return err } didClient, err := didexchange.New(ctx) if err != nil { return fmt.Errorf("failed to initialized didexchange client : %w", err) } presentProofClient, err := presentproof.New(ctx) if err != nil { return err } // TODO init OIDC stuff in iteration 2 - https://github.com/trustbloc/edge-adapter/issues/24 // add rp endpoints rpService, err := rp.New(&rpops.Config{ PresentationExProvider: presentationExProvider, Hydra: hydra.NewClient(hydraURL, rootCAs), UIEndpoint: uiEndpoint, DIDExchClient: didClient, Store: memstore.NewProvider(), PublicDIDCreator: did.NewTrustblocDIDCreator( parameters.trustblocDomain, parameters.didCommParameters.inboundHostExternal, ctx.KMS(), rootCAs), AriesStorageProvider: ctx, PresentProofClient: presentProofClient, }) if err != nil { return err } rpHandlers := rpService.GetOperations() for _, handler := range rpHandlers {
} // static frontend router.PathPrefix(uiEndpoint). Subrouter(). Methods(http.MethodGet). HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile)) return nil } func addIssuerHandlers(parameters *adapterRestParameters, ariesCtx ariespai.CtxProvider, router *mux.Router) error { // add issuer endpoints issuerService, err := issuer.New(&issuerops.Config{ AriesCtx: ariesCtx, UIEndpoint: uiEndpoint, // TODO https://github.com/trustbloc/edge-adapter/issues/42 use sql store StoreProvider: memstore.NewProvider(), }) if err != nil { return err } rpHandlers := issuerService.GetOperations() for _, handler := range rpHandlers { router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method()) } // static frontend router.PathPrefix(uiEndpoint). Subrouter(). Methods(http.MethodGet). HandlerFunc(uiHandler(parameters.staticFiles, http.ServeFile)) return nil } func uiHandler( basePath string, fileServer func(http.ResponseWriter, *http.Request, string)) func(http.ResponseWriter, *http.Request) { return func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == uiEndpoint { fileServer(w, r, strings.ReplaceAll(basePath+"/index.html", "//", "/")) return } fileServer(w, r, strings.ReplaceAll(basePath+"/"+r.URL.Path[len(uiEndpoint):], "//", "/")) } } func constructCORSHandler(handler http.Handler) http.Handler { return cors.New( cors.Options{ AllowedMethods: []string{http.MethodGet, http.MethodPost}, AllowedHeaders: []string{"Origin", "Accept", "Content-Type", "X-Requested-With", "Authorization"}, }, ).Handler(handler) } //nolint:deadcode,unused func initDB(dsn string) (*sql.DB, error) { const ( sleep = 1 * time.Second numRetries = 30 ) var dbms *sql.DB err := backoff.RetryNotify( func() error { var openErr error dbms, openErr = dburl.Open(dsn) return openErr }, backoff.WithMaxRetries(backoff.NewConstantBackOff(sleep), numRetries), func(retryErr error, t time.Duration) { logger.Warnf( "failed to connect to database, will sleep for %d before trying again : %s\n", t, 
retryErr) }, ) if err != nil { return nil, fmt.Errorf("failed to connect to database at %s : %w", dsn, err) } return dbms, nil } func createAriesAgent(parameters *adapterRestParameters, tlsConfig *tls.Config) (*ariesctx.Provider, error) { var opts []aries.Option if parameters.didCommParameters.inboundHostInternal == "" { return nil, errors.New("didcomm inbound host is mandatory") } if parameters.didCommParameters.dbPath != "" { opts = append(opts, defaults.WithStorePath(parameters.didCommParameters.dbPath)) } inboundTransportOpt := defaults.WithInboundHTTPAddr(parameters.didCommParameters.inboundHostInternal, parameters.didCommParameters.inboundHostExternal) opts = append(opts, inboundTransportOpt) outbound, err := arieshttp.NewOutbound(arieshttp.WithOutboundTLSConfig(tlsConfig)) if err != nil { return nil, fmt.Errorf("aries-framework - failed to create outbound tranpsort opts : %w", err) } opts = append(opts, aries.WithOutboundTransports(outbound)) framework, err := aries.New(opts...) if err != nil { return nil, fmt.Errorf("aries-framework - failed to initialize framework : %w", err) } ctx, err := framework.Context() if err != nil { return nil, fmt.Errorf("aries-framework - failed to get aries context : %w", err) } return ctx, nil }
router.HandleFunc(handler.Path(), handler.Handle()).Methods(handler.Method())
random_line_split
parse.go
// Package types is a package that parses the GDNative headers for type definitions // to create wrapper structures for Go. package types import ( "fmt" "github.com/pinzolo/casee" "io/ioutil" "os" "regexp" "path/filepath" "strings" "github.com/godot-go/godot-go/cmd/gdnativeapijson" ) var cTypeRegex = regexp.MustCompile(`(const)?\s*([\w_][\w_\d]*)\s*(\**)`) // GlobalMethods contains the list of methods not associated with a GoTypeDef type GlobalMethods []gdnativeapijson.GoMethod // ConstructorIndex indexes by gdnativeapijson.GoTypeDef.CName type ConstructorIndex map[string][]gdnativeapijson.GoMethod // MethodIndex indexes by gdnativeapijson.GoTypeDef.CName type MethodIndex map[string][]gdnativeapijson.GoMethod // GoTypeDefIndex indexes by C header file name and then by C typedef name type GoTypeDefIndex map[string]map[string]gdnativeapijson.GoTypeDef // parseGodotHeaders will parse the GDNative headers. Takes a list of headers/structs to ignore. // Definitions in the given headers and definitions // with the given name will not be added to the returned list of type definitions. // We'll need to manually create these structures. 
func parseGodotHeaders( packagePath string, constructorIndex ConstructorIndex, methodIndex MethodIndex, excludeHeaders, excludeStructs []string) GoTypeDefIndex { var ( index = GoTypeDefIndex{} relPath string err error godotHeaderPath = filepath.Join(packagePath, "godot_headers") ) // Walk through all of the godot filename files err = filepath.Walk(godotHeaderPath, func(path string, f os.FileInfo, err error) error { if !f.IsDir() && filepath.Ext(path) == ".h" { relPath, err = filepath.Rel(godotHeaderPath, path) if err != nil { panic(err) } // Read the filename content, err := ioutil.ReadFile(path) if err != nil { panic(err) } // Find all of the type definitions in the filename file // fmt.Println("Parsing File ", path, "...") foundTypesLines := findTypeDefs(content) // After extracting the lines, we can now parse the type definition to // a structure that we can use to build a Go wrapper. for _, foundTypeLines := range foundTypesLines { typeDef := parseTypeDef(foundTypeLines, relPath) typeDef.Constructors = constructorIndex[typeDef.CName] typeDef.Methods = methodIndex[typeDef.CName] // Only add the type if it's not in our exclude list. if !strInSlice(typeDef.CName, excludeStructs) && !strInSlice(typeDef.CHeaderFilename, excludeHeaders) { if tdMap, ok := index[relPath]; ok { tdMap[typeDef.CName] = typeDef } else { index[relPath] = map[string]gdnativeapijson.GoTypeDef{ typeDef.CName: typeDef, } } } } } return nil }) if err != nil { panic(err) } return index } func
(typeLines []string, headerName string) gdnativeapijson.GoTypeDef { // Create a structure for our type definition. typeDef := gdnativeapijson.GoTypeDef{ CHeaderFilename: headerName, Properties: []gdnativeapijson.GoProperty{}, } // Small function for splitting a line to get the uncommented line and // get the comment itself. getComment := func(line string) (def, comment string) { halves := strings.Split(line, "//") def = halves[0] if len(halves) > 1 { comment = strings.TrimSpace(halves[1]) } if strings.HasPrefix(comment, "/") { comment = strings.Replace(comment, "/", "", 1) } return def, comment } // If the type definition is a single line, handle it a little differently if len(typeLines) == 1 { // Extract the comment if there is one. line, comment := getComment(typeLines[0]) // Check to see if the property is a pointer type if strings.Contains(line, "*") { line = strings.Replace(line, "*", "", 1) typeDef.IsPointer = true } var err error // Get the words of the line words := strings.Split(line, " ") typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1) goTypeName, usage := gdnativeapijson.ToGoTypeName(typeDef.CName) typeDef.Name = goTypeName typeDef.Base = words[len(words)-2] typeDef.Comment = comment typeDef.Usage = usage if err != nil { panic(fmt.Errorf("%s\n%w", line, err)) } return typeDef } // Extract the name of the type. 
lastLine := typeLines[len(typeLines)-1] words := strings.Split(lastLine, " ") typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1) var err error // Extract the base type firstLine := typeLines[0] words = strings.Split(firstLine, " ") typeDef.Base = words[1] if err != nil { panic(fmt.Errorf("%s\n%w", strings.Join(typeLines, "\n"), err)) } // Convert the name of the type to a Go name typeDef.Name, _ = gdnativeapijson.ToGoTypeName(typeDef.CName) if len(typeDef.Name) == 0 { typeDef.Name = words[2] } // Extract the properties from the type var properties []string if strings.HasSuffix(strings.TrimSpace(firstLine), "{") { properties = typeLines[1 : len(typeLines)-1] } else { properties = typeLines[2 : len(typeLines)-1] } var accumLines string // Loop through each property line for _, line := range properties { if strings.HasPrefix(strings.TrimSpace(line), "//") || len(strings.TrimSpace(line)) == 0 { continue } if !strings.Contains(line, ";") && typeDef.Base != "enum" { accumLines += line } else { line = accumLines + line accumLines = "" } // Skip function definitions if strings.Contains(line, "(*") { continue } // Create a type definition for the property property := gdnativeapijson.GoProperty{} // Extract the comment if there is one. 
line, comment := getComment(line) property.Comment = comment // Sanitize the line line = strings.TrimSpace(line) line = strings.Split(line, ";")[0] line = strings.Replace(line, "unsigned ", "u", 1) line = strings.Replace(line, "const ", "", 1) // Split the line by spaces words = strings.Split(line, " ") // Check to see if the line is just a comment if words[0] == "//" || (strings.Index(line, "/*") == 0 && strings.Index(line, "*/") == (len(line)-2)) { continue } // Set the property details if typeDef.Base == "enum" { // Strip any commas in the name words[0] = strings.Replace(words[0], ",", "", 1) property.CName = words[0] property.Name = casee.ToPascalCase(strings.Replace(words[0], "GODOT_", "", 1)) } else { if len(words) < 2 { fmt.Println("Skipping irregular line:", line) continue } property.Base = words[0] property.CName = words[1] property.Name = casee.ToPascalCase(strings.Replace(words[1], "godot_", "", 1)) } // Check to see if the property is a pointer type if strings.Contains(property.CName, "*") { property.CName = strings.Replace(property.CName, "*", "", 1) property.Name = strings.Replace(property.Name, "*", "", 1) property.IsPointer = true } // Skip empty property names if property.Name == "" { continue } if strings.Contains(property.Name, "}") { panic(fmt.Errorf("malformed Name: %+v", property)) } // Append the property to the type definition typeDef.Properties = append(typeDef.Properties, property) } return typeDef } type block int8 const ( externBlock block = iota typedefBlock localStructBlock enumBlock ) // findTypeDefs will return a list of type definition lines. func findTypeDefs(content []byte) [][]string { lines := strings.Split(string(content), "\n") // Create a structure that will hold the lines that define the type. 
var ( singleType []string foundTypes [][]string blocks []block ) for i, line := range lines { if strings.Index(line, "extern \"C\" {") == 0 { // fmt.Println("Line", i ,": START EXTERN BLOCK") blocks = append(blocks, externBlock) continue } else if strings.Index(line, "struct ") == 0 { // fmt.Println("Line", i ,": START LOCAL STRUCT BLOCK") blocks = append(blocks, localStructBlock) continue } else if strings.Index(line, "enum ") == 0 { // fmt.Println("Line", i ,": START ENUM BLOCK") blocks = append(blocks, enumBlock) continue } else if strings.Index(line, "}") == 0 { if len(blocks) == 0 { panic(fmt.Sprintln("\tLine", i, ": extra closing bracket encountered", line)) } n := len(blocks)-1 b := blocks[n] blocks = blocks[:n] switch b { case localStructBlock: // fmt.Println("Line", i ,": END LOCAL STRUCT BLOCK") continue case externBlock: // fmt.Println("Line", i ,": END EXTERN BLOCK") continue case enumBlock: // fmt.Println("Line", i ,": END ENUM BLOCK") continue case typedefBlock: singleType = append(singleType, line) foundTypes = append(foundTypes, singleType) // fmt.Println("\tLine", i, ": Type found:\n", strings.Join(singleType, "\n")) // reset singleType = []string{} default: panic(fmt.Sprintln("\tLine", i, ": extra closing curly braket found")) } } else if strings.Index(line, "typedef ") == 0 { // Check to see if this is a single line type and avoid using // the blocks stack if strings.Contains(line, ";") { // Skip if this is a function definition if strings.Contains(line, ")") { fmt.Println("\tLine", i, ": skip function: ", line) continue } singleType = append(singleType, line) foundTypes = append(foundTypes, singleType) // fmt.Println("\tLine", i, ": Single line type found:\n", strings.Join(singleType, "\n")) // reset singleType = []string{} } else { blocks = append(blocks, typedefBlock) singleType = append(singleType, line) } } else if len(blocks) > 0 { b := blocks[len(blocks) - 1] if b == typedefBlock { singleType = append(singleType, line) } } // // If a type 
was found, keep appending our struct lines until we // // reach the end of the definition. // if accumulatingTypedefLines { // //fmt.Println("Line", i, ": Appending line for type found:", line) // // Keep adding the lines to our list of lines until we // // reach an end bracket. // singleType = append(singleType, line) // if strings.Contains(line, "}") { // //fmt.Println("Line", i, ": Found end of type definition.") // accumulatingTypedefLines = false // foundTypes = append(foundTypes, singleType) // singleType = []string{} // } // } } return foundTypes } func strInSlice(a string, list []string) bool { for _, b := range list { if b == a { return true } } return false }
parseTypeDef
identifier_name
parse.go
// Package types is a package that parses the GDNative headers for type definitions // to create wrapper structures for Go. package types import ( "fmt" "github.com/pinzolo/casee" "io/ioutil" "os" "regexp" "path/filepath" "strings" "github.com/godot-go/godot-go/cmd/gdnativeapijson" ) var cTypeRegex = regexp.MustCompile(`(const)?\s*([\w_][\w_\d]*)\s*(\**)`) // GlobalMethods contains the list of methods not associated with a GoTypeDef type GlobalMethods []gdnativeapijson.GoMethod // ConstructorIndex indexes by gdnativeapijson.GoTypeDef.CName type ConstructorIndex map[string][]gdnativeapijson.GoMethod // MethodIndex indexes by gdnativeapijson.GoTypeDef.CName type MethodIndex map[string][]gdnativeapijson.GoMethod // GoTypeDefIndex indexes by C header file name and then by C typedef name type GoTypeDefIndex map[string]map[string]gdnativeapijson.GoTypeDef // parseGodotHeaders will parse the GDNative headers. Takes a list of headers/structs to ignore. // Definitions in the given headers and definitions // with the given name will not be added to the returned list of type definitions. // We'll need to manually create these structures. func parseGodotHeaders( packagePath string, constructorIndex ConstructorIndex, methodIndex MethodIndex, excludeHeaders, excludeStructs []string) GoTypeDefIndex { var ( index = GoTypeDefIndex{} relPath string err error godotHeaderPath = filepath.Join(packagePath, "godot_headers") ) // Walk through all of the godot filename files err = filepath.Walk(godotHeaderPath, func(path string, f os.FileInfo, err error) error { if !f.IsDir() && filepath.Ext(path) == ".h" { relPath, err = filepath.Rel(godotHeaderPath, path) if err != nil { panic(err) } // Read the filename content, err := ioutil.ReadFile(path) if err != nil
// Find all of the type definitions in the filename file // fmt.Println("Parsing File ", path, "...") foundTypesLines := findTypeDefs(content) // After extracting the lines, we can now parse the type definition to // a structure that we can use to build a Go wrapper. for _, foundTypeLines := range foundTypesLines { typeDef := parseTypeDef(foundTypeLines, relPath) typeDef.Constructors = constructorIndex[typeDef.CName] typeDef.Methods = methodIndex[typeDef.CName] // Only add the type if it's not in our exclude list. if !strInSlice(typeDef.CName, excludeStructs) && !strInSlice(typeDef.CHeaderFilename, excludeHeaders) { if tdMap, ok := index[relPath]; ok { tdMap[typeDef.CName] = typeDef } else { index[relPath] = map[string]gdnativeapijson.GoTypeDef{ typeDef.CName: typeDef, } } } } } return nil }) if err != nil { panic(err) } return index } func parseTypeDef(typeLines []string, headerName string) gdnativeapijson.GoTypeDef { // Create a structure for our type definition. typeDef := gdnativeapijson.GoTypeDef{ CHeaderFilename: headerName, Properties: []gdnativeapijson.GoProperty{}, } // Small function for splitting a line to get the uncommented line and // get the comment itself. getComment := func(line string) (def, comment string) { halves := strings.Split(line, "//") def = halves[0] if len(halves) > 1 { comment = strings.TrimSpace(halves[1]) } if strings.HasPrefix(comment, "/") { comment = strings.Replace(comment, "/", "", 1) } return def, comment } // If the type definition is a single line, handle it a little differently if len(typeLines) == 1 { // Extract the comment if there is one. 
line, comment := getComment(typeLines[0]) // Check to see if the property is a pointer type if strings.Contains(line, "*") { line = strings.Replace(line, "*", "", 1) typeDef.IsPointer = true } var err error // Get the words of the line words := strings.Split(line, " ") typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1) goTypeName, usage := gdnativeapijson.ToGoTypeName(typeDef.CName) typeDef.Name = goTypeName typeDef.Base = words[len(words)-2] typeDef.Comment = comment typeDef.Usage = usage if err != nil { panic(fmt.Errorf("%s\n%w", line, err)) } return typeDef } // Extract the name of the type. lastLine := typeLines[len(typeLines)-1] words := strings.Split(lastLine, " ") typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1) var err error // Extract the base type firstLine := typeLines[0] words = strings.Split(firstLine, " ") typeDef.Base = words[1] if err != nil { panic(fmt.Errorf("%s\n%w", strings.Join(typeLines, "\n"), err)) } // Convert the name of the type to a Go name typeDef.Name, _ = gdnativeapijson.ToGoTypeName(typeDef.CName) if len(typeDef.Name) == 0 { typeDef.Name = words[2] } // Extract the properties from the type var properties []string if strings.HasSuffix(strings.TrimSpace(firstLine), "{") { properties = typeLines[1 : len(typeLines)-1] } else { properties = typeLines[2 : len(typeLines)-1] } var accumLines string // Loop through each property line for _, line := range properties { if strings.HasPrefix(strings.TrimSpace(line), "//") || len(strings.TrimSpace(line)) == 0 { continue } if !strings.Contains(line, ";") && typeDef.Base != "enum" { accumLines += line } else { line = accumLines + line accumLines = "" } // Skip function definitions if strings.Contains(line, "(*") { continue } // Create a type definition for the property property := gdnativeapijson.GoProperty{} // Extract the comment if there is one. 
line, comment := getComment(line) property.Comment = comment // Sanitize the line line = strings.TrimSpace(line) line = strings.Split(line, ";")[0] line = strings.Replace(line, "unsigned ", "u", 1) line = strings.Replace(line, "const ", "", 1) // Split the line by spaces words = strings.Split(line, " ") // Check to see if the line is just a comment if words[0] == "//" || (strings.Index(line, "/*") == 0 && strings.Index(line, "*/") == (len(line)-2)) { continue } // Set the property details if typeDef.Base == "enum" { // Strip any commas in the name words[0] = strings.Replace(words[0], ",", "", 1) property.CName = words[0] property.Name = casee.ToPascalCase(strings.Replace(words[0], "GODOT_", "", 1)) } else { if len(words) < 2 { fmt.Println("Skipping irregular line:", line) continue } property.Base = words[0] property.CName = words[1] property.Name = casee.ToPascalCase(strings.Replace(words[1], "godot_", "", 1)) } // Check to see if the property is a pointer type if strings.Contains(property.CName, "*") { property.CName = strings.Replace(property.CName, "*", "", 1) property.Name = strings.Replace(property.Name, "*", "", 1) property.IsPointer = true } // Skip empty property names if property.Name == "" { continue } if strings.Contains(property.Name, "}") { panic(fmt.Errorf("malformed Name: %+v", property)) } // Append the property to the type definition typeDef.Properties = append(typeDef.Properties, property) } return typeDef } type block int8 const ( externBlock block = iota typedefBlock localStructBlock enumBlock ) // findTypeDefs will return a list of type definition lines. func findTypeDefs(content []byte) [][]string { lines := strings.Split(string(content), "\n") // Create a structure that will hold the lines that define the type. 
var ( singleType []string foundTypes [][]string blocks []block ) for i, line := range lines { if strings.Index(line, "extern \"C\" {") == 0 { // fmt.Println("Line", i ,": START EXTERN BLOCK") blocks = append(blocks, externBlock) continue } else if strings.Index(line, "struct ") == 0 { // fmt.Println("Line", i ,": START LOCAL STRUCT BLOCK") blocks = append(blocks, localStructBlock) continue } else if strings.Index(line, "enum ") == 0 { // fmt.Println("Line", i ,": START ENUM BLOCK") blocks = append(blocks, enumBlock) continue } else if strings.Index(line, "}") == 0 { if len(blocks) == 0 { panic(fmt.Sprintln("\tLine", i, ": extra closing bracket encountered", line)) } n := len(blocks)-1 b := blocks[n] blocks = blocks[:n] switch b { case localStructBlock: // fmt.Println("Line", i ,": END LOCAL STRUCT BLOCK") continue case externBlock: // fmt.Println("Line", i ,": END EXTERN BLOCK") continue case enumBlock: // fmt.Println("Line", i ,": END ENUM BLOCK") continue case typedefBlock: singleType = append(singleType, line) foundTypes = append(foundTypes, singleType) // fmt.Println("\tLine", i, ": Type found:\n", strings.Join(singleType, "\n")) // reset singleType = []string{} default: panic(fmt.Sprintln("\tLine", i, ": extra closing curly braket found")) } } else if strings.Index(line, "typedef ") == 0 { // Check to see if this is a single line type and avoid using // the blocks stack if strings.Contains(line, ";") { // Skip if this is a function definition if strings.Contains(line, ")") { fmt.Println("\tLine", i, ": skip function: ", line) continue } singleType = append(singleType, line) foundTypes = append(foundTypes, singleType) // fmt.Println("\tLine", i, ": Single line type found:\n", strings.Join(singleType, "\n")) // reset singleType = []string{} } else { blocks = append(blocks, typedefBlock) singleType = append(singleType, line) } } else if len(blocks) > 0 { b := blocks[len(blocks) - 1] if b == typedefBlock { singleType = append(singleType, line) } } // // If a type 
was found, keep appending our struct lines until we // // reach the end of the definition. // if accumulatingTypedefLines { // //fmt.Println("Line", i, ": Appending line for type found:", line) // // Keep adding the lines to our list of lines until we // // reach an end bracket. // singleType = append(singleType, line) // if strings.Contains(line, "}") { // //fmt.Println("Line", i, ": Found end of type definition.") // accumulatingTypedefLines = false // foundTypes = append(foundTypes, singleType) // singleType = []string{} // } // } } return foundTypes } func strInSlice(a string, list []string) bool { for _, b := range list { if b == a { return true } } return false }
{ panic(err) }
conditional_block
parse.go
// Package types is a package that parses the GDNative headers for type definitions // to create wrapper structures for Go. package types import ( "fmt" "github.com/pinzolo/casee" "io/ioutil" "os" "regexp" "path/filepath" "strings" "github.com/godot-go/godot-go/cmd/gdnativeapijson" ) var cTypeRegex = regexp.MustCompile(`(const)?\s*([\w_][\w_\d]*)\s*(\**)`) // GlobalMethods contains the list of methods not associated with a GoTypeDef type GlobalMethods []gdnativeapijson.GoMethod // ConstructorIndex indexes by gdnativeapijson.GoTypeDef.CName type ConstructorIndex map[string][]gdnativeapijson.GoMethod // MethodIndex indexes by gdnativeapijson.GoTypeDef.CName type MethodIndex map[string][]gdnativeapijson.GoMethod // GoTypeDefIndex indexes by C header file name and then by C typedef name type GoTypeDefIndex map[string]map[string]gdnativeapijson.GoTypeDef // parseGodotHeaders will parse the GDNative headers. Takes a list of headers/structs to ignore. // Definitions in the given headers and definitions // with the given name will not be added to the returned list of type definitions. // We'll need to manually create these structures. 
func parseGodotHeaders( packagePath string, constructorIndex ConstructorIndex, methodIndex MethodIndex, excludeHeaders, excludeStructs []string) GoTypeDefIndex { var ( index = GoTypeDefIndex{} relPath string err error godotHeaderPath = filepath.Join(packagePath, "godot_headers") ) // Walk through all of the godot filename files err = filepath.Walk(godotHeaderPath, func(path string, f os.FileInfo, err error) error { if !f.IsDir() && filepath.Ext(path) == ".h" { relPath, err = filepath.Rel(godotHeaderPath, path) if err != nil { panic(err) } // Read the filename content, err := ioutil.ReadFile(path) if err != nil { panic(err) } // Find all of the type definitions in the filename file // fmt.Println("Parsing File ", path, "...") foundTypesLines := findTypeDefs(content) // After extracting the lines, we can now parse the type definition to // a structure that we can use to build a Go wrapper. for _, foundTypeLines := range foundTypesLines { typeDef := parseTypeDef(foundTypeLines, relPath) typeDef.Constructors = constructorIndex[typeDef.CName] typeDef.Methods = methodIndex[typeDef.CName]
tdMap[typeDef.CName] = typeDef } else { index[relPath] = map[string]gdnativeapijson.GoTypeDef{ typeDef.CName: typeDef, } } } } } return nil }) if err != nil { panic(err) } return index } func parseTypeDef(typeLines []string, headerName string) gdnativeapijson.GoTypeDef { // Create a structure for our type definition. typeDef := gdnativeapijson.GoTypeDef{ CHeaderFilename: headerName, Properties: []gdnativeapijson.GoProperty{}, } // Small function for splitting a line to get the uncommented line and // get the comment itself. getComment := func(line string) (def, comment string) { halves := strings.Split(line, "//") def = halves[0] if len(halves) > 1 { comment = strings.TrimSpace(halves[1]) } if strings.HasPrefix(comment, "/") { comment = strings.Replace(comment, "/", "", 1) } return def, comment } // If the type definition is a single line, handle it a little differently if len(typeLines) == 1 { // Extract the comment if there is one. line, comment := getComment(typeLines[0]) // Check to see if the property is a pointer type if strings.Contains(line, "*") { line = strings.Replace(line, "*", "", 1) typeDef.IsPointer = true } var err error // Get the words of the line words := strings.Split(line, " ") typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1) goTypeName, usage := gdnativeapijson.ToGoTypeName(typeDef.CName) typeDef.Name = goTypeName typeDef.Base = words[len(words)-2] typeDef.Comment = comment typeDef.Usage = usage if err != nil { panic(fmt.Errorf("%s\n%w", line, err)) } return typeDef } // Extract the name of the type. 
lastLine := typeLines[len(typeLines)-1] words := strings.Split(lastLine, " ") typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1) var err error // Extract the base type firstLine := typeLines[0] words = strings.Split(firstLine, " ") typeDef.Base = words[1] if err != nil { panic(fmt.Errorf("%s\n%w", strings.Join(typeLines, "\n"), err)) } // Convert the name of the type to a Go name typeDef.Name, _ = gdnativeapijson.ToGoTypeName(typeDef.CName) if len(typeDef.Name) == 0 { typeDef.Name = words[2] } // Extract the properties from the type var properties []string if strings.HasSuffix(strings.TrimSpace(firstLine), "{") { properties = typeLines[1 : len(typeLines)-1] } else { properties = typeLines[2 : len(typeLines)-1] } var accumLines string // Loop through each property line for _, line := range properties { if strings.HasPrefix(strings.TrimSpace(line), "//") || len(strings.TrimSpace(line)) == 0 { continue } if !strings.Contains(line, ";") && typeDef.Base != "enum" { accumLines += line } else { line = accumLines + line accumLines = "" } // Skip function definitions if strings.Contains(line, "(*") { continue } // Create a type definition for the property property := gdnativeapijson.GoProperty{} // Extract the comment if there is one. 
line, comment := getComment(line) property.Comment = comment // Sanitize the line line = strings.TrimSpace(line) line = strings.Split(line, ";")[0] line = strings.Replace(line, "unsigned ", "u", 1) line = strings.Replace(line, "const ", "", 1) // Split the line by spaces words = strings.Split(line, " ") // Check to see if the line is just a comment if words[0] == "//" || (strings.Index(line, "/*") == 0 && strings.Index(line, "*/") == (len(line)-2)) { continue } // Set the property details if typeDef.Base == "enum" { // Strip any commas in the name words[0] = strings.Replace(words[0], ",", "", 1) property.CName = words[0] property.Name = casee.ToPascalCase(strings.Replace(words[0], "GODOT_", "", 1)) } else { if len(words) < 2 { fmt.Println("Skipping irregular line:", line) continue } property.Base = words[0] property.CName = words[1] property.Name = casee.ToPascalCase(strings.Replace(words[1], "godot_", "", 1)) } // Check to see if the property is a pointer type if strings.Contains(property.CName, "*") { property.CName = strings.Replace(property.CName, "*", "", 1) property.Name = strings.Replace(property.Name, "*", "", 1) property.IsPointer = true } // Skip empty property names if property.Name == "" { continue } if strings.Contains(property.Name, "}") { panic(fmt.Errorf("malformed Name: %+v", property)) } // Append the property to the type definition typeDef.Properties = append(typeDef.Properties, property) } return typeDef } type block int8 const ( externBlock block = iota typedefBlock localStructBlock enumBlock ) // findTypeDefs will return a list of type definition lines. func findTypeDefs(content []byte) [][]string { lines := strings.Split(string(content), "\n") // Create a structure that will hold the lines that define the type. 
var ( singleType []string foundTypes [][]string blocks []block ) for i, line := range lines { if strings.Index(line, "extern \"C\" {") == 0 { // fmt.Println("Line", i ,": START EXTERN BLOCK") blocks = append(blocks, externBlock) continue } else if strings.Index(line, "struct ") == 0 { // fmt.Println("Line", i ,": START LOCAL STRUCT BLOCK") blocks = append(blocks, localStructBlock) continue } else if strings.Index(line, "enum ") == 0 { // fmt.Println("Line", i ,": START ENUM BLOCK") blocks = append(blocks, enumBlock) continue } else if strings.Index(line, "}") == 0 { if len(blocks) == 0 { panic(fmt.Sprintln("\tLine", i, ": extra closing bracket encountered", line)) } n := len(blocks)-1 b := blocks[n] blocks = blocks[:n] switch b { case localStructBlock: // fmt.Println("Line", i ,": END LOCAL STRUCT BLOCK") continue case externBlock: // fmt.Println("Line", i ,": END EXTERN BLOCK") continue case enumBlock: // fmt.Println("Line", i ,": END ENUM BLOCK") continue case typedefBlock: singleType = append(singleType, line) foundTypes = append(foundTypes, singleType) // fmt.Println("\tLine", i, ": Type found:\n", strings.Join(singleType, "\n")) // reset singleType = []string{} default: panic(fmt.Sprintln("\tLine", i, ": extra closing curly braket found")) } } else if strings.Index(line, "typedef ") == 0 { // Check to see if this is a single line type and avoid using // the blocks stack if strings.Contains(line, ";") { // Skip if this is a function definition if strings.Contains(line, ")") { fmt.Println("\tLine", i, ": skip function: ", line) continue } singleType = append(singleType, line) foundTypes = append(foundTypes, singleType) // fmt.Println("\tLine", i, ": Single line type found:\n", strings.Join(singleType, "\n")) // reset singleType = []string{} } else { blocks = append(blocks, typedefBlock) singleType = append(singleType, line) } } else if len(blocks) > 0 { b := blocks[len(blocks) - 1] if b == typedefBlock { singleType = append(singleType, line) } } // // If a type 
was found, keep appending our struct lines until we // // reach the end of the definition. // if accumulatingTypedefLines { // //fmt.Println("Line", i, ": Appending line for type found:", line) // // Keep adding the lines to our list of lines until we // // reach an end bracket. // singleType = append(singleType, line) // if strings.Contains(line, "}") { // //fmt.Println("Line", i, ": Found end of type definition.") // accumulatingTypedefLines = false // foundTypes = append(foundTypes, singleType) // singleType = []string{} // } // } } return foundTypes } func strInSlice(a string, list []string) bool { for _, b := range list { if b == a { return true } } return false }
// Only add the type if it's not in our exclude list. if !strInSlice(typeDef.CName, excludeStructs) && !strInSlice(typeDef.CHeaderFilename, excludeHeaders) { if tdMap, ok := index[relPath]; ok {
random_line_split
parse.go
// Package types is a package that parses the GDNative headers for type definitions // to create wrapper structures for Go. package types import ( "fmt" "github.com/pinzolo/casee" "io/ioutil" "os" "regexp" "path/filepath" "strings" "github.com/godot-go/godot-go/cmd/gdnativeapijson" ) var cTypeRegex = regexp.MustCompile(`(const)?\s*([\w_][\w_\d]*)\s*(\**)`) // GlobalMethods contains the list of methods not associated with a GoTypeDef type GlobalMethods []gdnativeapijson.GoMethod // ConstructorIndex indexes by gdnativeapijson.GoTypeDef.CName type ConstructorIndex map[string][]gdnativeapijson.GoMethod // MethodIndex indexes by gdnativeapijson.GoTypeDef.CName type MethodIndex map[string][]gdnativeapijson.GoMethod // GoTypeDefIndex indexes by C header file name and then by C typedef name type GoTypeDefIndex map[string]map[string]gdnativeapijson.GoTypeDef // parseGodotHeaders will parse the GDNative headers. Takes a list of headers/structs to ignore. // Definitions in the given headers and definitions // with the given name will not be added to the returned list of type definitions. // We'll need to manually create these structures. 
func parseGodotHeaders( packagePath string, constructorIndex ConstructorIndex, methodIndex MethodIndex, excludeHeaders, excludeStructs []string) GoTypeDefIndex { var ( index = GoTypeDefIndex{} relPath string err error godotHeaderPath = filepath.Join(packagePath, "godot_headers") ) // Walk through all of the godot filename files err = filepath.Walk(godotHeaderPath, func(path string, f os.FileInfo, err error) error { if !f.IsDir() && filepath.Ext(path) == ".h" { relPath, err = filepath.Rel(godotHeaderPath, path) if err != nil { panic(err) } // Read the filename content, err := ioutil.ReadFile(path) if err != nil { panic(err) } // Find all of the type definitions in the filename file // fmt.Println("Parsing File ", path, "...") foundTypesLines := findTypeDefs(content) // After extracting the lines, we can now parse the type definition to // a structure that we can use to build a Go wrapper. for _, foundTypeLines := range foundTypesLines { typeDef := parseTypeDef(foundTypeLines, relPath) typeDef.Constructors = constructorIndex[typeDef.CName] typeDef.Methods = methodIndex[typeDef.CName] // Only add the type if it's not in our exclude list. if !strInSlice(typeDef.CName, excludeStructs) && !strInSlice(typeDef.CHeaderFilename, excludeHeaders) { if tdMap, ok := index[relPath]; ok { tdMap[typeDef.CName] = typeDef } else { index[relPath] = map[string]gdnativeapijson.GoTypeDef{ typeDef.CName: typeDef, } } } } } return nil }) if err != nil { panic(err) } return index } func parseTypeDef(typeLines []string, headerName string) gdnativeapijson.GoTypeDef
type block int8 const ( externBlock block = iota typedefBlock localStructBlock enumBlock ) // findTypeDefs will return a list of type definition lines. func findTypeDefs(content []byte) [][]string { lines := strings.Split(string(content), "\n") // Create a structure that will hold the lines that define the type. var ( singleType []string foundTypes [][]string blocks []block ) for i, line := range lines { if strings.Index(line, "extern \"C\" {") == 0 { // fmt.Println("Line", i ,": START EXTERN BLOCK") blocks = append(blocks, externBlock) continue } else if strings.Index(line, "struct ") == 0 { // fmt.Println("Line", i ,": START LOCAL STRUCT BLOCK") blocks = append(blocks, localStructBlock) continue } else if strings.Index(line, "enum ") == 0 { // fmt.Println("Line", i ,": START ENUM BLOCK") blocks = append(blocks, enumBlock) continue } else if strings.Index(line, "}") == 0 { if len(blocks) == 0 { panic(fmt.Sprintln("\tLine", i, ": extra closing bracket encountered", line)) } n := len(blocks)-1 b := blocks[n] blocks = blocks[:n] switch b { case localStructBlock: // fmt.Println("Line", i ,": END LOCAL STRUCT BLOCK") continue case externBlock: // fmt.Println("Line", i ,": END EXTERN BLOCK") continue case enumBlock: // fmt.Println("Line", i ,": END ENUM BLOCK") continue case typedefBlock: singleType = append(singleType, line) foundTypes = append(foundTypes, singleType) // fmt.Println("\tLine", i, ": Type found:\n", strings.Join(singleType, "\n")) // reset singleType = []string{} default: panic(fmt.Sprintln("\tLine", i, ": extra closing curly braket found")) } } else if strings.Index(line, "typedef ") == 0 { // Check to see if this is a single line type and avoid using // the blocks stack if strings.Contains(line, ";") { // Skip if this is a function definition if strings.Contains(line, ")") { fmt.Println("\tLine", i, ": skip function: ", line) continue } singleType = append(singleType, line) foundTypes = append(foundTypes, singleType) // fmt.Println("\tLine", i, ": 
Single line type found:\n", strings.Join(singleType, "\n")) // reset singleType = []string{} } else { blocks = append(blocks, typedefBlock) singleType = append(singleType, line) } } else if len(blocks) > 0 { b := blocks[len(blocks) - 1] if b == typedefBlock { singleType = append(singleType, line) } } // // If a type was found, keep appending our struct lines until we // // reach the end of the definition. // if accumulatingTypedefLines { // //fmt.Println("Line", i, ": Appending line for type found:", line) // // Keep adding the lines to our list of lines until we // // reach an end bracket. // singleType = append(singleType, line) // if strings.Contains(line, "}") { // //fmt.Println("Line", i, ": Found end of type definition.") // accumulatingTypedefLines = false // foundTypes = append(foundTypes, singleType) // singleType = []string{} // } // } } return foundTypes } func strInSlice(a string, list []string) bool { for _, b := range list { if b == a { return true } } return false }
{ // Create a structure for our type definition. typeDef := gdnativeapijson.GoTypeDef{ CHeaderFilename: headerName, Properties: []gdnativeapijson.GoProperty{}, } // Small function for splitting a line to get the uncommented line and // get the comment itself. getComment := func(line string) (def, comment string) { halves := strings.Split(line, "//") def = halves[0] if len(halves) > 1 { comment = strings.TrimSpace(halves[1]) } if strings.HasPrefix(comment, "/") { comment = strings.Replace(comment, "/", "", 1) } return def, comment } // If the type definition is a single line, handle it a little differently if len(typeLines) == 1 { // Extract the comment if there is one. line, comment := getComment(typeLines[0]) // Check to see if the property is a pointer type if strings.Contains(line, "*") { line = strings.Replace(line, "*", "", 1) typeDef.IsPointer = true } var err error // Get the words of the line words := strings.Split(line, " ") typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1) goTypeName, usage := gdnativeapijson.ToGoTypeName(typeDef.CName) typeDef.Name = goTypeName typeDef.Base = words[len(words)-2] typeDef.Comment = comment typeDef.Usage = usage if err != nil { panic(fmt.Errorf("%s\n%w", line, err)) } return typeDef } // Extract the name of the type. 
lastLine := typeLines[len(typeLines)-1] words := strings.Split(lastLine, " ") typeDef.CName = strings.Replace(words[len(words)-1], ";", "", 1) var err error // Extract the base type firstLine := typeLines[0] words = strings.Split(firstLine, " ") typeDef.Base = words[1] if err != nil { panic(fmt.Errorf("%s\n%w", strings.Join(typeLines, "\n"), err)) } // Convert the name of the type to a Go name typeDef.Name, _ = gdnativeapijson.ToGoTypeName(typeDef.CName) if len(typeDef.Name) == 0 { typeDef.Name = words[2] } // Extract the properties from the type var properties []string if strings.HasSuffix(strings.TrimSpace(firstLine), "{") { properties = typeLines[1 : len(typeLines)-1] } else { properties = typeLines[2 : len(typeLines)-1] } var accumLines string // Loop through each property line for _, line := range properties { if strings.HasPrefix(strings.TrimSpace(line), "//") || len(strings.TrimSpace(line)) == 0 { continue } if !strings.Contains(line, ";") && typeDef.Base != "enum" { accumLines += line } else { line = accumLines + line accumLines = "" } // Skip function definitions if strings.Contains(line, "(*") { continue } // Create a type definition for the property property := gdnativeapijson.GoProperty{} // Extract the comment if there is one. 
line, comment := getComment(line) property.Comment = comment // Sanitize the line line = strings.TrimSpace(line) line = strings.Split(line, ";")[0] line = strings.Replace(line, "unsigned ", "u", 1) line = strings.Replace(line, "const ", "", 1) // Split the line by spaces words = strings.Split(line, " ") // Check to see if the line is just a comment if words[0] == "//" || (strings.Index(line, "/*") == 0 && strings.Index(line, "*/") == (len(line)-2)) { continue } // Set the property details if typeDef.Base == "enum" { // Strip any commas in the name words[0] = strings.Replace(words[0], ",", "", 1) property.CName = words[0] property.Name = casee.ToPascalCase(strings.Replace(words[0], "GODOT_", "", 1)) } else { if len(words) < 2 { fmt.Println("Skipping irregular line:", line) continue } property.Base = words[0] property.CName = words[1] property.Name = casee.ToPascalCase(strings.Replace(words[1], "godot_", "", 1)) } // Check to see if the property is a pointer type if strings.Contains(property.CName, "*") { property.CName = strings.Replace(property.CName, "*", "", 1) property.Name = strings.Replace(property.Name, "*", "", 1) property.IsPointer = true } // Skip empty property names if property.Name == "" { continue } if strings.Contains(property.Name, "}") { panic(fmt.Errorf("malformed Name: %+v", property)) } // Append the property to the type definition typeDef.Properties = append(typeDef.Properties, property) } return typeDef }
identifier_body
weapon.rs
use std::{ path::Path, sync::{Arc, Mutex}, }; use crate::{ projectile::{ Projectile, ProjectileKind, }, actor::Actor, HandleFromSelf, GameTime, level::CleanUp, }; use rg3d::{ physics::{RayCastOptions, Physics}, sound::{ source::Source, buffer::BufferKind, context::Context, }, engine::resource_manager::ResourceManager, resource::{ model::Model, }, scene::{ SceneInterfaceMut, node::Node, Scene, graph::Graph, light::{ LightKind, LightBuilder, PointLight, }, base::{BaseBuilder, AsBase}, }, core::{ pool::{ Pool, PoolIterator, PoolIteratorMut, Handle, }, color::Color, visitor::{ Visit, VisitResult, Visitor, }, math::{vec3::Vec3, ray::Ray}, }, }; #[derive(Copy, Clone, PartialEq, Eq)] pub enum WeaponKind { M4, Ak47, PlasmaRifle, } impl WeaponKind { pub fn id(&self) -> u32 { match self { WeaponKind::M4 => 0, WeaponKind::Ak47 => 1, WeaponKind::PlasmaRifle => 2 } } pub fn new(id: u32) -> Result<Self, String> { match id { 0 => Ok(WeaponKind::M4), 1 => Ok(WeaponKind::Ak47), 2 => Ok(WeaponKind::PlasmaRifle), _ => return Err(format!("unknown weapon kind {}", id)) } } } pub struct Weapon { self_handle: Handle<Weapon>, kind: WeaponKind, model: Handle<Node>, laser_dot: Handle<Node>, shot_point: Handle<Node>, offset: Vec3, dest_offset: Vec3, last_shot_time: f64, shot_position: Vec3, owner: Handle<Actor>, ammo: u32, definition: &'static WeaponDefinition, } pub struct WeaponDefinition { model: &'static str, shot_sound: &'static str, ammo: u32, } impl HandleFromSelf<Weapon> for Weapon { fn self_handle(&self) -> Handle<Weapon> { self.self_handle } } impl Default for Weapon { fn default() -> Self { Self { self_handle: Default::default(), kind: WeaponKind::M4, laser_dot: Handle::NONE, model: Handle::NONE, offset: Vec3::ZERO, shot_point: Handle::NONE, dest_offset: Vec3::ZERO, last_shot_time: 0.0, shot_position: Vec3::ZERO, owner: Handle::NONE, ammo: 250, definition: Self::get_definition(WeaponKind::M4), } } } impl Visit for Weapon { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> 
VisitResult { visitor.enter_region(name)?; let mut kind_id = self.kind.id(); kind_id.visit("KindId", visitor)?; if visitor.is_reading() { self.kind = WeaponKind::new(kind_id)? } self.definition = Self::get_definition(self.kind); self.self_handle.visit("SelfHandle", visitor)?; self.model.visit("Model", visitor)?; self.laser_dot.visit("LaserDot", visitor)?; self.offset.visit("Offset", visitor)?; self.dest_offset.visit("DestOffset", visitor)?; self.last_shot_time.visit("LastShotTime", visitor)?; self.owner.visit("Owner", visitor)?; self.ammo.visit("Ammo", visitor)?; visitor.leave_region() } } impl Weapon { pub fn get_definition(kind: WeaponKind) -> &'static WeaponDefinition { match kind { WeaponKind::M4 => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/m4.FBX", shot_sound: "data/sounds/m4_shot.wav", ammo: 115, }; &DEFINITION } WeaponKind::Ak47 => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/ak47.FBX", shot_sound: "data/sounds/m4_shot.wav", ammo: 100, }; &DEFINITION } WeaponKind::PlasmaRifle => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/plasma_rifle.FBX", shot_sound: "data/sounds/plasma_shot.wav", ammo: 40, }; &DEFINITION } } } pub fn new(kind: WeaponKind, resource_manager: &mut ResourceManager, scene: &mut Scene) -> Weapon { let definition = Self::get_definition(kind); let model = Model::instantiate( resource_manager.request_model(Path::new(definition.model)).unwrap(), scene).root; let SceneInterfaceMut { graph, .. 
} = scene.interface_mut(); let laser_dot = graph.add_node(Node::Light( LightBuilder::new(LightKind::Point(PointLight::new(0.5)), BaseBuilder::new()) .with_color(Color::opaque(255, 0, 0)) .cast_shadows(false) .build())); let shot_point = graph.find_by_name(model, "Weapon:ShotPoint"); if shot_point.is_none() { println!("Shot point not found!"); } Weapon { kind, laser_dot, model, shot_point, definition, ammo: definition.ammo, ..Default::default() } } pub fn set_visibility(&self, visibility: bool, graph: &mut Graph) { graph.get_mut(self.model).base_mut().set_visibility(visibility); graph.get_mut(self.laser_dot).base_mut().set_visibility(visibility); } pub fn get_model(&self) -> Handle<Node> { self.model } pub fn update(&mut self, scene: &mut Scene) { let SceneInterfaceMut { graph, physics, .. } = scene.interface_mut(); self.offset.follow(&self.dest_offset, 0.2); self.update_laser_sight(graph, physics); let node = graph.get_mut(self.model); node.base_mut().get_local_transform_mut().set_position(self.offset); self.shot_position = node.base().get_global_position(); } fn get_shot_position(&self, graph: &Graph) -> Vec3 { if self.shot_point.is_some() { graph.get(self.shot_point).base().get_global_position() } else { // Fallback graph.get(self.model).base().get_global_position() } } pub fn get_kind(&self) -> WeaponKind { self.kind } pub fn add_ammo(&mut self, amount: u32) { self.ammo += amount; } fn update_laser_sight(&self, graph: &mut Graph, physics: &Physics) { let mut laser_dot_position = Vec3::ZERO; let model = graph.get(self.model); let begin = model.base().get_global_position(); let end = begin + model.base().get_look_vector().scale(100.0); if let Some(ray) = Ray::from_two_points(&begin, &end) { let mut result = Vec::new(); if physics.ray_cast(&ray, RayCastOptions::default(), &mut result) { let offset = result[0].normal.normalized().unwrap_or_default().scale(0.2); laser_dot_position = result[0].position + offset; } } 
graph.get_mut(self.laser_dot).base_mut().get_local_transform_mut().set_position(laser_dot_position); } fn play_shot_sound(&self, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>) { let mut sound_context = sound_context.lock().unwrap(); let shot_buffer = resource_manager.request_sound_buffer( Path::new(self.definition.shot_sound), BufferKind::Normal).unwrap(); let mut shot_sound = Source::new_spatial(shot_buffer).unwrap(); shot_sound.set_play_once(true); shot_sound.play(); shot_sound.as_spatial_mut().set_position(&self.shot_position); sound_context.add_source(shot_sound); } pub fn get_ammo(&self) -> u32 { self.ammo } pub fn get_owner(&self) -> Handle<Actor> { self.owner } pub fn set_owner(&mut self, owner: Handle<Actor>) { self.owner = owner; } pub fn try_shoot(&mut self, scene: &mut Scene, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>, time: GameTime, weapon_velocity: Vec3) -> Option<Projectile> { if self.ammo != 0 && time.elapsed - self.last_shot_time >= 0.1 { self.ammo -= 1; self.offset = Vec3::new(0.0, 0.0, -0.05); self.last_shot_time = time.elapsed; self.play_shot_sound(resource_manager, sound_context); let (dir, pos) = { let graph = scene.interface().graph; (graph.get(self.model).base().get_look_vector(), self.get_shot_position(graph)) }; match self.kind { WeaponKind::M4 | WeaponKind::Ak47 => { Some(Projectile::new(ProjectileKind::Bullet, resource_manager, scene, dir, pos, self.self_handle, weapon_velocity)) } WeaponKind::PlasmaRifle => { Some(Projectile::new(ProjectileKind::Plasma, resource_manager, scene, dir, pos, self.self_handle, weapon_velocity)) } } } else { None } } } impl CleanUp for Weapon { fn clean_up(&mut self, scene: &mut Scene) { let SceneInterfaceMut { graph, .. 
} = scene.interface_mut(); graph.remove_node(self.model); graph.remove_node(self.laser_dot); } } pub struct WeaponContainer { pool: Pool<Weapon> } impl WeaponContainer { pub fn new() -> Self { Self { pool: Pool::new() } } pub fn add(&mut self, weapon: Weapon) -> Handle<Weapon> { let handle = self.pool.spawn(weapon); self.pool.borrow_mut(handle).self_handle = handle; handle } pub fn
(&self) -> PoolIterator<Weapon> { self.pool.iter() } pub fn iter_mut(&mut self) -> PoolIteratorMut<Weapon> { self.pool.iter_mut() } pub fn get(&self, handle: Handle<Weapon>) -> &Weapon { self.pool.borrow(handle) } pub fn get_mut(&mut self, handle: Handle<Weapon>) -> &mut Weapon { self.pool.borrow_mut(handle) } pub fn update(&mut self, scene: &mut Scene) { for weapon in self.pool.iter_mut() { weapon.update(scene) } } } impl Visit for WeaponContainer { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; self.pool.visit("Pool", visitor)?; visitor.leave_region() } }
iter
identifier_name
weapon.rs
use std::{ path::Path, sync::{Arc, Mutex}, }; use crate::{ projectile::{ Projectile, ProjectileKind, }, actor::Actor, HandleFromSelf, GameTime, level::CleanUp, }; use rg3d::{ physics::{RayCastOptions, Physics}, sound::{ source::Source, buffer::BufferKind, context::Context, }, engine::resource_manager::ResourceManager, resource::{ model::Model, }, scene::{ SceneInterfaceMut, node::Node, Scene, graph::Graph, light::{ LightKind, LightBuilder, PointLight, }, base::{BaseBuilder, AsBase}, }, core::{ pool::{ Pool, PoolIterator, PoolIteratorMut, Handle, }, color::Color, visitor::{ Visit, VisitResult, Visitor, }, math::{vec3::Vec3, ray::Ray}, }, }; #[derive(Copy, Clone, PartialEq, Eq)] pub enum WeaponKind { M4, Ak47, PlasmaRifle, } impl WeaponKind { pub fn id(&self) -> u32 { match self { WeaponKind::M4 => 0, WeaponKind::Ak47 => 1, WeaponKind::PlasmaRifle => 2 } } pub fn new(id: u32) -> Result<Self, String> { match id { 0 => Ok(WeaponKind::M4), 1 => Ok(WeaponKind::Ak47), 2 => Ok(WeaponKind::PlasmaRifle), _ => return Err(format!("unknown weapon kind {}", id)) } } } pub struct Weapon { self_handle: Handle<Weapon>, kind: WeaponKind, model: Handle<Node>, laser_dot: Handle<Node>, shot_point: Handle<Node>, offset: Vec3, dest_offset: Vec3, last_shot_time: f64, shot_position: Vec3, owner: Handle<Actor>, ammo: u32, definition: &'static WeaponDefinition, } pub struct WeaponDefinition { model: &'static str, shot_sound: &'static str, ammo: u32, } impl HandleFromSelf<Weapon> for Weapon { fn self_handle(&self) -> Handle<Weapon> { self.self_handle } } impl Default for Weapon { fn default() -> Self { Self { self_handle: Default::default(), kind: WeaponKind::M4, laser_dot: Handle::NONE, model: Handle::NONE, offset: Vec3::ZERO, shot_point: Handle::NONE, dest_offset: Vec3::ZERO, last_shot_time: 0.0, shot_position: Vec3::ZERO, owner: Handle::NONE, ammo: 250, definition: Self::get_definition(WeaponKind::M4), } } } impl Visit for Weapon { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> 
VisitResult { visitor.enter_region(name)?; let mut kind_id = self.kind.id(); kind_id.visit("KindId", visitor)?; if visitor.is_reading() { self.kind = WeaponKind::new(kind_id)? } self.definition = Self::get_definition(self.kind); self.self_handle.visit("SelfHandle", visitor)?; self.model.visit("Model", visitor)?; self.laser_dot.visit("LaserDot", visitor)?; self.offset.visit("Offset", visitor)?; self.dest_offset.visit("DestOffset", visitor)?; self.last_shot_time.visit("LastShotTime", visitor)?; self.owner.visit("Owner", visitor)?; self.ammo.visit("Ammo", visitor)?; visitor.leave_region() } } impl Weapon { pub fn get_definition(kind: WeaponKind) -> &'static WeaponDefinition { match kind { WeaponKind::M4 => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/m4.FBX", shot_sound: "data/sounds/m4_shot.wav", ammo: 115, }; &DEFINITION } WeaponKind::Ak47 => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/ak47.FBX", shot_sound: "data/sounds/m4_shot.wav", ammo: 100, }; &DEFINITION } WeaponKind::PlasmaRifle => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/plasma_rifle.FBX", shot_sound: "data/sounds/plasma_shot.wav", ammo: 40, }; &DEFINITION } } } pub fn new(kind: WeaponKind, resource_manager: &mut ResourceManager, scene: &mut Scene) -> Weapon { let definition = Self::get_definition(kind); let model = Model::instantiate( resource_manager.request_model(Path::new(definition.model)).unwrap(), scene).root; let SceneInterfaceMut { graph, .. 
} = scene.interface_mut(); let laser_dot = graph.add_node(Node::Light( LightBuilder::new(LightKind::Point(PointLight::new(0.5)), BaseBuilder::new()) .with_color(Color::opaque(255, 0, 0)) .cast_shadows(false) .build())); let shot_point = graph.find_by_name(model, "Weapon:ShotPoint"); if shot_point.is_none() { println!("Shot point not found!"); } Weapon { kind, laser_dot, model, shot_point, definition, ammo: definition.ammo, ..Default::default() } } pub fn set_visibility(&self, visibility: bool, graph: &mut Graph) { graph.get_mut(self.model).base_mut().set_visibility(visibility); graph.get_mut(self.laser_dot).base_mut().set_visibility(visibility); } pub fn get_model(&self) -> Handle<Node> { self.model } pub fn update(&mut self, scene: &mut Scene) { let SceneInterfaceMut { graph, physics, .. } = scene.interface_mut(); self.offset.follow(&self.dest_offset, 0.2); self.update_laser_sight(graph, physics); let node = graph.get_mut(self.model); node.base_mut().get_local_transform_mut().set_position(self.offset); self.shot_position = node.base().get_global_position(); } fn get_shot_position(&self, graph: &Graph) -> Vec3 { if self.shot_point.is_some() { graph.get(self.shot_point).base().get_global_position() } else { // Fallback graph.get(self.model).base().get_global_position() } } pub fn get_kind(&self) -> WeaponKind { self.kind } pub fn add_ammo(&mut self, amount: u32) { self.ammo += amount; } fn update_laser_sight(&self, graph: &mut Graph, physics: &Physics) { let mut laser_dot_position = Vec3::ZERO; let model = graph.get(self.model); let begin = model.base().get_global_position(); let end = begin + model.base().get_look_vector().scale(100.0); if let Some(ray) = Ray::from_two_points(&begin, &end) { let mut result = Vec::new(); if physics.ray_cast(&ray, RayCastOptions::default(), &mut result) { let offset = result[0].normal.normalized().unwrap_or_default().scale(0.2); laser_dot_position = result[0].position + offset; } } 
graph.get_mut(self.laser_dot).base_mut().get_local_transform_mut().set_position(laser_dot_position); } fn play_shot_sound(&self, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>) { let mut sound_context = sound_context.lock().unwrap(); let shot_buffer = resource_manager.request_sound_buffer( Path::new(self.definition.shot_sound), BufferKind::Normal).unwrap(); let mut shot_sound = Source::new_spatial(shot_buffer).unwrap(); shot_sound.set_play_once(true); shot_sound.play(); shot_sound.as_spatial_mut().set_position(&self.shot_position); sound_context.add_source(shot_sound); } pub fn get_ammo(&self) -> u32 { self.ammo } pub fn get_owner(&self) -> Handle<Actor> { self.owner } pub fn set_owner(&mut self, owner: Handle<Actor>) { self.owner = owner; } pub fn try_shoot(&mut self, scene: &mut Scene, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>, time: GameTime, weapon_velocity: Vec3) -> Option<Projectile>
} impl CleanUp for Weapon { fn clean_up(&mut self, scene: &mut Scene) { let SceneInterfaceMut { graph, .. } = scene.interface_mut(); graph.remove_node(self.model); graph.remove_node(self.laser_dot); } } pub struct WeaponContainer { pool: Pool<Weapon> } impl WeaponContainer { pub fn new() -> Self { Self { pool: Pool::new() } } pub fn add(&mut self, weapon: Weapon) -> Handle<Weapon> { let handle = self.pool.spawn(weapon); self.pool.borrow_mut(handle).self_handle = handle; handle } pub fn iter(&self) -> PoolIterator<Weapon> { self.pool.iter() } pub fn iter_mut(&mut self) -> PoolIteratorMut<Weapon> { self.pool.iter_mut() } pub fn get(&self, handle: Handle<Weapon>) -> &Weapon { self.pool.borrow(handle) } pub fn get_mut(&mut self, handle: Handle<Weapon>) -> &mut Weapon { self.pool.borrow_mut(handle) } pub fn update(&mut self, scene: &mut Scene) { for weapon in self.pool.iter_mut() { weapon.update(scene) } } } impl Visit for WeaponContainer { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; self.pool.visit("Pool", visitor)?; visitor.leave_region() } }
{ if self.ammo != 0 && time.elapsed - self.last_shot_time >= 0.1 { self.ammo -= 1; self.offset = Vec3::new(0.0, 0.0, -0.05); self.last_shot_time = time.elapsed; self.play_shot_sound(resource_manager, sound_context); let (dir, pos) = { let graph = scene.interface().graph; (graph.get(self.model).base().get_look_vector(), self.get_shot_position(graph)) }; match self.kind { WeaponKind::M4 | WeaponKind::Ak47 => { Some(Projectile::new(ProjectileKind::Bullet, resource_manager, scene, dir, pos, self.self_handle, weapon_velocity)) } WeaponKind::PlasmaRifle => { Some(Projectile::new(ProjectileKind::Plasma, resource_manager, scene, dir, pos, self.self_handle, weapon_velocity)) } } } else { None } }
identifier_body
weapon.rs
use std::{ path::Path, sync::{Arc, Mutex}, }; use crate::{ projectile::{ Projectile, ProjectileKind, }, actor::Actor, HandleFromSelf, GameTime, level::CleanUp, }; use rg3d::{ physics::{RayCastOptions, Physics}, sound::{ source::Source, buffer::BufferKind, context::Context, }, engine::resource_manager::ResourceManager, resource::{ model::Model, }, scene::{ SceneInterfaceMut, node::Node, Scene, graph::Graph, light::{ LightKind, LightBuilder, PointLight, }, base::{BaseBuilder, AsBase}, }, core::{ pool::{ Pool, PoolIterator, PoolIteratorMut, Handle, }, color::Color, visitor::{ Visit, VisitResult, Visitor, }, math::{vec3::Vec3, ray::Ray}, }, }; #[derive(Copy, Clone, PartialEq, Eq)] pub enum WeaponKind { M4, Ak47, PlasmaRifle, } impl WeaponKind { pub fn id(&self) -> u32 { match self { WeaponKind::M4 => 0, WeaponKind::Ak47 => 1, WeaponKind::PlasmaRifle => 2 } } pub fn new(id: u32) -> Result<Self, String> { match id { 0 => Ok(WeaponKind::M4), 1 => Ok(WeaponKind::Ak47), 2 => Ok(WeaponKind::PlasmaRifle), _ => return Err(format!("unknown weapon kind {}", id)) } } } pub struct Weapon { self_handle: Handle<Weapon>, kind: WeaponKind, model: Handle<Node>, laser_dot: Handle<Node>, shot_point: Handle<Node>, offset: Vec3, dest_offset: Vec3, last_shot_time: f64, shot_position: Vec3, owner: Handle<Actor>, ammo: u32, definition: &'static WeaponDefinition, } pub struct WeaponDefinition { model: &'static str, shot_sound: &'static str, ammo: u32, } impl HandleFromSelf<Weapon> for Weapon { fn self_handle(&self) -> Handle<Weapon> { self.self_handle } } impl Default for Weapon { fn default() -> Self { Self { self_handle: Default::default(), kind: WeaponKind::M4, laser_dot: Handle::NONE, model: Handle::NONE, offset: Vec3::ZERO, shot_point: Handle::NONE, dest_offset: Vec3::ZERO, last_shot_time: 0.0, shot_position: Vec3::ZERO, owner: Handle::NONE, ammo: 250, definition: Self::get_definition(WeaponKind::M4), } } } impl Visit for Weapon { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> 
VisitResult { visitor.enter_region(name)?; let mut kind_id = self.kind.id(); kind_id.visit("KindId", visitor)?; if visitor.is_reading() { self.kind = WeaponKind::new(kind_id)? } self.definition = Self::get_definition(self.kind); self.self_handle.visit("SelfHandle", visitor)?; self.model.visit("Model", visitor)?; self.laser_dot.visit("LaserDot", visitor)?; self.offset.visit("Offset", visitor)?; self.dest_offset.visit("DestOffset", visitor)?; self.last_shot_time.visit("LastShotTime", visitor)?; self.owner.visit("Owner", visitor)?; self.ammo.visit("Ammo", visitor)?; visitor.leave_region() } } impl Weapon { pub fn get_definition(kind: WeaponKind) -> &'static WeaponDefinition { match kind { WeaponKind::M4 => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/m4.FBX", shot_sound: "data/sounds/m4_shot.wav", ammo: 115, }; &DEFINITION } WeaponKind::Ak47 => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/ak47.FBX", shot_sound: "data/sounds/m4_shot.wav", ammo: 100, }; &DEFINITION } WeaponKind::PlasmaRifle => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/plasma_rifle.FBX", shot_sound: "data/sounds/plasma_shot.wav", ammo: 40, }; &DEFINITION } } } pub fn new(kind: WeaponKind, resource_manager: &mut ResourceManager, scene: &mut Scene) -> Weapon { let definition = Self::get_definition(kind); let model = Model::instantiate( resource_manager.request_model(Path::new(definition.model)).unwrap(), scene).root; let SceneInterfaceMut { graph, .. 
} = scene.interface_mut(); let laser_dot = graph.add_node(Node::Light( LightBuilder::new(LightKind::Point(PointLight::new(0.5)), BaseBuilder::new()) .with_color(Color::opaque(255, 0, 0)) .cast_shadows(false) .build())); let shot_point = graph.find_by_name(model, "Weapon:ShotPoint"); if shot_point.is_none() { println!("Shot point not found!"); } Weapon { kind, laser_dot, model, shot_point, definition, ammo: definition.ammo, ..Default::default() } } pub fn set_visibility(&self, visibility: bool, graph: &mut Graph) { graph.get_mut(self.model).base_mut().set_visibility(visibility); graph.get_mut(self.laser_dot).base_mut().set_visibility(visibility); } pub fn get_model(&self) -> Handle<Node> { self.model } pub fn update(&mut self, scene: &mut Scene) { let SceneInterfaceMut { graph, physics, .. } = scene.interface_mut(); self.offset.follow(&self.dest_offset, 0.2); self.update_laser_sight(graph, physics); let node = graph.get_mut(self.model); node.base_mut().get_local_transform_mut().set_position(self.offset); self.shot_position = node.base().get_global_position(); } fn get_shot_position(&self, graph: &Graph) -> Vec3 { if self.shot_point.is_some() { graph.get(self.shot_point).base().get_global_position() } else { // Fallback graph.get(self.model).base().get_global_position() } } pub fn get_kind(&self) -> WeaponKind { self.kind } pub fn add_ammo(&mut self, amount: u32) { self.ammo += amount; } fn update_laser_sight(&self, graph: &mut Graph, physics: &Physics) { let mut laser_dot_position = Vec3::ZERO; let model = graph.get(self.model); let begin = model.base().get_global_position(); let end = begin + model.base().get_look_vector().scale(100.0); if let Some(ray) = Ray::from_two_points(&begin, &end) { let mut result = Vec::new(); if physics.ray_cast(&ray, RayCastOptions::default(), &mut result) { let offset = result[0].normal.normalized().unwrap_or_default().scale(0.2); laser_dot_position = result[0].position + offset; } } 
graph.get_mut(self.laser_dot).base_mut().get_local_transform_mut().set_position(laser_dot_position); } fn play_shot_sound(&self, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>) { let mut sound_context = sound_context.lock().unwrap(); let shot_buffer = resource_manager.request_sound_buffer( Path::new(self.definition.shot_sound), BufferKind::Normal).unwrap(); let mut shot_sound = Source::new_spatial(shot_buffer).unwrap(); shot_sound.set_play_once(true); shot_sound.play(); shot_sound.as_spatial_mut().set_position(&self.shot_position); sound_context.add_source(shot_sound); } pub fn get_ammo(&self) -> u32 { self.ammo } pub fn get_owner(&self) -> Handle<Actor> { self.owner } pub fn set_owner(&mut self, owner: Handle<Actor>) { self.owner = owner; } pub fn try_shoot(&mut self, scene: &mut Scene, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>, time: GameTime, weapon_velocity: Vec3) -> Option<Projectile> { if self.ammo != 0 && time.elapsed - self.last_shot_time >= 0.1 { self.ammo -= 1; self.offset = Vec3::new(0.0, 0.0, -0.05); self.last_shot_time = time.elapsed; self.play_shot_sound(resource_manager, sound_context); let (dir, pos) = { let graph = scene.interface().graph; (graph.get(self.model).base().get_look_vector(), self.get_shot_position(graph)) }; match self.kind { WeaponKind::M4 | WeaponKind::Ak47 => { Some(Projectile::new(ProjectileKind::Bullet, resource_manager, scene, dir, pos, self.self_handle, weapon_velocity)) } WeaponKind::PlasmaRifle => { Some(Projectile::new(ProjectileKind::Plasma, resource_manager, scene, dir, pos, self.self_handle, weapon_velocity)) } } } else { None } } } impl CleanUp for Weapon { fn clean_up(&mut self, scene: &mut Scene) { let SceneInterfaceMut { graph, .. 
} = scene.interface_mut(); graph.remove_node(self.model); graph.remove_node(self.laser_dot); } } pub struct WeaponContainer { pool: Pool<Weapon> } impl WeaponContainer { pub fn new() -> Self { Self { pool: Pool::new() } } pub fn add(&mut self, weapon: Weapon) -> Handle<Weapon> { let handle = self.pool.spawn(weapon); self.pool.borrow_mut(handle).self_handle = handle; handle } pub fn iter(&self) -> PoolIterator<Weapon> { self.pool.iter() } pub fn iter_mut(&mut self) -> PoolIteratorMut<Weapon> { self.pool.iter_mut() } pub fn get(&self, handle: Handle<Weapon>) -> &Weapon { self.pool.borrow(handle) } pub fn get_mut(&mut self, handle: Handle<Weapon>) -> &mut Weapon { self.pool.borrow_mut(handle) } pub fn update(&mut self, scene: &mut Scene) { for weapon in self.pool.iter_mut() {
} impl Visit for WeaponContainer { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; self.pool.visit("Pool", visitor)?; visitor.leave_region() } }
weapon.update(scene) } }
random_line_split
weapon.rs
use std::{ path::Path, sync::{Arc, Mutex}, }; use crate::{ projectile::{ Projectile, ProjectileKind, }, actor::Actor, HandleFromSelf, GameTime, level::CleanUp, }; use rg3d::{ physics::{RayCastOptions, Physics}, sound::{ source::Source, buffer::BufferKind, context::Context, }, engine::resource_manager::ResourceManager, resource::{ model::Model, }, scene::{ SceneInterfaceMut, node::Node, Scene, graph::Graph, light::{ LightKind, LightBuilder, PointLight, }, base::{BaseBuilder, AsBase}, }, core::{ pool::{ Pool, PoolIterator, PoolIteratorMut, Handle, }, color::Color, visitor::{ Visit, VisitResult, Visitor, }, math::{vec3::Vec3, ray::Ray}, }, }; #[derive(Copy, Clone, PartialEq, Eq)] pub enum WeaponKind { M4, Ak47, PlasmaRifle, } impl WeaponKind { pub fn id(&self) -> u32 { match self { WeaponKind::M4 => 0, WeaponKind::Ak47 => 1, WeaponKind::PlasmaRifle => 2 } } pub fn new(id: u32) -> Result<Self, String> { match id { 0 => Ok(WeaponKind::M4), 1 => Ok(WeaponKind::Ak47), 2 => Ok(WeaponKind::PlasmaRifle), _ => return Err(format!("unknown weapon kind {}", id)) } } } pub struct Weapon { self_handle: Handle<Weapon>, kind: WeaponKind, model: Handle<Node>, laser_dot: Handle<Node>, shot_point: Handle<Node>, offset: Vec3, dest_offset: Vec3, last_shot_time: f64, shot_position: Vec3, owner: Handle<Actor>, ammo: u32, definition: &'static WeaponDefinition, } pub struct WeaponDefinition { model: &'static str, shot_sound: &'static str, ammo: u32, } impl HandleFromSelf<Weapon> for Weapon { fn self_handle(&self) -> Handle<Weapon> { self.self_handle } } impl Default for Weapon { fn default() -> Self { Self { self_handle: Default::default(), kind: WeaponKind::M4, laser_dot: Handle::NONE, model: Handle::NONE, offset: Vec3::ZERO, shot_point: Handle::NONE, dest_offset: Vec3::ZERO, last_shot_time: 0.0, shot_position: Vec3::ZERO, owner: Handle::NONE, ammo: 250, definition: Self::get_definition(WeaponKind::M4), } } } impl Visit for Weapon { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> 
VisitResult { visitor.enter_region(name)?; let mut kind_id = self.kind.id(); kind_id.visit("KindId", visitor)?; if visitor.is_reading() { self.kind = WeaponKind::new(kind_id)? } self.definition = Self::get_definition(self.kind); self.self_handle.visit("SelfHandle", visitor)?; self.model.visit("Model", visitor)?; self.laser_dot.visit("LaserDot", visitor)?; self.offset.visit("Offset", visitor)?; self.dest_offset.visit("DestOffset", visitor)?; self.last_shot_time.visit("LastShotTime", visitor)?; self.owner.visit("Owner", visitor)?; self.ammo.visit("Ammo", visitor)?; visitor.leave_region() } } impl Weapon { pub fn get_definition(kind: WeaponKind) -> &'static WeaponDefinition { match kind { WeaponKind::M4 => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/m4.FBX", shot_sound: "data/sounds/m4_shot.wav", ammo: 115, }; &DEFINITION } WeaponKind::Ak47 => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/ak47.FBX", shot_sound: "data/sounds/m4_shot.wav", ammo: 100, }; &DEFINITION } WeaponKind::PlasmaRifle => { static DEFINITION: WeaponDefinition = WeaponDefinition { model: "data/models/plasma_rifle.FBX", shot_sound: "data/sounds/plasma_shot.wav", ammo: 40, }; &DEFINITION } } } pub fn new(kind: WeaponKind, resource_manager: &mut ResourceManager, scene: &mut Scene) -> Weapon { let definition = Self::get_definition(kind); let model = Model::instantiate( resource_manager.request_model(Path::new(definition.model)).unwrap(), scene).root; let SceneInterfaceMut { graph, .. 
} = scene.interface_mut(); let laser_dot = graph.add_node(Node::Light( LightBuilder::new(LightKind::Point(PointLight::new(0.5)), BaseBuilder::new()) .with_color(Color::opaque(255, 0, 0)) .cast_shadows(false) .build())); let shot_point = graph.find_by_name(model, "Weapon:ShotPoint"); if shot_point.is_none() { println!("Shot point not found!"); } Weapon { kind, laser_dot, model, shot_point, definition, ammo: definition.ammo, ..Default::default() } } pub fn set_visibility(&self, visibility: bool, graph: &mut Graph) { graph.get_mut(self.model).base_mut().set_visibility(visibility); graph.get_mut(self.laser_dot).base_mut().set_visibility(visibility); } pub fn get_model(&self) -> Handle<Node> { self.model } pub fn update(&mut self, scene: &mut Scene) { let SceneInterfaceMut { graph, physics, .. } = scene.interface_mut(); self.offset.follow(&self.dest_offset, 0.2); self.update_laser_sight(graph, physics); let node = graph.get_mut(self.model); node.base_mut().get_local_transform_mut().set_position(self.offset); self.shot_position = node.base().get_global_position(); } fn get_shot_position(&self, graph: &Graph) -> Vec3 { if self.shot_point.is_some() { graph.get(self.shot_point).base().get_global_position() } else { // Fallback graph.get(self.model).base().get_global_position() } } pub fn get_kind(&self) -> WeaponKind { self.kind } pub fn add_ammo(&mut self, amount: u32) { self.ammo += amount; } fn update_laser_sight(&self, graph: &mut Graph, physics: &Physics) { let mut laser_dot_position = Vec3::ZERO; let model = graph.get(self.model); let begin = model.base().get_global_position(); let end = begin + model.base().get_look_vector().scale(100.0); if let Some(ray) = Ray::from_two_points(&begin, &end) { let mut result = Vec::new(); if physics.ray_cast(&ray, RayCastOptions::default(), &mut result) { let offset = result[0].normal.normalized().unwrap_or_default().scale(0.2); laser_dot_position = result[0].position + offset; } } 
graph.get_mut(self.laser_dot).base_mut().get_local_transform_mut().set_position(laser_dot_position); } fn play_shot_sound(&self, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>) { let mut sound_context = sound_context.lock().unwrap(); let shot_buffer = resource_manager.request_sound_buffer( Path::new(self.definition.shot_sound), BufferKind::Normal).unwrap(); let mut shot_sound = Source::new_spatial(shot_buffer).unwrap(); shot_sound.set_play_once(true); shot_sound.play(); shot_sound.as_spatial_mut().set_position(&self.shot_position); sound_context.add_source(shot_sound); } pub fn get_ammo(&self) -> u32 { self.ammo } pub fn get_owner(&self) -> Handle<Actor> { self.owner } pub fn set_owner(&mut self, owner: Handle<Actor>) { self.owner = owner; } pub fn try_shoot(&mut self, scene: &mut Scene, resource_manager: &mut ResourceManager, sound_context: Arc<Mutex<Context>>, time: GameTime, weapon_velocity: Vec3) -> Option<Projectile> { if self.ammo != 0 && time.elapsed - self.last_shot_time >= 0.1 { self.ammo -= 1; self.offset = Vec3::new(0.0, 0.0, -0.05); self.last_shot_time = time.elapsed; self.play_shot_sound(resource_manager, sound_context); let (dir, pos) = { let graph = scene.interface().graph; (graph.get(self.model).base().get_look_vector(), self.get_shot_position(graph)) }; match self.kind { WeaponKind::M4 | WeaponKind::Ak47 =>
WeaponKind::PlasmaRifle => { Some(Projectile::new(ProjectileKind::Plasma, resource_manager, scene, dir, pos, self.self_handle, weapon_velocity)) } } } else { None } } } impl CleanUp for Weapon { fn clean_up(&mut self, scene: &mut Scene) { let SceneInterfaceMut { graph, .. } = scene.interface_mut(); graph.remove_node(self.model); graph.remove_node(self.laser_dot); } } pub struct WeaponContainer { pool: Pool<Weapon> } impl WeaponContainer { pub fn new() -> Self { Self { pool: Pool::new() } } pub fn add(&mut self, weapon: Weapon) -> Handle<Weapon> { let handle = self.pool.spawn(weapon); self.pool.borrow_mut(handle).self_handle = handle; handle } pub fn iter(&self) -> PoolIterator<Weapon> { self.pool.iter() } pub fn iter_mut(&mut self) -> PoolIteratorMut<Weapon> { self.pool.iter_mut() } pub fn get(&self, handle: Handle<Weapon>) -> &Weapon { self.pool.borrow(handle) } pub fn get_mut(&mut self, handle: Handle<Weapon>) -> &mut Weapon { self.pool.borrow_mut(handle) } pub fn update(&mut self, scene: &mut Scene) { for weapon in self.pool.iter_mut() { weapon.update(scene) } } } impl Visit for WeaponContainer { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; self.pool.visit("Pool", visitor)?; visitor.leave_region() } }
{ Some(Projectile::new(ProjectileKind::Bullet, resource_manager, scene, dir, pos, self.self_handle, weapon_velocity)) }
conditional_block
unet3d.py
import argparse import lbann import lbann.models import lbann.contrib.args import lbann.contrib.launcher import lbann.modules as lm from lbann.core.util import get_parallel_strategy_args class UNet3D(lm.Module): """The 3D U-Net. See: \"{O}zg\"{u}n \c{C}i\c{c}ek, Ahmed Abdulkadir, Soeren S. Lienkamp, Thomas Brox, and Olaf Ronneberger. "3D U-Net: learning dense volumetric segmentation from sparse annotation." In International conference on medical image computing and computer-assisted intervention, pp. 424-432, 2016. Note that this model assumes the same spatial input/output sizes with extra padding to simplify the implementation. """ global_count = 0 # Static counter, used for default names def __init__(self, name=None): """Initialize 3D U-Net. Args: name (str, optional): Module name (default: 'alexnet_module<index>'). """ UNet3D.global_count += 1 self.instance = 0 self.name = (name if name else "unet3d_module{0}".format(UNet3D.global_count)) # The list of ([down-conv filters], [up-conv filters], deconv filters) self.BLOCKS = [ ([32, 64], [64, 64], 128), # bconv1_down, bconv3_up, deconv3 ([64, 128], [128, 128], 256), # bconv2_down, bconv2_up, deconv2 ([128, 256], [256, 256], 512), # bconv3_down, bconv1_up, deconv1 ] # The list of the number of filters of the "bottom" convolution block self.BOTTOM_BLOCK = [256, 512] # The number of pooling/deconvolution layers self.NUM_LEVELS = len(self.BLOCKS) # Whether PARTITIONED_LEVELS-th pooling/deconvolution is partitioned self.PARTITION_INCLUDE_POOL = True # Deconvolution should have the same number of input/output channels assert self.BLOCKS[-1][2] == self.BOTTOM_BLOCK[1] assert all([self.BLOCKS[x][2] == self.BLOCKS[x+1][1][-1] for x in range(self.NUM_LEVELS-1)]) # Building blocks self.downconvs = [] self.upconvs = [] self.deconvs = [] for i, blocks in enumerate(self.BLOCKS): downBlock, upBlock, deconv = blocks self.downconvs.append(UNet3DConvBlock( downBlock, name="{}_bconv{}_down".format(self.name, i+1))) ui = 
self.NUM_LEVELS-1-i self.upconvs.insert(0, UNet3DConvBlock( upBlock, name="{}_bconv{}_up".format(self.name, ui+1))) self.deconvs.insert(0, Deconvolution3dModule( deconv, 2, stride=2, padding=0, activation=None, bias=False, name="{}_deconv{}".format(self.name, ui+1))) # The bottom convolution self.bottomconv = UNet3DConvBlock( self.BOTTOM_BLOCK, name="{}_bconv_bottom".format(self.name)) # The last convolution self.lastconv = lm.Convolution3dModule( 3, 1, stride=1, padding=0, activation=None, bias=False, name="{}_lconv".format(self.name)) def forward(self, x): self.instance += 1 x_concat = [] for i in range(self.NUM_LEVELS): x = self.downconvs[i](x) x_concat.append(x) x = lbann.Pooling( x, num_dims=3, has_vectors=False, pool_dims_i=2, pool_pads_i=0, pool_strides_i=2, pool_mode="max", name="{}_pool{}_instance{}".format( self.name, i+1, self.instance)) x = self.bottomconv(x) for i in range(self.NUM_LEVELS): x = self.deconvs[i](x) x = self.upconvs[i](x, x_concat=x_concat[self.NUM_LEVELS-1-i]) x = self.lastconv(x) x = lbann.Softmax( x, softmax_mode="channel") return x class UNet3DConvBlock(lm.Module): """Basic block of an optional concatenation layer and a list of 3D convolutional layers. """ def __init__(self, out_channels_list, name): super().__init__() self.name = name self.instance = 0 assert len(out_channels_list) == 2 self.convs = [] for i, channels in enumerate(out_channels_list): self.convs.append(Convolution3dBNModule( channels, 3, stride=1, padding=1, activation=lbann.Relu, bias=False, name="{}_conv_block_{}".format(self.name, i+1))) def forward(self, x, x_concat=None): self.instance += 1 if x_concat is not None: x = lbann.Concatenation( [x, x_concat], axis=0) for c in self.convs: x = c(x) return x class Convolution3dBNModule(lm.Module): """Basic block of a batch-normalization layer, 3D convolutional layer, and an optional activation layer. 
""" def __init__(self, *args, **kwargs): super().__init__() self.name = kwargs["name"] self.activation = None if "activation" not in kwargs.keys() \ else kwargs["activation"] kwargs["activation"] = None self.conv = lm.Convolution3dModule(*args, **kwargs) bn_scale = lbann.Weights( initializer=lbann.ConstantInitializer(value=1.0), name="{}_bn_scale".format(self.name)) bn_bias = lbann.Weights( initializer=lbann.ConstantInitializer(value=0.0), name="{}_bn_bias".format(self.name)) self.bn_weights = [bn_scale, bn_bias] self.instance = 0 def forward(self, x): self.instance += 1 x = self.conv(x) x = lbann.BatchNormalization( x, weights=self.bn_weights, statistics_group_size=-1, name="{}_bn_instance{}".format( self.name, self.instance)) if self.activation is not None: x = self.activation(x) return x class Deconvolution3dModule(lm.ConvolutionModule): """Basic block for 3D deconvolutional neural networks. Applies a deconvolution and a nonlinear activation function. This is a wrapper class for ConvolutionModule. """ def __init__(self, *args, **kwargs): super().__init__(3, transpose=True, *args, **kwargs) def create_unet3d_data_reader(train_dir, test_dir): readers = [] for role, shuffle, role_dir in [ ("train", True, train_dir), ("test", False, test_dir)]: if role_dir is None: continue readers.append(lbann.reader_pb2.Reader( name="hdf5", role=role, shuffle=shuffle, data_file_pattern="{}/*.hdf5".format(role_dir), validation_percent=0, percent_of_data_to_use=1.0, scaling_factor_int16=1.0, hdf5_key_data="volume", hdf5_key_labels="segmentation", hdf5_hyperslab_labels=True, disable_labels=False, disable_responses=True, )) return lbann.reader_pb2.DataReader(reader=readers) def create_unet3d_optimizer(learn_rate): # TODO: This is a temporal optimizer copied from CosomoFlow. adam = lbann.Adam( learn_rate=learn_rate, beta1=0.9, beta2=0.999, eps=1e-8) return adam if __name__ == '__main__': desc = ('Construct and run the 3D U-Net on a 3D segmentation dataset.' 
'Running the experiment is only supported on LC systems.') parser = argparse.ArgumentParser(description=desc) lbann.contrib.args.add_scheduler_arguments(parser) # General arguments parser.add_argument( '--job-name', action='store', default='lbann_unet3d', type=str, help='scheduler job name (default: lbann_unet3d)') parser.add_argument( '--mini-batch-size', action='store', default=1, type=int, help='mini-batch size (default: 1)', metavar='NUM') parser.add_argument( '--num-epochs', action='store', default=5, type=int, help='number of epochs (default: 100)', metavar='NUM') # Model specific arguments parser.add_argument( '--learning-rate', action='store', default=0.001, type=float, help='the initial learning rate (default: 0.001)') parser.add_argument( '--partition-level', action='store', default=4, type=int, help='the spatial partition level (default: 4)') parser.add_argument( '--depth-groups', action='store', type=int, default=4, help='the number of processes for the depth dimension (default: 4)') default_lc_dataset = '/p/gpfs1/brainusr/datasets/LiTS/hdf5_dim128_float' default_train_dir = '{}/train'.format(default_lc_dataset) default_test_dir = '{}/test'.format(default_lc_dataset) parser.add_argument( '--train-dir', action='store', type=str, default=default_train_dir, help='the directory of the training dataset (default: \'{}\')' .format(default_train_dir)) parser.add_argument( '--test-dir', action='store', type=str, default=default_test_dir, help='the directory of the test dataset (default: \'{}\')' .format(default_test_dir)) parser.add_argument( '--dynamically-reclaim-error-signals', action='store_true', help='Allow LBANN to reclaim error signals buffers (default: False)') parser.add_argument( '--batch-job', action='store_true', help='Run as a batch job (default: false)') lbann.contrib.args.add_optimizer_arguments( parser, default_optimizer="adam", default_learning_rate=0.001, ) args = parser.parse_args() parallel_strategy = get_parallel_strategy_args( 
sample_groups=args.mini_batch_size, depth_groups=args.depth_groups) # Construct layer graph input = lbann.Input( target_mode='label_reconstruction') volume = lbann.Identity(input) output = UNet3D()(volume) segmentation = lbann.Identity(input) ce = lbann.CrossEntropy( [output, segmentation], use_labels=True) obj = lbann.ObjectiveFunction([ce]) layers = list(lbann.traverse_layer_graph(input)) for l in layers: l.parallel_strategy = parallel_strategy # Setup model metrics = [lbann.Metric(ce, name='CE', unit='')] callbacks = [ lbann.CallbackPrint(), lbann.CallbackTimer(), lbann.CallbackGPUMemoryUsage(), lbann.CallbackProfiler(skip_init=True), ] # # TODO: Use polynomial learning rate decay (https://github.com/LLNL/lbann/issues/1581) # callbacks.append( # lbann.CallbackPolyLearningRate( # power=1.0, # num_epochs=100, # end_lr=1e-5)) model = lbann.Model( epochs=args.num_epochs, layers=layers, objective_function=obj, callbacks=callbacks, ) # Setup optimizer optimizer = lbann.contrib.args.create_optimizer(args) # Setup data reader data_reader = create_unet3d_data_reader( train_dir=args.train_dir, test_dir=args.test_dir) # Setup trainer trainer = lbann.Trainer(mini_batch_size=args.mini_batch_size) # Runtime parameters/arguments environment = lbann.contrib.args.get_distconv_environment( num_io_partitions=args.depth_groups) if args.dynamically_reclaim_error_signals:
else: environment['LBANN_KEEP_ERROR_SIGNALS'] = 1 lbann_args = ['--use_data_store'] # Run experiment kwargs = lbann.contrib.args.get_scheduler_kwargs(args) lbann.contrib.launcher.run( trainer, model, data_reader, optimizer, job_name=args.job_name, environment=environment, lbann_args=lbann_args, batch_job=args.batch_job, **kwargs)
environment['LBANN_KEEP_ERROR_SIGNALS'] = 0
conditional_block
unet3d.py
import argparse
from lbann.core.util import get_parallel_strategy_args class UNet3D(lm.Module): """The 3D U-Net. See: \"{O}zg\"{u}n \c{C}i\c{c}ek, Ahmed Abdulkadir, Soeren S. Lienkamp, Thomas Brox, and Olaf Ronneberger. "3D U-Net: learning dense volumetric segmentation from sparse annotation." In International conference on medical image computing and computer-assisted intervention, pp. 424-432, 2016. Note that this model assumes the same spatial input/output sizes with extra padding to simplify the implementation. """ global_count = 0 # Static counter, used for default names def __init__(self, name=None): """Initialize 3D U-Net. Args: name (str, optional): Module name (default: 'alexnet_module<index>'). """ UNet3D.global_count += 1 self.instance = 0 self.name = (name if name else "unet3d_module{0}".format(UNet3D.global_count)) # The list of ([down-conv filters], [up-conv filters], deconv filters) self.BLOCKS = [ ([32, 64], [64, 64], 128), # bconv1_down, bconv3_up, deconv3 ([64, 128], [128, 128], 256), # bconv2_down, bconv2_up, deconv2 ([128, 256], [256, 256], 512), # bconv3_down, bconv1_up, deconv1 ] # The list of the number of filters of the "bottom" convolution block self.BOTTOM_BLOCK = [256, 512] # The number of pooling/deconvolution layers self.NUM_LEVELS = len(self.BLOCKS) # Whether PARTITIONED_LEVELS-th pooling/deconvolution is partitioned self.PARTITION_INCLUDE_POOL = True # Deconvolution should have the same number of input/output channels assert self.BLOCKS[-1][2] == self.BOTTOM_BLOCK[1] assert all([self.BLOCKS[x][2] == self.BLOCKS[x+1][1][-1] for x in range(self.NUM_LEVELS-1)]) # Building blocks self.downconvs = [] self.upconvs = [] self.deconvs = [] for i, blocks in enumerate(self.BLOCKS): downBlock, upBlock, deconv = blocks self.downconvs.append(UNet3DConvBlock( downBlock, name="{}_bconv{}_down".format(self.name, i+1))) ui = self.NUM_LEVELS-1-i self.upconvs.insert(0, UNet3DConvBlock( upBlock, name="{}_bconv{}_up".format(self.name, ui+1))) self.deconvs.insert(0, 
Deconvolution3dModule( deconv, 2, stride=2, padding=0, activation=None, bias=False, name="{}_deconv{}".format(self.name, ui+1))) # The bottom convolution self.bottomconv = UNet3DConvBlock( self.BOTTOM_BLOCK, name="{}_bconv_bottom".format(self.name)) # The last convolution self.lastconv = lm.Convolution3dModule( 3, 1, stride=1, padding=0, activation=None, bias=False, name="{}_lconv".format(self.name)) def forward(self, x): self.instance += 1 x_concat = [] for i in range(self.NUM_LEVELS): x = self.downconvs[i](x) x_concat.append(x) x = lbann.Pooling( x, num_dims=3, has_vectors=False, pool_dims_i=2, pool_pads_i=0, pool_strides_i=2, pool_mode="max", name="{}_pool{}_instance{}".format( self.name, i+1, self.instance)) x = self.bottomconv(x) for i in range(self.NUM_LEVELS): x = self.deconvs[i](x) x = self.upconvs[i](x, x_concat=x_concat[self.NUM_LEVELS-1-i]) x = self.lastconv(x) x = lbann.Softmax( x, softmax_mode="channel") return x class UNet3DConvBlock(lm.Module): """Basic block of an optional concatenation layer and a list of 3D convolutional layers. """ def __init__(self, out_channels_list, name): super().__init__() self.name = name self.instance = 0 assert len(out_channels_list) == 2 self.convs = [] for i, channels in enumerate(out_channels_list): self.convs.append(Convolution3dBNModule( channels, 3, stride=1, padding=1, activation=lbann.Relu, bias=False, name="{}_conv_block_{}".format(self.name, i+1))) def forward(self, x, x_concat=None): self.instance += 1 if x_concat is not None: x = lbann.Concatenation( [x, x_concat], axis=0) for c in self.convs: x = c(x) return x class Convolution3dBNModule(lm.Module): """Basic block of a batch-normalization layer, 3D convolutional layer, and an optional activation layer. 
""" def __init__(self, *args, **kwargs): super().__init__() self.name = kwargs["name"] self.activation = None if "activation" not in kwargs.keys() \ else kwargs["activation"] kwargs["activation"] = None self.conv = lm.Convolution3dModule(*args, **kwargs) bn_scale = lbann.Weights( initializer=lbann.ConstantInitializer(value=1.0), name="{}_bn_scale".format(self.name)) bn_bias = lbann.Weights( initializer=lbann.ConstantInitializer(value=0.0), name="{}_bn_bias".format(self.name)) self.bn_weights = [bn_scale, bn_bias] self.instance = 0 def forward(self, x): self.instance += 1 x = self.conv(x) x = lbann.BatchNormalization( x, weights=self.bn_weights, statistics_group_size=-1, name="{}_bn_instance{}".format( self.name, self.instance)) if self.activation is not None: x = self.activation(x) return x class Deconvolution3dModule(lm.ConvolutionModule): """Basic block for 3D deconvolutional neural networks. Applies a deconvolution and a nonlinear activation function. This is a wrapper class for ConvolutionModule. """ def __init__(self, *args, **kwargs): super().__init__(3, transpose=True, *args, **kwargs) def create_unet3d_data_reader(train_dir, test_dir): readers = [] for role, shuffle, role_dir in [ ("train", True, train_dir), ("test", False, test_dir)]: if role_dir is None: continue readers.append(lbann.reader_pb2.Reader( name="hdf5", role=role, shuffle=shuffle, data_file_pattern="{}/*.hdf5".format(role_dir), validation_percent=0, percent_of_data_to_use=1.0, scaling_factor_int16=1.0, hdf5_key_data="volume", hdf5_key_labels="segmentation", hdf5_hyperslab_labels=True, disable_labels=False, disable_responses=True, )) return lbann.reader_pb2.DataReader(reader=readers) def create_unet3d_optimizer(learn_rate): # TODO: This is a temporal optimizer copied from CosomoFlow. adam = lbann.Adam( learn_rate=learn_rate, beta1=0.9, beta2=0.999, eps=1e-8) return adam if __name__ == '__main__': desc = ('Construct and run the 3D U-Net on a 3D segmentation dataset.' 
'Running the experiment is only supported on LC systems.') parser = argparse.ArgumentParser(description=desc) lbann.contrib.args.add_scheduler_arguments(parser) # General arguments parser.add_argument( '--job-name', action='store', default='lbann_unet3d', type=str, help='scheduler job name (default: lbann_unet3d)') parser.add_argument( '--mini-batch-size', action='store', default=1, type=int, help='mini-batch size (default: 1)', metavar='NUM') parser.add_argument( '--num-epochs', action='store', default=5, type=int, help='number of epochs (default: 100)', metavar='NUM') # Model specific arguments parser.add_argument( '--learning-rate', action='store', default=0.001, type=float, help='the initial learning rate (default: 0.001)') parser.add_argument( '--partition-level', action='store', default=4, type=int, help='the spatial partition level (default: 4)') parser.add_argument( '--depth-groups', action='store', type=int, default=4, help='the number of processes for the depth dimension (default: 4)') default_lc_dataset = '/p/gpfs1/brainusr/datasets/LiTS/hdf5_dim128_float' default_train_dir = '{}/train'.format(default_lc_dataset) default_test_dir = '{}/test'.format(default_lc_dataset) parser.add_argument( '--train-dir', action='store', type=str, default=default_train_dir, help='the directory of the training dataset (default: \'{}\')' .format(default_train_dir)) parser.add_argument( '--test-dir', action='store', type=str, default=default_test_dir, help='the directory of the test dataset (default: \'{}\')' .format(default_test_dir)) parser.add_argument( '--dynamically-reclaim-error-signals', action='store_true', help='Allow LBANN to reclaim error signals buffers (default: False)') parser.add_argument( '--batch-job', action='store_true', help='Run as a batch job (default: false)') lbann.contrib.args.add_optimizer_arguments( parser, default_optimizer="adam", default_learning_rate=0.001, ) args = parser.parse_args() parallel_strategy = get_parallel_strategy_args( 
sample_groups=args.mini_batch_size, depth_groups=args.depth_groups) # Construct layer graph input = lbann.Input( target_mode='label_reconstruction') volume = lbann.Identity(input) output = UNet3D()(volume) segmentation = lbann.Identity(input) ce = lbann.CrossEntropy( [output, segmentation], use_labels=True) obj = lbann.ObjectiveFunction([ce]) layers = list(lbann.traverse_layer_graph(input)) for l in layers: l.parallel_strategy = parallel_strategy # Setup model metrics = [lbann.Metric(ce, name='CE', unit='')] callbacks = [ lbann.CallbackPrint(), lbann.CallbackTimer(), lbann.CallbackGPUMemoryUsage(), lbann.CallbackProfiler(skip_init=True), ] # # TODO: Use polynomial learning rate decay (https://github.com/LLNL/lbann/issues/1581) # callbacks.append( # lbann.CallbackPolyLearningRate( # power=1.0, # num_epochs=100, # end_lr=1e-5)) model = lbann.Model( epochs=args.num_epochs, layers=layers, objective_function=obj, callbacks=callbacks, ) # Setup optimizer optimizer = lbann.contrib.args.create_optimizer(args) # Setup data reader data_reader = create_unet3d_data_reader( train_dir=args.train_dir, test_dir=args.test_dir) # Setup trainer trainer = lbann.Trainer(mini_batch_size=args.mini_batch_size) # Runtime parameters/arguments environment = lbann.contrib.args.get_distconv_environment( num_io_partitions=args.depth_groups) if args.dynamically_reclaim_error_signals: environment['LBANN_KEEP_ERROR_SIGNALS'] = 0 else: environment['LBANN_KEEP_ERROR_SIGNALS'] = 1 lbann_args = ['--use_data_store'] # Run experiment kwargs = lbann.contrib.args.get_scheduler_kwargs(args) lbann.contrib.launcher.run( trainer, model, data_reader, optimizer, job_name=args.job_name, environment=environment, lbann_args=lbann_args, batch_job=args.batch_job, **kwargs)
import lbann import lbann.models import lbann.contrib.args import lbann.contrib.launcher import lbann.modules as lm
random_line_split
unet3d.py
import argparse import lbann import lbann.models import lbann.contrib.args import lbann.contrib.launcher import lbann.modules as lm from lbann.core.util import get_parallel_strategy_args class UNet3D(lm.Module): """The 3D U-Net. See: \"{O}zg\"{u}n \c{C}i\c{c}ek, Ahmed Abdulkadir, Soeren S. Lienkamp, Thomas Brox, and Olaf Ronneberger. "3D U-Net: learning dense volumetric segmentation from sparse annotation." In International conference on medical image computing and computer-assisted intervention, pp. 424-432, 2016. Note that this model assumes the same spatial input/output sizes with extra padding to simplify the implementation. """ global_count = 0 # Static counter, used for default names def __init__(self, name=None): """Initialize 3D U-Net. Args: name (str, optional): Module name (default: 'alexnet_module<index>'). """ UNet3D.global_count += 1 self.instance = 0 self.name = (name if name else "unet3d_module{0}".format(UNet3D.global_count)) # The list of ([down-conv filters], [up-conv filters], deconv filters) self.BLOCKS = [ ([32, 64], [64, 64], 128), # bconv1_down, bconv3_up, deconv3 ([64, 128], [128, 128], 256), # bconv2_down, bconv2_up, deconv2 ([128, 256], [256, 256], 512), # bconv3_down, bconv1_up, deconv1 ] # The list of the number of filters of the "bottom" convolution block self.BOTTOM_BLOCK = [256, 512] # The number of pooling/deconvolution layers self.NUM_LEVELS = len(self.BLOCKS) # Whether PARTITIONED_LEVELS-th pooling/deconvolution is partitioned self.PARTITION_INCLUDE_POOL = True # Deconvolution should have the same number of input/output channels assert self.BLOCKS[-1][2] == self.BOTTOM_BLOCK[1] assert all([self.BLOCKS[x][2] == self.BLOCKS[x+1][1][-1] for x in range(self.NUM_LEVELS-1)]) # Building blocks self.downconvs = [] self.upconvs = [] self.deconvs = [] for i, blocks in enumerate(self.BLOCKS): downBlock, upBlock, deconv = blocks self.downconvs.append(UNet3DConvBlock( downBlock, name="{}_bconv{}_down".format(self.name, i+1))) ui = 
self.NUM_LEVELS-1-i self.upconvs.insert(0, UNet3DConvBlock( upBlock, name="{}_bconv{}_up".format(self.name, ui+1))) self.deconvs.insert(0, Deconvolution3dModule( deconv, 2, stride=2, padding=0, activation=None, bias=False, name="{}_deconv{}".format(self.name, ui+1))) # The bottom convolution self.bottomconv = UNet3DConvBlock( self.BOTTOM_BLOCK, name="{}_bconv_bottom".format(self.name)) # The last convolution self.lastconv = lm.Convolution3dModule( 3, 1, stride=1, padding=0, activation=None, bias=False, name="{}_lconv".format(self.name)) def forward(self, x): self.instance += 1 x_concat = [] for i in range(self.NUM_LEVELS): x = self.downconvs[i](x) x_concat.append(x) x = lbann.Pooling( x, num_dims=3, has_vectors=False, pool_dims_i=2, pool_pads_i=0, pool_strides_i=2, pool_mode="max", name="{}_pool{}_instance{}".format( self.name, i+1, self.instance)) x = self.bottomconv(x) for i in range(self.NUM_LEVELS): x = self.deconvs[i](x) x = self.upconvs[i](x, x_concat=x_concat[self.NUM_LEVELS-1-i]) x = self.lastconv(x) x = lbann.Softmax( x, softmax_mode="channel") return x class UNet3DConvBlock(lm.Module): """Basic block of an optional concatenation layer and a list of 3D convolutional layers. """ def __init__(self, out_channels_list, name): super().__init__() self.name = name self.instance = 0 assert len(out_channels_list) == 2 self.convs = [] for i, channels in enumerate(out_channels_list): self.convs.append(Convolution3dBNModule( channels, 3, stride=1, padding=1, activation=lbann.Relu, bias=False, name="{}_conv_block_{}".format(self.name, i+1))) def forward(self, x, x_concat=None): self.instance += 1 if x_concat is not None: x = lbann.Concatenation( [x, x_concat], axis=0) for c in self.convs: x = c(x) return x class Convolution3dBNModule(lm.Module): """Basic block of a batch-normalization layer, 3D convolutional layer, and an optional activation layer. """ def __init__(self, *args, **kwargs):
def forward(self, x): self.instance += 1 x = self.conv(x) x = lbann.BatchNormalization( x, weights=self.bn_weights, statistics_group_size=-1, name="{}_bn_instance{}".format( self.name, self.instance)) if self.activation is not None: x = self.activation(x) return x class Deconvolution3dModule(lm.ConvolutionModule): """Basic block for 3D deconvolutional neural networks. Applies a deconvolution and a nonlinear activation function. This is a wrapper class for ConvolutionModule. """ def __init__(self, *args, **kwargs): super().__init__(3, transpose=True, *args, **kwargs) def create_unet3d_data_reader(train_dir, test_dir): readers = [] for role, shuffle, role_dir in [ ("train", True, train_dir), ("test", False, test_dir)]: if role_dir is None: continue readers.append(lbann.reader_pb2.Reader( name="hdf5", role=role, shuffle=shuffle, data_file_pattern="{}/*.hdf5".format(role_dir), validation_percent=0, percent_of_data_to_use=1.0, scaling_factor_int16=1.0, hdf5_key_data="volume", hdf5_key_labels="segmentation", hdf5_hyperslab_labels=True, disable_labels=False, disable_responses=True, )) return lbann.reader_pb2.DataReader(reader=readers) def create_unet3d_optimizer(learn_rate): # TODO: This is a temporal optimizer copied from CosomoFlow. adam = lbann.Adam( learn_rate=learn_rate, beta1=0.9, beta2=0.999, eps=1e-8) return adam if __name__ == '__main__': desc = ('Construct and run the 3D U-Net on a 3D segmentation dataset.' 
'Running the experiment is only supported on LC systems.') parser = argparse.ArgumentParser(description=desc) lbann.contrib.args.add_scheduler_arguments(parser) # General arguments parser.add_argument( '--job-name', action='store', default='lbann_unet3d', type=str, help='scheduler job name (default: lbann_unet3d)') parser.add_argument( '--mini-batch-size', action='store', default=1, type=int, help='mini-batch size (default: 1)', metavar='NUM') parser.add_argument( '--num-epochs', action='store', default=5, type=int, help='number of epochs (default: 100)', metavar='NUM') # Model specific arguments parser.add_argument( '--learning-rate', action='store', default=0.001, type=float, help='the initial learning rate (default: 0.001)') parser.add_argument( '--partition-level', action='store', default=4, type=int, help='the spatial partition level (default: 4)') parser.add_argument( '--depth-groups', action='store', type=int, default=4, help='the number of processes for the depth dimension (default: 4)') default_lc_dataset = '/p/gpfs1/brainusr/datasets/LiTS/hdf5_dim128_float' default_train_dir = '{}/train'.format(default_lc_dataset) default_test_dir = '{}/test'.format(default_lc_dataset) parser.add_argument( '--train-dir', action='store', type=str, default=default_train_dir, help='the directory of the training dataset (default: \'{}\')' .format(default_train_dir)) parser.add_argument( '--test-dir', action='store', type=str, default=default_test_dir, help='the directory of the test dataset (default: \'{}\')' .format(default_test_dir)) parser.add_argument( '--dynamically-reclaim-error-signals', action='store_true', help='Allow LBANN to reclaim error signals buffers (default: False)') parser.add_argument( '--batch-job', action='store_true', help='Run as a batch job (default: false)') lbann.contrib.args.add_optimizer_arguments( parser, default_optimizer="adam", default_learning_rate=0.001, ) args = parser.parse_args() parallel_strategy = get_parallel_strategy_args( 
sample_groups=args.mini_batch_size, depth_groups=args.depth_groups) # Construct layer graph input = lbann.Input( target_mode='label_reconstruction') volume = lbann.Identity(input) output = UNet3D()(volume) segmentation = lbann.Identity(input) ce = lbann.CrossEntropy( [output, segmentation], use_labels=True) obj = lbann.ObjectiveFunction([ce]) layers = list(lbann.traverse_layer_graph(input)) for l in layers: l.parallel_strategy = parallel_strategy # Setup model metrics = [lbann.Metric(ce, name='CE', unit='')] callbacks = [ lbann.CallbackPrint(), lbann.CallbackTimer(), lbann.CallbackGPUMemoryUsage(), lbann.CallbackProfiler(skip_init=True), ] # # TODO: Use polynomial learning rate decay (https://github.com/LLNL/lbann/issues/1581) # callbacks.append( # lbann.CallbackPolyLearningRate( # power=1.0, # num_epochs=100, # end_lr=1e-5)) model = lbann.Model( epochs=args.num_epochs, layers=layers, objective_function=obj, callbacks=callbacks, ) # Setup optimizer optimizer = lbann.contrib.args.create_optimizer(args) # Setup data reader data_reader = create_unet3d_data_reader( train_dir=args.train_dir, test_dir=args.test_dir) # Setup trainer trainer = lbann.Trainer(mini_batch_size=args.mini_batch_size) # Runtime parameters/arguments environment = lbann.contrib.args.get_distconv_environment( num_io_partitions=args.depth_groups) if args.dynamically_reclaim_error_signals: environment['LBANN_KEEP_ERROR_SIGNALS'] = 0 else: environment['LBANN_KEEP_ERROR_SIGNALS'] = 1 lbann_args = ['--use_data_store'] # Run experiment kwargs = lbann.contrib.args.get_scheduler_kwargs(args) lbann.contrib.launcher.run( trainer, model, data_reader, optimizer, job_name=args.job_name, environment=environment, lbann_args=lbann_args, batch_job=args.batch_job, **kwargs)
super().__init__() self.name = kwargs["name"] self.activation = None if "activation" not in kwargs.keys() \ else kwargs["activation"] kwargs["activation"] = None self.conv = lm.Convolution3dModule(*args, **kwargs) bn_scale = lbann.Weights( initializer=lbann.ConstantInitializer(value=1.0), name="{}_bn_scale".format(self.name)) bn_bias = lbann.Weights( initializer=lbann.ConstantInitializer(value=0.0), name="{}_bn_bias".format(self.name)) self.bn_weights = [bn_scale, bn_bias] self.instance = 0
identifier_body
unet3d.py
import argparse import lbann import lbann.models import lbann.contrib.args import lbann.contrib.launcher import lbann.modules as lm from lbann.core.util import get_parallel_strategy_args class UNet3D(lm.Module): """The 3D U-Net. See: \"{O}zg\"{u}n \c{C}i\c{c}ek, Ahmed Abdulkadir, Soeren S. Lienkamp, Thomas Brox, and Olaf Ronneberger. "3D U-Net: learning dense volumetric segmentation from sparse annotation." In International conference on medical image computing and computer-assisted intervention, pp. 424-432, 2016. Note that this model assumes the same spatial input/output sizes with extra padding to simplify the implementation. """ global_count = 0 # Static counter, used for default names def __init__(self, name=None): """Initialize 3D U-Net. Args: name (str, optional): Module name (default: 'alexnet_module<index>'). """ UNet3D.global_count += 1 self.instance = 0 self.name = (name if name else "unet3d_module{0}".format(UNet3D.global_count)) # The list of ([down-conv filters], [up-conv filters], deconv filters) self.BLOCKS = [ ([32, 64], [64, 64], 128), # bconv1_down, bconv3_up, deconv3 ([64, 128], [128, 128], 256), # bconv2_down, bconv2_up, deconv2 ([128, 256], [256, 256], 512), # bconv3_down, bconv1_up, deconv1 ] # The list of the number of filters of the "bottom" convolution block self.BOTTOM_BLOCK = [256, 512] # The number of pooling/deconvolution layers self.NUM_LEVELS = len(self.BLOCKS) # Whether PARTITIONED_LEVELS-th pooling/deconvolution is partitioned self.PARTITION_INCLUDE_POOL = True # Deconvolution should have the same number of input/output channels assert self.BLOCKS[-1][2] == self.BOTTOM_BLOCK[1] assert all([self.BLOCKS[x][2] == self.BLOCKS[x+1][1][-1] for x in range(self.NUM_LEVELS-1)]) # Building blocks self.downconvs = [] self.upconvs = [] self.deconvs = [] for i, blocks in enumerate(self.BLOCKS): downBlock, upBlock, deconv = blocks self.downconvs.append(UNet3DConvBlock( downBlock, name="{}_bconv{}_down".format(self.name, i+1))) ui = 
self.NUM_LEVELS-1-i self.upconvs.insert(0, UNet3DConvBlock( upBlock, name="{}_bconv{}_up".format(self.name, ui+1))) self.deconvs.insert(0, Deconvolution3dModule( deconv, 2, stride=2, padding=0, activation=None, bias=False, name="{}_deconv{}".format(self.name, ui+1))) # The bottom convolution self.bottomconv = UNet3DConvBlock( self.BOTTOM_BLOCK, name="{}_bconv_bottom".format(self.name)) # The last convolution self.lastconv = lm.Convolution3dModule( 3, 1, stride=1, padding=0, activation=None, bias=False, name="{}_lconv".format(self.name)) def forward(self, x): self.instance += 1 x_concat = [] for i in range(self.NUM_LEVELS): x = self.downconvs[i](x) x_concat.append(x) x = lbann.Pooling( x, num_dims=3, has_vectors=False, pool_dims_i=2, pool_pads_i=0, pool_strides_i=2, pool_mode="max", name="{}_pool{}_instance{}".format( self.name, i+1, self.instance)) x = self.bottomconv(x) for i in range(self.NUM_LEVELS): x = self.deconvs[i](x) x = self.upconvs[i](x, x_concat=x_concat[self.NUM_LEVELS-1-i]) x = self.lastconv(x) x = lbann.Softmax( x, softmax_mode="channel") return x class UNet3DConvBlock(lm.Module): """Basic block of an optional concatenation layer and a list of 3D convolutional layers. """ def __init__(self, out_channels_list, name): super().__init__() self.name = name self.instance = 0 assert len(out_channels_list) == 2 self.convs = [] for i, channels in enumerate(out_channels_list): self.convs.append(Convolution3dBNModule( channels, 3, stride=1, padding=1, activation=lbann.Relu, bias=False, name="{}_conv_block_{}".format(self.name, i+1))) def forward(self, x, x_concat=None): self.instance += 1 if x_concat is not None: x = lbann.Concatenation( [x, x_concat], axis=0) for c in self.convs: x = c(x) return x class Convolution3dBNModule(lm.Module): """Basic block of a batch-normalization layer, 3D convolutional layer, and an optional activation layer. 
""" def __init__(self, *args, **kwargs): super().__init__() self.name = kwargs["name"] self.activation = None if "activation" not in kwargs.keys() \ else kwargs["activation"] kwargs["activation"] = None self.conv = lm.Convolution3dModule(*args, **kwargs) bn_scale = lbann.Weights( initializer=lbann.ConstantInitializer(value=1.0), name="{}_bn_scale".format(self.name)) bn_bias = lbann.Weights( initializer=lbann.ConstantInitializer(value=0.0), name="{}_bn_bias".format(self.name)) self.bn_weights = [bn_scale, bn_bias] self.instance = 0 def forward(self, x): self.instance += 1 x = self.conv(x) x = lbann.BatchNormalization( x, weights=self.bn_weights, statistics_group_size=-1, name="{}_bn_instance{}".format( self.name, self.instance)) if self.activation is not None: x = self.activation(x) return x class Deconvolution3dModule(lm.ConvolutionModule): """Basic block for 3D deconvolutional neural networks. Applies a deconvolution and a nonlinear activation function. This is a wrapper class for ConvolutionModule. """ def __init__(self, *args, **kwargs): super().__init__(3, transpose=True, *args, **kwargs) def create_unet3d_data_reader(train_dir, test_dir): readers = [] for role, shuffle, role_dir in [ ("train", True, train_dir), ("test", False, test_dir)]: if role_dir is None: continue readers.append(lbann.reader_pb2.Reader( name="hdf5", role=role, shuffle=shuffle, data_file_pattern="{}/*.hdf5".format(role_dir), validation_percent=0, percent_of_data_to_use=1.0, scaling_factor_int16=1.0, hdf5_key_data="volume", hdf5_key_labels="segmentation", hdf5_hyperslab_labels=True, disable_labels=False, disable_responses=True, )) return lbann.reader_pb2.DataReader(reader=readers) def
(learn_rate): # TODO: This is a temporal optimizer copied from CosomoFlow. adam = lbann.Adam( learn_rate=learn_rate, beta1=0.9, beta2=0.999, eps=1e-8) return adam if __name__ == '__main__': desc = ('Construct and run the 3D U-Net on a 3D segmentation dataset.' 'Running the experiment is only supported on LC systems.') parser = argparse.ArgumentParser(description=desc) lbann.contrib.args.add_scheduler_arguments(parser) # General arguments parser.add_argument( '--job-name', action='store', default='lbann_unet3d', type=str, help='scheduler job name (default: lbann_unet3d)') parser.add_argument( '--mini-batch-size', action='store', default=1, type=int, help='mini-batch size (default: 1)', metavar='NUM') parser.add_argument( '--num-epochs', action='store', default=5, type=int, help='number of epochs (default: 100)', metavar='NUM') # Model specific arguments parser.add_argument( '--learning-rate', action='store', default=0.001, type=float, help='the initial learning rate (default: 0.001)') parser.add_argument( '--partition-level', action='store', default=4, type=int, help='the spatial partition level (default: 4)') parser.add_argument( '--depth-groups', action='store', type=int, default=4, help='the number of processes for the depth dimension (default: 4)') default_lc_dataset = '/p/gpfs1/brainusr/datasets/LiTS/hdf5_dim128_float' default_train_dir = '{}/train'.format(default_lc_dataset) default_test_dir = '{}/test'.format(default_lc_dataset) parser.add_argument( '--train-dir', action='store', type=str, default=default_train_dir, help='the directory of the training dataset (default: \'{}\')' .format(default_train_dir)) parser.add_argument( '--test-dir', action='store', type=str, default=default_test_dir, help='the directory of the test dataset (default: \'{}\')' .format(default_test_dir)) parser.add_argument( '--dynamically-reclaim-error-signals', action='store_true', help='Allow LBANN to reclaim error signals buffers (default: False)') parser.add_argument( '--batch-job', 
action='store_true', help='Run as a batch job (default: false)') lbann.contrib.args.add_optimizer_arguments( parser, default_optimizer="adam", default_learning_rate=0.001, ) args = parser.parse_args() parallel_strategy = get_parallel_strategy_args( sample_groups=args.mini_batch_size, depth_groups=args.depth_groups) # Construct layer graph input = lbann.Input( target_mode='label_reconstruction') volume = lbann.Identity(input) output = UNet3D()(volume) segmentation = lbann.Identity(input) ce = lbann.CrossEntropy( [output, segmentation], use_labels=True) obj = lbann.ObjectiveFunction([ce]) layers = list(lbann.traverse_layer_graph(input)) for l in layers: l.parallel_strategy = parallel_strategy # Setup model metrics = [lbann.Metric(ce, name='CE', unit='')] callbacks = [ lbann.CallbackPrint(), lbann.CallbackTimer(), lbann.CallbackGPUMemoryUsage(), lbann.CallbackProfiler(skip_init=True), ] # # TODO: Use polynomial learning rate decay (https://github.com/LLNL/lbann/issues/1581) # callbacks.append( # lbann.CallbackPolyLearningRate( # power=1.0, # num_epochs=100, # end_lr=1e-5)) model = lbann.Model( epochs=args.num_epochs, layers=layers, objective_function=obj, callbacks=callbacks, ) # Setup optimizer optimizer = lbann.contrib.args.create_optimizer(args) # Setup data reader data_reader = create_unet3d_data_reader( train_dir=args.train_dir, test_dir=args.test_dir) # Setup trainer trainer = lbann.Trainer(mini_batch_size=args.mini_batch_size) # Runtime parameters/arguments environment = lbann.contrib.args.get_distconv_environment( num_io_partitions=args.depth_groups) if args.dynamically_reclaim_error_signals: environment['LBANN_KEEP_ERROR_SIGNALS'] = 0 else: environment['LBANN_KEEP_ERROR_SIGNALS'] = 1 lbann_args = ['--use_data_store'] # Run experiment kwargs = lbann.contrib.args.get_scheduler_kwargs(args) lbann.contrib.launcher.run( trainer, model, data_reader, optimizer, job_name=args.job_name, environment=environment, lbann_args=lbann_args, batch_job=args.batch_job, **kwargs)
create_unet3d_optimizer
identifier_name
package.rs
//! An interpreter for the rust-installer package format. Responsible //! for installing from a directory or tarball to an installation //! prefix, represented by a `Components` instance. use crate::dist::component::components::*; use crate::dist::component::transaction::*; use crate::dist::temp; use crate::errors::*; use crate::utils::notifications::Notification; use crate::utils::utils; use std::collections::HashSet; use std::fmt; use std::io::Read; use std::path::{Path, PathBuf}; /// The current metadata revision used by rust-installer pub const INSTALLER_VERSION: &str = "3"; pub const VERSION_FILE: &str = "rust-installer-version"; pub trait Package: fmt::Debug { fn contains(&self, component: &str, short_name: Option<&str>) -> bool; fn install<'a>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'a>, ) -> Result<Transaction<'a>>; fn components(&self) -> Vec<String>; } #[derive(Debug)] pub struct DirectoryPackage { path: PathBuf, components: HashSet<String>, copy: bool, } impl DirectoryPackage { pub fn new(path: PathBuf, copy: bool) -> Result<Self> { validate_installer_version(&path)?; let content = utils::read_file("package components", &path.join("components"))?; let components = content .lines() .map(std::borrow::ToOwned::to_owned) .collect(); Ok(DirectoryPackage { path, components, copy, }) } } fn validate_installer_version(path: &Path) -> Result<()> { let file = utils::read_file("installer version", &path.join(VERSION_FILE))?; let v = file.trim(); if v == INSTALLER_VERSION { Ok(()) } else { Err(ErrorKind::BadInstallerVersion(v.to_owned()).into()) } } impl Package for DirectoryPackage { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.components.contains(component) || if let Some(n) = short_name { self.components.contains(n) } else { false } } fn install<'a>( &self, target: &Components, name: &str, short_name: Option<&str>, tx: Transaction<'a>, ) -> Result<Transaction<'a>> { let actual_name = 
if self.components.contains(name) { name } else if let Some(n) = short_name { n } else { name }; let root = self.path.join(actual_name); let manifest = utils::read_file("package manifest", &root.join("manifest.in"))?; let mut builder = target.add(name, tx); for l in manifest.lines() { let part = ComponentPart::decode(l) .ok_or_else(|| ErrorKind::CorruptComponent(name.to_owned()))?; let path = part.1; let src_path = root.join(&path); match &*part.0 { "file" => { if self.copy { builder.copy_file(path.clone(), &src_path)? } else { builder.move_file(path.clone(), &src_path)? } } "dir" => { if self.copy { builder.copy_dir(path.clone(), &src_path)? } else { builder.move_dir(path.clone(), &src_path)? } } _ => return Err(ErrorKind::CorruptComponent(name.to_owned()).into()), } set_file_perms(&target.prefix().path().join(path), &src_path)?; } let tx = builder.finish()?; Ok(tx) } fn components(&self) -> Vec<String> { self.components.iter().cloned().collect() } } // On Unix we need to set up the file permissions correctly so // binaries are executable and directories readable. This shouldn't be // necessary: the source files *should* have the right permissions, // but due to rust-lang/rust#25479 they don't. 
#[cfg(unix)] fn set_file_perms(dest_path: &Path, src_path: &Path) -> Result<()> { use std::fs::{self, Metadata}; use std::os::unix::fs::PermissionsExt; use walkdir::WalkDir; // Compute whether this entry needs the X bit fn needs_x(meta: &Metadata) -> bool { meta.is_dir() || // Directories need it meta.permissions().mode() & 0o700 == 0o700 // If it is rwx for the user, it gets the X bit } // By convention, anything in the bin/ directory of the package is a binary let is_bin = if let Some(p) = src_path.parent() { p.ends_with("bin") } else { false }; let is_dir = utils::is_directory(dest_path); if is_dir { // Walk the directory setting everything for entry in WalkDir::new(dest_path) { let entry = entry.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?; let meta = entry .metadata() .chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?; let mut perm = meta.permissions(); perm.set_mode(if needs_x(&meta) { 0o755 } else { 0o644 }); fs::set_permissions(entry.path(), perm) .chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; } } else { let meta = fs::metadata(dest_path).chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; let mut perm = meta.permissions(); perm.set_mode(if is_bin || needs_x(&meta) { 0o755 } else { 0o644 }); fs::set_permissions(dest_path, perm) .chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; } Ok(()) } #[cfg(windows)] fn set_file_perms(_dest_path: &Path, _src_path: &Path) -> Result<()> { Ok(()) } #[derive(Debug)] pub struct TarPackage<'a>(DirectoryPackage, temp::Dir<'a>); impl<'a> TarPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let temp_dir = temp_cfg.new_directory()?; let mut archive = tar::Archive::new(stream); // The rust-installer packages unpack to a directory called // $pkgname-$version-$target. Skip that directory when // unpacking. 
unpack_without_first_dir(&mut archive, &*temp_dir, notify_handler)?; Ok(TarPackage( DirectoryPackage::new(temp_dir.to_owned(), false)?, temp_dir, )) } } #[cfg(windows)] mod unpacker { use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use threadpool; use crate::utils::notifications::Notification; pub struct Unpacker<'a> { n_files: Arc<AtomicUsize>, pool: threadpool::ThreadPool, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, } impl<'a> Unpacker<'a> { pub fn new(notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Self
pub fn handle(&mut self, unpacked: tar::Unpacked) { if let tar::Unpacked::File(f) = unpacked { self.n_files.fetch_add(1, Ordering::Relaxed); let n_files = self.n_files.clone(); self.pool.execute(move || { drop(f); n_files.fetch_sub(1, Ordering::Relaxed); }); } } } impl<'a> Drop for Unpacker<'a> { fn drop(&mut self) { // Some explanation is in order. Even though the tar we are reading from (if // any) will have had its FileWithProgress download tracking // completed before we hit drop, that is not true if we are unwinding due to a // failure, where the logical ownership of the progress bar is // ambiguous, and as the tracker itself is abstracted out behind // notifications etc we cannot just query for that. So: we assume no // more reads of the underlying tar will take place: either the // error unwinding will stop reads, or we completed; either way, we // notify finished to the tracker to force a reset to zero; we set // the units to files, show our progress, and set our units back // afterwards. The largest archives today - rust docs - have ~20k // items, and the download tracker's progress is confounded with // actual handling of data today, we synthesis a data buffer and // pretend to have bytes to deliver. 
self.notify_handler .map(|handler| handler(Notification::DownloadFinished)); self.notify_handler .map(|handler| handler(Notification::DownloadPushUnits("handles"))); let mut prev_files = self.n_files.load(Ordering::Relaxed); self.notify_handler.map(|handler| { handler(Notification::DownloadContentLengthReceived( prev_files as u64, )) }); if prev_files > 50 { println!("Closing {} deferred file handles", prev_files); } let buf: Vec<u8> = vec![0; prev_files]; assert!(32767 > prev_files); let mut current_files = prev_files; while current_files != 0 { use std::thread::sleep; sleep(std::time::Duration::from_millis(100)); prev_files = current_files; current_files = self.n_files.load(Ordering::Relaxed); let step_count = prev_files - current_files; self.notify_handler.map(|handler| { handler(Notification::DownloadDataReceived(&buf[0..step_count])) }); } self.pool.join(); self.notify_handler .map(|handler| handler(Notification::DownloadFinished)); self.notify_handler .map(|handler| handler(Notification::DownloadPopUnits)); } } } #[cfg(not(windows))] mod unpacker { use crate::utils::notifications::Notification; pub struct Unpacker {} impl Unpacker { pub fn new<'a>(_notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Unpacker { Unpacker {} } pub fn handle(&mut self, _unpacked: tar::Unpacked) {} } } fn unpack_without_first_dir<'a, R: Read>( archive: &mut tar::Archive<R>, path: &Path, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<()> { let mut unpacker = unpacker::Unpacker::new(notify_handler); let entries = archive .entries() .chain_err(|| ErrorKind::ExtractingPackage)?; let mut checked_parents: HashSet<PathBuf> = HashSet::new(); for entry in entries { let mut entry = entry.chain_err(|| ErrorKind::ExtractingPackage)?; let relpath = { let path = entry.path(); let path = path.chain_err(|| ErrorKind::ExtractingPackage)?; path.into_owned() }; let mut components = relpath.components(); // Throw away the first path component components.next(); let full_path = 
path.join(&components.as_path()); // Create the full path to the entry if it does not exist already if let Some(parent) = full_path.parent() { if !checked_parents.contains(parent) { checked_parents.insert(parent.to_owned()); // It would be nice to optimise this stat out, but the tar could be like so: // a/deep/file.txt // a/file.txt // which would require tracking the segments rather than a simple hash. // Until profile shows that one stat per dir is a problem (vs one stat per file) // leave till later. if !parent.exists() { std::fs::create_dir_all(&parent).chain_err(|| ErrorKind::ExtractingPackage)? } } } entry.set_preserve_mtime(false); entry .unpack(&full_path) .map(|unpacked| unpacker.handle(unpacked)) .chain_err(|| ErrorKind::ExtractingPackage)?; } Ok(()) } impl<'a> Package for TarPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } } #[derive(Debug)] pub struct TarGzPackage<'a>(TarPackage<'a>); impl<'a> TarGzPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let stream = flate2::read::GzDecoder::new(stream); Ok(TarGzPackage(TarPackage::new( stream, temp_cfg, notify_handler, )?)) } } impl<'a> Package for TarGzPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } } #[derive(Debug)] pub struct TarXzPackage<'a>(TarPackage<'a>); 
impl<'a> TarXzPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let stream = xz2::read::XzDecoder::new(stream); Ok(TarXzPackage(TarPackage::new( stream, temp_cfg, notify_handler, )?)) } } impl<'a> Package for TarXzPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } }
{ // Defaults to hardware thread count threads; this is suitable for // our needs as IO bound operations tend to show up as write latencies // rather than close latencies, so we don't need to look at // more threads to get more IO dispatched at this stage in the process. let pool = threadpool::Builder::new() .thread_name("CloseHandle".into()) .build(); Unpacker { n_files: Arc::new(AtomicUsize::new(0)), pool: pool, notify_handler: notify_handler, } }
identifier_body
package.rs
//! An interpreter for the rust-installer package format. Responsible //! for installing from a directory or tarball to an installation //! prefix, represented by a `Components` instance. use crate::dist::component::components::*; use crate::dist::component::transaction::*; use crate::dist::temp; use crate::errors::*; use crate::utils::notifications::Notification; use crate::utils::utils; use std::collections::HashSet; use std::fmt; use std::io::Read; use std::path::{Path, PathBuf}; /// The current metadata revision used by rust-installer pub const INSTALLER_VERSION: &str = "3"; pub const VERSION_FILE: &str = "rust-installer-version"; pub trait Package: fmt::Debug { fn contains(&self, component: &str, short_name: Option<&str>) -> bool; fn install<'a>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'a>, ) -> Result<Transaction<'a>>; fn components(&self) -> Vec<String>; } #[derive(Debug)] pub struct DirectoryPackage { path: PathBuf, components: HashSet<String>, copy: bool, } impl DirectoryPackage { pub fn new(path: PathBuf, copy: bool) -> Result<Self> { validate_installer_version(&path)?; let content = utils::read_file("package components", &path.join("components"))?; let components = content .lines() .map(std::borrow::ToOwned::to_owned) .collect(); Ok(DirectoryPackage { path, components, copy, }) } } fn validate_installer_version(path: &Path) -> Result<()> { let file = utils::read_file("installer version", &path.join(VERSION_FILE))?; let v = file.trim(); if v == INSTALLER_VERSION { Ok(()) } else { Err(ErrorKind::BadInstallerVersion(v.to_owned()).into()) } } impl Package for DirectoryPackage { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.components.contains(component) || if let Some(n) = short_name { self.components.contains(n) } else { false } } fn install<'a>( &self, target: &Components, name: &str, short_name: Option<&str>, tx: Transaction<'a>, ) -> Result<Transaction<'a>> { let actual_name = 
if self.components.contains(name) { name } else if let Some(n) = short_name { n } else { name }; let root = self.path.join(actual_name); let manifest = utils::read_file("package manifest", &root.join("manifest.in"))?; let mut builder = target.add(name, tx); for l in manifest.lines() { let part = ComponentPart::decode(l) .ok_or_else(|| ErrorKind::CorruptComponent(name.to_owned()))?; let path = part.1; let src_path = root.join(&path); match &*part.0 { "file" => { if self.copy { builder.copy_file(path.clone(), &src_path)? } else { builder.move_file(path.clone(), &src_path)? } } "dir" => { if self.copy { builder.copy_dir(path.clone(), &src_path)? } else { builder.move_dir(path.clone(), &src_path)? } } _ => return Err(ErrorKind::CorruptComponent(name.to_owned()).into()), } set_file_perms(&target.prefix().path().join(path), &src_path)?; } let tx = builder.finish()?; Ok(tx) } fn components(&self) -> Vec<String> { self.components.iter().cloned().collect() } } // On Unix we need to set up the file permissions correctly so // binaries are executable and directories readable. This shouldn't be // necessary: the source files *should* have the right permissions, // but due to rust-lang/rust#25479 they don't. 
#[cfg(unix)] fn set_file_perms(dest_path: &Path, src_path: &Path) -> Result<()> { use std::fs::{self, Metadata}; use std::os::unix::fs::PermissionsExt; use walkdir::WalkDir; // Compute whether this entry needs the X bit fn needs_x(meta: &Metadata) -> bool { meta.is_dir() || // Directories need it meta.permissions().mode() & 0o700 == 0o700 // If it is rwx for the user, it gets the X bit } // By convention, anything in the bin/ directory of the package is a binary let is_bin = if let Some(p) = src_path.parent() { p.ends_with("bin") } else { false }; let is_dir = utils::is_directory(dest_path); if is_dir { // Walk the directory setting everything for entry in WalkDir::new(dest_path) { let entry = entry.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?; let meta = entry .metadata() .chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?; let mut perm = meta.permissions(); perm.set_mode(if needs_x(&meta) { 0o755 } else { 0o644 }); fs::set_permissions(entry.path(), perm) .chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; } } else { let meta = fs::metadata(dest_path).chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; let mut perm = meta.permissions(); perm.set_mode(if is_bin || needs_x(&meta) { 0o755 } else { 0o644 }); fs::set_permissions(dest_path, perm) .chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; } Ok(()) } #[cfg(windows)] fn set_file_perms(_dest_path: &Path, _src_path: &Path) -> Result<()> { Ok(()) } #[derive(Debug)] pub struct TarPackage<'a>(DirectoryPackage, temp::Dir<'a>); impl<'a> TarPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let temp_dir = temp_cfg.new_directory()?; let mut archive = tar::Archive::new(stream); // The rust-installer packages unpack to a directory called // $pkgname-$version-$target. Skip that directory when // unpacking. 
unpack_without_first_dir(&mut archive, &*temp_dir, notify_handler)?; Ok(TarPackage( DirectoryPackage::new(temp_dir.to_owned(), false)?, temp_dir, )) } } #[cfg(windows)] mod unpacker { use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use threadpool; use crate::utils::notifications::Notification; pub struct
<'a> { n_files: Arc<AtomicUsize>, pool: threadpool::ThreadPool, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, } impl<'a> Unpacker<'a> { pub fn new(notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Self { // Defaults to hardware thread count threads; this is suitable for // our needs as IO bound operations tend to show up as write latencies // rather than close latencies, so we don't need to look at // more threads to get more IO dispatched at this stage in the process. let pool = threadpool::Builder::new() .thread_name("CloseHandle".into()) .build(); Unpacker { n_files: Arc::new(AtomicUsize::new(0)), pool: pool, notify_handler: notify_handler, } } pub fn handle(&mut self, unpacked: tar::Unpacked) { if let tar::Unpacked::File(f) = unpacked { self.n_files.fetch_add(1, Ordering::Relaxed); let n_files = self.n_files.clone(); self.pool.execute(move || { drop(f); n_files.fetch_sub(1, Ordering::Relaxed); }); } } } impl<'a> Drop for Unpacker<'a> { fn drop(&mut self) { // Some explanation is in order. Even though the tar we are reading from (if // any) will have had its FileWithProgress download tracking // completed before we hit drop, that is not true if we are unwinding due to a // failure, where the logical ownership of the progress bar is // ambiguous, and as the tracker itself is abstracted out behind // notifications etc we cannot just query for that. So: we assume no // more reads of the underlying tar will take place: either the // error unwinding will stop reads, or we completed; either way, we // notify finished to the tracker to force a reset to zero; we set // the units to files, show our progress, and set our units back // afterwards. The largest archives today - rust docs - have ~20k // items, and the download tracker's progress is confounded with // actual handling of data today, we synthesis a data buffer and // pretend to have bytes to deliver. 
self.notify_handler .map(|handler| handler(Notification::DownloadFinished)); self.notify_handler .map(|handler| handler(Notification::DownloadPushUnits("handles"))); let mut prev_files = self.n_files.load(Ordering::Relaxed); self.notify_handler.map(|handler| { handler(Notification::DownloadContentLengthReceived( prev_files as u64, )) }); if prev_files > 50 { println!("Closing {} deferred file handles", prev_files); } let buf: Vec<u8> = vec![0; prev_files]; assert!(32767 > prev_files); let mut current_files = prev_files; while current_files != 0 { use std::thread::sleep; sleep(std::time::Duration::from_millis(100)); prev_files = current_files; current_files = self.n_files.load(Ordering::Relaxed); let step_count = prev_files - current_files; self.notify_handler.map(|handler| { handler(Notification::DownloadDataReceived(&buf[0..step_count])) }); } self.pool.join(); self.notify_handler .map(|handler| handler(Notification::DownloadFinished)); self.notify_handler .map(|handler| handler(Notification::DownloadPopUnits)); } } } #[cfg(not(windows))] mod unpacker { use crate::utils::notifications::Notification; pub struct Unpacker {} impl Unpacker { pub fn new<'a>(_notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Unpacker { Unpacker {} } pub fn handle(&mut self, _unpacked: tar::Unpacked) {} } } fn unpack_without_first_dir<'a, R: Read>( archive: &mut tar::Archive<R>, path: &Path, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<()> { let mut unpacker = unpacker::Unpacker::new(notify_handler); let entries = archive .entries() .chain_err(|| ErrorKind::ExtractingPackage)?; let mut checked_parents: HashSet<PathBuf> = HashSet::new(); for entry in entries { let mut entry = entry.chain_err(|| ErrorKind::ExtractingPackage)?; let relpath = { let path = entry.path(); let path = path.chain_err(|| ErrorKind::ExtractingPackage)?; path.into_owned() }; let mut components = relpath.components(); // Throw away the first path component components.next(); let full_path = 
path.join(&components.as_path()); // Create the full path to the entry if it does not exist already if let Some(parent) = full_path.parent() { if !checked_parents.contains(parent) { checked_parents.insert(parent.to_owned()); // It would be nice to optimise this stat out, but the tar could be like so: // a/deep/file.txt // a/file.txt // which would require tracking the segments rather than a simple hash. // Until profile shows that one stat per dir is a problem (vs one stat per file) // leave till later. if !parent.exists() { std::fs::create_dir_all(&parent).chain_err(|| ErrorKind::ExtractingPackage)? } } } entry.set_preserve_mtime(false); entry .unpack(&full_path) .map(|unpacked| unpacker.handle(unpacked)) .chain_err(|| ErrorKind::ExtractingPackage)?; } Ok(()) } impl<'a> Package for TarPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } } #[derive(Debug)] pub struct TarGzPackage<'a>(TarPackage<'a>); impl<'a> TarGzPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let stream = flate2::read::GzDecoder::new(stream); Ok(TarGzPackage(TarPackage::new( stream, temp_cfg, notify_handler, )?)) } } impl<'a> Package for TarGzPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } } #[derive(Debug)] pub struct TarXzPackage<'a>(TarPackage<'a>); 
impl<'a> TarXzPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let stream = xz2::read::XzDecoder::new(stream); Ok(TarXzPackage(TarPackage::new( stream, temp_cfg, notify_handler, )?)) } } impl<'a> Package for TarXzPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } }
Unpacker
identifier_name
package.rs
//! An interpreter for the rust-installer package format. Responsible
//! for installing from a directory or tarball to an installation
//! prefix, represented by a `Components` instance.

use crate::dist::component::components::*;
use crate::dist::component::transaction::*;
use crate::dist::temp;
use crate::errors::*;
use crate::utils::notifications::Notification;
use crate::utils::utils;

use std::collections::HashSet;
use std::fmt;
use std::io::Read;
use std::path::{Path, PathBuf};

/// The current metadata revision used by rust-installer
pub const INSTALLER_VERSION: &str = "3";
/// Name of the file inside a package that records its installer version.
pub const VERSION_FILE: &str = "rust-installer-version";

/// A source of installable components (a directory or an unpacked tarball).
pub trait Package: fmt::Debug {
    /// Returns true if the package provides `component` (or, when given,
    /// its `short_name`).
    fn contains(&self, component: &str, short_name: Option<&str>) -> bool;
    /// Installs `component` into `target`, recording each installed entry
    /// in `tx` so the operation can be rolled back on failure.
    fn install<'a>(
        &self,
        target: &Components,
        component: &str,
        short_name: Option<&str>,
        tx: Transaction<'a>,
    ) -> Result<Transaction<'a>>;
    /// Lists every component name the package offers.
    fn components(&self) -> Vec<String>;
}

/// A package laid out as a plain directory on disk.
#[derive(Debug)]
pub struct DirectoryPackage {
    // Root directory of the unpacked package.
    path: PathBuf,
    // Component names read from the package's `components` file.
    components: HashSet<String>,
    // When true, install by copying; when false, move files into place.
    copy: bool,
}

impl DirectoryPackage {
    /// Validates the installer version, then reads the component list
    /// from `<path>/components` (one component name per line).
    pub fn new(path: PathBuf, copy: bool) -> Result<Self> {
        validate_installer_version(&path)?;

        let content = utils::read_file("package components", &path.join("components"))?;
        let components = content
            .lines()
            .map(std::borrow::ToOwned::to_owned)
            .collect();
        Ok(DirectoryPackage {
            path,
            components,
            copy,
        })
    }
}

/// Errors with `BadInstallerVersion` unless the package's version file
/// matches `INSTALLER_VERSION` exactly (after trimming whitespace).
fn validate_installer_version(path: &Path) -> Result<()> {
    let file = utils::read_file("installer version", &path.join(VERSION_FILE))?;
    let v = file.trim();
    if v == INSTALLER_VERSION {
        Ok(())
    } else {
        Err(ErrorKind::BadInstallerVersion(v.to_owned()).into())
    }
}

impl Package for DirectoryPackage {
    fn contains(&self, component: &str, short_name: Option<&str>) -> bool {
        // Match on either the full component name or, when provided,
        // the short name.
        self.components.contains(component)
            || if let Some(n) = short_name {
                self.components.contains(n)
            } else {
                false
            }
    }
    fn install<'a>(
        &self,
        target: &Components,
        name: &str,
        short_name: Option<&str>,
        tx: Transaction<'a>,
    ) -> Result<Transaction<'a>> {
        // Prefer the full name if this package carries it; otherwise fall
        // back to the short name (or the full name again if none given).
        let actual_name = if self.components.contains(name) {
            name
        } else if let Some(n) = short_name {
            n
        } else {
            name
        };

        let root = self.path.join(actual_name);

        // Each manifest line decodes to a (kind, relative path) pair;
        // kind is "file" or "dir".
        let manifest = utils::read_file("package manifest", &root.join("manifest.in"))?;
        let mut builder = target.add(name, tx);

        for l in manifest.lines() {
            let part = ComponentPart::decode(l)
                .ok_or_else(|| ErrorKind::CorruptComponent(name.to_owned()))?;

            let path = part.1;
            let src_path = root.join(&path);

            // Copy or move into place depending on whether the source
            // tree must remain intact (self.copy).
            match &*part.0 {
                "file" => {
                    if self.copy {
                        builder.copy_file(path.clone(), &src_path)?
                    } else {
                        builder.move_file(path.clone(), &src_path)?
                    }
                }
                "dir" => {
                    if self.copy {
                        builder.copy_dir(path.clone(), &src_path)?
                    } else {
                        builder.move_dir(path.clone(), &src_path)?
                    }
                }
                // Any other kind marks the component as corrupt.
                _ => return Err(ErrorKind::CorruptComponent(name.to_owned()).into()),
            }

            // Fix up permissions on the installed entry (no-op on Windows).
            set_file_perms(&target.prefix().path().join(path), &src_path)?;
        }

        let tx = builder.finish()?;

        Ok(tx)
    }
    fn components(&self) -> Vec<String> {
        self.components.iter().cloned().collect()
    }
}

// On Unix we need to set up the file permissions correctly so
// binaries are executable and directories readable. This shouldn't be
// necessary: the source files *should* have the right permissions,
// but due to rust-lang/rust#25479 they don't.
#[cfg(unix)] fn set_file_perms(dest_path: &Path, src_path: &Path) -> Result<()> { use std::fs::{self, Metadata}; use std::os::unix::fs::PermissionsExt; use walkdir::WalkDir; // Compute whether this entry needs the X bit fn needs_x(meta: &Metadata) -> bool { meta.is_dir() || // Directories need it meta.permissions().mode() & 0o700 == 0o700 // If it is rwx for the user, it gets the X bit } // By convention, anything in the bin/ directory of the package is a binary let is_bin = if let Some(p) = src_path.parent() { p.ends_with("bin") } else { false }; let is_dir = utils::is_directory(dest_path); if is_dir { // Walk the directory setting everything for entry in WalkDir::new(dest_path) { let entry = entry.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?; let meta = entry .metadata() .chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?; let mut perm = meta.permissions(); perm.set_mode(if needs_x(&meta) { 0o755 } else { 0o644 }); fs::set_permissions(entry.path(), perm) .chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; } } else { let meta = fs::metadata(dest_path).chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; let mut perm = meta.permissions(); perm.set_mode(if is_bin || needs_x(&meta) { 0o755 } else { 0o644 }); fs::set_permissions(dest_path, perm) .chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; } Ok(()) } #[cfg(windows)] fn set_file_perms(_dest_path: &Path, _src_path: &Path) -> Result<()> { Ok(()) } #[derive(Debug)] pub struct TarPackage<'a>(DirectoryPackage, temp::Dir<'a>); impl<'a> TarPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let temp_dir = temp_cfg.new_directory()?; let mut archive = tar::Archive::new(stream); // The rust-installer packages unpack to a directory called // $pkgname-$version-$target. Skip that directory when // unpacking. 
unpack_without_first_dir(&mut archive, &*temp_dir, notify_handler)?; Ok(TarPackage( DirectoryPackage::new(temp_dir.to_owned(), false)?, temp_dir, )) } } #[cfg(windows)] mod unpacker { use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use threadpool; use crate::utils::notifications::Notification; pub struct Unpacker<'a> { n_files: Arc<AtomicUsize>, pool: threadpool::ThreadPool, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, } impl<'a> Unpacker<'a> { pub fn new(notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Self { // Defaults to hardware thread count threads; this is suitable for // our needs as IO bound operations tend to show up as write latencies // rather than close latencies, so we don't need to look at // more threads to get more IO dispatched at this stage in the process. let pool = threadpool::Builder::new() .thread_name("CloseHandle".into()) .build(); Unpacker { n_files: Arc::new(AtomicUsize::new(0)), pool: pool, notify_handler: notify_handler, } } pub fn handle(&mut self, unpacked: tar::Unpacked) { if let tar::Unpacked::File(f) = unpacked { self.n_files.fetch_add(1, Ordering::Relaxed); let n_files = self.n_files.clone(); self.pool.execute(move || { drop(f); n_files.fetch_sub(1, Ordering::Relaxed); }); } } } impl<'a> Drop for Unpacker<'a> { fn drop(&mut self) { // Some explanation is in order. Even though the tar we are reading from (if // any) will have had its FileWithProgress download tracking // completed before we hit drop, that is not true if we are unwinding due to a // failure, where the logical ownership of the progress bar is // ambiguous, and as the tracker itself is abstracted out behind // notifications etc we cannot just query for that. 
So: we assume no // more reads of the underlying tar will take place: either the // error unwinding will stop reads, or we completed; either way, we // notify finished to the tracker to force a reset to zero; we set // the units to files, show our progress, and set our units back // afterwards. The largest archives today - rust docs - have ~20k // items, and the download tracker's progress is confounded with // actual handling of data today, we synthesis a data buffer and // pretend to have bytes to deliver. self.notify_handler .map(|handler| handler(Notification::DownloadFinished)); self.notify_handler .map(|handler| handler(Notification::DownloadPushUnits("handles")));
handler(Notification::DownloadContentLengthReceived( prev_files as u64, )) }); if prev_files > 50 { println!("Closing {} deferred file handles", prev_files); } let buf: Vec<u8> = vec![0; prev_files]; assert!(32767 > prev_files); let mut current_files = prev_files; while current_files != 0 { use std::thread::sleep; sleep(std::time::Duration::from_millis(100)); prev_files = current_files; current_files = self.n_files.load(Ordering::Relaxed); let step_count = prev_files - current_files; self.notify_handler.map(|handler| { handler(Notification::DownloadDataReceived(&buf[0..step_count])) }); } self.pool.join(); self.notify_handler .map(|handler| handler(Notification::DownloadFinished)); self.notify_handler .map(|handler| handler(Notification::DownloadPopUnits)); } } } #[cfg(not(windows))] mod unpacker { use crate::utils::notifications::Notification; pub struct Unpacker {} impl Unpacker { pub fn new<'a>(_notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Unpacker { Unpacker {} } pub fn handle(&mut self, _unpacked: tar::Unpacked) {} } } fn unpack_without_first_dir<'a, R: Read>( archive: &mut tar::Archive<R>, path: &Path, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<()> { let mut unpacker = unpacker::Unpacker::new(notify_handler); let entries = archive .entries() .chain_err(|| ErrorKind::ExtractingPackage)?; let mut checked_parents: HashSet<PathBuf> = HashSet::new(); for entry in entries { let mut entry = entry.chain_err(|| ErrorKind::ExtractingPackage)?; let relpath = { let path = entry.path(); let path = path.chain_err(|| ErrorKind::ExtractingPackage)?; path.into_owned() }; let mut components = relpath.components(); // Throw away the first path component components.next(); let full_path = path.join(&components.as_path()); // Create the full path to the entry if it does not exist already if let Some(parent) = full_path.parent() { if !checked_parents.contains(parent) { checked_parents.insert(parent.to_owned()); // It would be nice to optimise 
this stat out, but the tar could be like so: // a/deep/file.txt // a/file.txt // which would require tracking the segments rather than a simple hash. // Until profile shows that one stat per dir is a problem (vs one stat per file) // leave till later. if !parent.exists() { std::fs::create_dir_all(&parent).chain_err(|| ErrorKind::ExtractingPackage)? } } } entry.set_preserve_mtime(false); entry .unpack(&full_path) .map(|unpacked| unpacker.handle(unpacked)) .chain_err(|| ErrorKind::ExtractingPackage)?; } Ok(()) } impl<'a> Package for TarPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } } #[derive(Debug)] pub struct TarGzPackage<'a>(TarPackage<'a>); impl<'a> TarGzPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let stream = flate2::read::GzDecoder::new(stream); Ok(TarGzPackage(TarPackage::new( stream, temp_cfg, notify_handler, )?)) } } impl<'a> Package for TarGzPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } } #[derive(Debug)] pub struct TarXzPackage<'a>(TarPackage<'a>); impl<'a> TarXzPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let stream = xz2::read::XzDecoder::new(stream); Ok(TarXzPackage(TarPackage::new( stream, temp_cfg, 
notify_handler, )?)) } } impl<'a> Package for TarXzPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } }
let mut prev_files = self.n_files.load(Ordering::Relaxed); self.notify_handler.map(|handler| {
random_line_split
package.rs
//! An interpreter for the rust-installer package format. Responsible
//! for installing from a directory or tarball to an installation
//! prefix, represented by a `Components` instance.

use crate::dist::component::components::*;
use crate::dist::component::transaction::*;
use crate::dist::temp;
use crate::errors::*;
use crate::utils::notifications::Notification;
use crate::utils::utils;

use std::collections::HashSet;
use std::fmt;
use std::io::Read;
use std::path::{Path, PathBuf};

/// The current metadata revision used by rust-installer
pub const INSTALLER_VERSION: &str = "3";
/// Name of the file inside a package that records its installer version.
pub const VERSION_FILE: &str = "rust-installer-version";

/// A source of installable components (a directory or an unpacked tarball).
pub trait Package: fmt::Debug {
    /// Returns true if the package provides `component` (or, when given,
    /// its `short_name`).
    fn contains(&self, component: &str, short_name: Option<&str>) -> bool;
    /// Installs `component` into `target`, recording each installed entry
    /// in `tx` so the operation can be rolled back on failure.
    fn install<'a>(
        &self,
        target: &Components,
        component: &str,
        short_name: Option<&str>,
        tx: Transaction<'a>,
    ) -> Result<Transaction<'a>>;
    /// Lists every component name the package offers.
    fn components(&self) -> Vec<String>;
}

/// A package laid out as a plain directory on disk.
#[derive(Debug)]
pub struct DirectoryPackage {
    // Root directory of the unpacked package.
    path: PathBuf,
    // Component names read from the package's `components` file.
    components: HashSet<String>,
    // When true, install by copying; when false, move files into place.
    copy: bool,
}

impl DirectoryPackage {
    /// Validates the installer version, then reads the component list
    /// from `<path>/components` (one component name per line).
    pub fn new(path: PathBuf, copy: bool) -> Result<Self> {
        validate_installer_version(&path)?;

        let content = utils::read_file("package components", &path.join("components"))?;
        let components = content
            .lines()
            .map(std::borrow::ToOwned::to_owned)
            .collect();
        Ok(DirectoryPackage {
            path,
            components,
            copy,
        })
    }
}

/// Errors with `BadInstallerVersion` unless the package's version file
/// matches `INSTALLER_VERSION` exactly (after trimming whitespace).
fn validate_installer_version(path: &Path) -> Result<()> {
    let file = utils::read_file("installer version", &path.join(VERSION_FILE))?;
    let v = file.trim();
    if v == INSTALLER_VERSION {
        Ok(())
    } else {
        Err(ErrorKind::BadInstallerVersion(v.to_owned()).into())
    }
}

impl Package for DirectoryPackage {
    fn contains(&self, component: &str, short_name: Option<&str>) -> bool {
        // Match on either the full component name or, when provided,
        // the short name.
        self.components.contains(component)
            || if let Some(n) = short_name {
                self.components.contains(n)
            } else {
                false
            }
    }
    fn install<'a>(
        &self,
        target: &Components,
        name: &str,
        short_name: Option<&str>,
        tx: Transaction<'a>,
    ) -> Result<Transaction<'a>> {
        // Prefer the full name if this package carries it; otherwise fall
        // back to the short name (or the full name again if none given).
        let actual_name = if self.components.contains(name) {
            name
        } else if let Some(n) = short_name {
            n
        } else {
            name
        };

        let root = self.path.join(actual_name);

        // Each manifest line decodes to a (kind, relative path) pair;
        // kind is "file" or "dir".
        let manifest = utils::read_file("package manifest", &root.join("manifest.in"))?;
        let mut builder = target.add(name, tx);

        for l in manifest.lines() {
            let part = ComponentPart::decode(l)
                .ok_or_else(|| ErrorKind::CorruptComponent(name.to_owned()))?;

            let path = part.1;
            let src_path = root.join(&path);

            // Copy or move into place depending on whether the source
            // tree must remain intact (self.copy).
            match &*part.0 {
                "file" => {
                    if self.copy {
                        builder.copy_file(path.clone(), &src_path)?
                    } else {
                        builder.move_file(path.clone(), &src_path)?
                    }
                }
                "dir" => {
                    if self.copy {
                        builder.copy_dir(path.clone(), &src_path)?
                    } else {
                        builder.move_dir(path.clone(), &src_path)?
                    }
                }
                // Any other kind marks the component as corrupt.
                _ => return Err(ErrorKind::CorruptComponent(name.to_owned()).into()),
            }

            // Fix up permissions on the installed entry (no-op on Windows).
            set_file_perms(&target.prefix().path().join(path), &src_path)?;
        }

        let tx = builder.finish()?;

        Ok(tx)
    }
    fn components(&self) -> Vec<String> {
        self.components.iter().cloned().collect()
    }
}

// On Unix we need to set up the file permissions correctly so
// binaries are executable and directories readable. This shouldn't be
// necessary: the source files *should* have the right permissions,
// but due to rust-lang/rust#25479 they don't.
#[cfg(unix)] fn set_file_perms(dest_path: &Path, src_path: &Path) -> Result<()> { use std::fs::{self, Metadata}; use std::os::unix::fs::PermissionsExt; use walkdir::WalkDir; // Compute whether this entry needs the X bit fn needs_x(meta: &Metadata) -> bool { meta.is_dir() || // Directories need it meta.permissions().mode() & 0o700 == 0o700 // If it is rwx for the user, it gets the X bit } // By convention, anything in the bin/ directory of the package is a binary let is_bin = if let Some(p) = src_path.parent() { p.ends_with("bin") } else { false }; let is_dir = utils::is_directory(dest_path); if is_dir { // Walk the directory setting everything for entry in WalkDir::new(dest_path) { let entry = entry.chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?; let meta = entry .metadata() .chain_err(|| ErrorKind::ComponentDirPermissionsFailed)?; let mut perm = meta.permissions(); perm.set_mode(if needs_x(&meta) { 0o755 } else { 0o644 }); fs::set_permissions(entry.path(), perm) .chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; } } else { let meta = fs::metadata(dest_path).chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; let mut perm = meta.permissions(); perm.set_mode(if is_bin || needs_x(&meta)
else { 0o644 }); fs::set_permissions(dest_path, perm) .chain_err(|| ErrorKind::ComponentFilePermissionsFailed)?; } Ok(()) } #[cfg(windows)] fn set_file_perms(_dest_path: &Path, _src_path: &Path) -> Result<()> { Ok(()) } #[derive(Debug)] pub struct TarPackage<'a>(DirectoryPackage, temp::Dir<'a>); impl<'a> TarPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let temp_dir = temp_cfg.new_directory()?; let mut archive = tar::Archive::new(stream); // The rust-installer packages unpack to a directory called // $pkgname-$version-$target. Skip that directory when // unpacking. unpack_without_first_dir(&mut archive, &*temp_dir, notify_handler)?; Ok(TarPackage( DirectoryPackage::new(temp_dir.to_owned(), false)?, temp_dir, )) } } #[cfg(windows)] mod unpacker { use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use threadpool; use crate::utils::notifications::Notification; pub struct Unpacker<'a> { n_files: Arc<AtomicUsize>, pool: threadpool::ThreadPool, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, } impl<'a> Unpacker<'a> { pub fn new(notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Self { // Defaults to hardware thread count threads; this is suitable for // our needs as IO bound operations tend to show up as write latencies // rather than close latencies, so we don't need to look at // more threads to get more IO dispatched at this stage in the process. 
let pool = threadpool::Builder::new() .thread_name("CloseHandle".into()) .build(); Unpacker { n_files: Arc::new(AtomicUsize::new(0)), pool: pool, notify_handler: notify_handler, } } pub fn handle(&mut self, unpacked: tar::Unpacked) { if let tar::Unpacked::File(f) = unpacked { self.n_files.fetch_add(1, Ordering::Relaxed); let n_files = self.n_files.clone(); self.pool.execute(move || { drop(f); n_files.fetch_sub(1, Ordering::Relaxed); }); } } } impl<'a> Drop for Unpacker<'a> { fn drop(&mut self) { // Some explanation is in order. Even though the tar we are reading from (if // any) will have had its FileWithProgress download tracking // completed before we hit drop, that is not true if we are unwinding due to a // failure, where the logical ownership of the progress bar is // ambiguous, and as the tracker itself is abstracted out behind // notifications etc we cannot just query for that. So: we assume no // more reads of the underlying tar will take place: either the // error unwinding will stop reads, or we completed; either way, we // notify finished to the tracker to force a reset to zero; we set // the units to files, show our progress, and set our units back // afterwards. The largest archives today - rust docs - have ~20k // items, and the download tracker's progress is confounded with // actual handling of data today, we synthesis a data buffer and // pretend to have bytes to deliver. 
self.notify_handler .map(|handler| handler(Notification::DownloadFinished)); self.notify_handler .map(|handler| handler(Notification::DownloadPushUnits("handles"))); let mut prev_files = self.n_files.load(Ordering::Relaxed); self.notify_handler.map(|handler| { handler(Notification::DownloadContentLengthReceived( prev_files as u64, )) }); if prev_files > 50 { println!("Closing {} deferred file handles", prev_files); } let buf: Vec<u8> = vec![0; prev_files]; assert!(32767 > prev_files); let mut current_files = prev_files; while current_files != 0 { use std::thread::sleep; sleep(std::time::Duration::from_millis(100)); prev_files = current_files; current_files = self.n_files.load(Ordering::Relaxed); let step_count = prev_files - current_files; self.notify_handler.map(|handler| { handler(Notification::DownloadDataReceived(&buf[0..step_count])) }); } self.pool.join(); self.notify_handler .map(|handler| handler(Notification::DownloadFinished)); self.notify_handler .map(|handler| handler(Notification::DownloadPopUnits)); } } } #[cfg(not(windows))] mod unpacker { use crate::utils::notifications::Notification; pub struct Unpacker {} impl Unpacker { pub fn new<'a>(_notify_handler: Option<&'a dyn Fn(Notification<'_>)>) -> Unpacker { Unpacker {} } pub fn handle(&mut self, _unpacked: tar::Unpacked) {} } } fn unpack_without_first_dir<'a, R: Read>( archive: &mut tar::Archive<R>, path: &Path, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<()> { let mut unpacker = unpacker::Unpacker::new(notify_handler); let entries = archive .entries() .chain_err(|| ErrorKind::ExtractingPackage)?; let mut checked_parents: HashSet<PathBuf> = HashSet::new(); for entry in entries { let mut entry = entry.chain_err(|| ErrorKind::ExtractingPackage)?; let relpath = { let path = entry.path(); let path = path.chain_err(|| ErrorKind::ExtractingPackage)?; path.into_owned() }; let mut components = relpath.components(); // Throw away the first path component components.next(); let full_path = 
path.join(&components.as_path()); // Create the full path to the entry if it does not exist already if let Some(parent) = full_path.parent() { if !checked_parents.contains(parent) { checked_parents.insert(parent.to_owned()); // It would be nice to optimise this stat out, but the tar could be like so: // a/deep/file.txt // a/file.txt // which would require tracking the segments rather than a simple hash. // Until profile shows that one stat per dir is a problem (vs one stat per file) // leave till later. if !parent.exists() { std::fs::create_dir_all(&parent).chain_err(|| ErrorKind::ExtractingPackage)? } } } entry.set_preserve_mtime(false); entry .unpack(&full_path) .map(|unpacked| unpacker.handle(unpacked)) .chain_err(|| ErrorKind::ExtractingPackage)?; } Ok(()) } impl<'a> Package for TarPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } } #[derive(Debug)] pub struct TarGzPackage<'a>(TarPackage<'a>); impl<'a> TarGzPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let stream = flate2::read::GzDecoder::new(stream); Ok(TarGzPackage(TarPackage::new( stream, temp_cfg, notify_handler, )?)) } } impl<'a> Package for TarGzPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } } #[derive(Debug)] pub struct TarXzPackage<'a>(TarPackage<'a>); 
impl<'a> TarXzPackage<'a> { pub fn new<R: Read>( stream: R, temp_cfg: &'a temp::Cfg, notify_handler: Option<&'a dyn Fn(Notification<'_>)>, ) -> Result<Self> { let stream = xz2::read::XzDecoder::new(stream); Ok(TarXzPackage(TarPackage::new( stream, temp_cfg, notify_handler, )?)) } } impl<'a> Package for TarXzPackage<'a> { fn contains(&self, component: &str, short_name: Option<&str>) -> bool { self.0.contains(component, short_name) } fn install<'b>( &self, target: &Components, component: &str, short_name: Option<&str>, tx: Transaction<'b>, ) -> Result<Transaction<'b>> { self.0.install(target, component, short_name, tx) } fn components(&self) -> Vec<String> { self.0.components() } }
{ 0o755 }
conditional_block
app.js
import Vue from 'vue'; import VueResource from 'vue-resource'; // window.Vue = Vue; Vue.use(VueResource); import Alert from './components/Alert.vue'; // var Vue = require('vue'); // var VueResource = require('vue-resource'); // // window.Vue = Vue; // Vue.use(VueResource); // import Profile from './Profile.vue'; // new Vue({ // el: '#app', // // components: { Profile } // data:{ // message:'Hello Vue.js!' // } // }); // new Vue({ // el: '#app', // data: { // // message: 'Hello Vue.js!', // todos: [ // { text: 'Learn JavaScript' }, // { text: 'Learn Vue.js' }, // { text: 'Build Something Awesome' }, // { text:'QIBU'} // ] // } // }); // new Vue({ // el: '#app', // data: { // message: 'Hello Vue.js!' // }, // methods: { // reverseMessage: function () { // this.message = this.message.split('').reverse().join('') // } // } // }); new Vue({ el: '#app2', data: { message: 'Hello' } }); Vue.component('counter',{ template:'#counter-template', props:['subject'], data:function() { return { count:0 }; } }); new Vue({ el: '#app3', data: { points: 50, first: 'Xiajun', last: 'Yan', fullname: 'Xiajun Yan', }, computed: { skill: function() { if (this.points <= 50) { return 'B'; } return 'A'; }, fullname: function() { return this.first + ' ' + this.last; }, }, watch:{ first:function(first){ this.fullname = first + ' ' + this.last; }, last:function(last){ this.fullname = this.first + ' ' + last; }, }, methods: { handleIt: function() { alert('Handled'); }, minusCount: function() { this.count -= 1; } }, components: { counter: { template: '#counter-template', props: ['subject'], data: function() { return { count: 0 }; } } } }); new Vue({ el: '#app', data: { newTodo: '', todos: [{ text: 'Add some todos' }] }, methods: { addTodo: function() { var text = this.newTodo.trim() if (text) { this.todos.push({ text: text }) this.newTodo = '' } }, removeTodo: function(index) { this.todos.splice(index, 1) } } }); new Vue({ el:'#app4', data: { // store a list of plans plans:[ { name:'Enterprise', 
price:100 }, { name:'Pro', price:50 }, { name:'Personal', price:10 }, { name:'Free', price:0 }, ], active: {} }, components: { plan: { // template: '#plan-template', props:['plan','active'], computed: { isUpgrade : function() { return this.plan.price > this.active.price; //this.plan.price //this.active.price } }, methods: { setActivePlan: function(){ this.active = this.plan; } } } } }); new Vue({ el:"#app5", data: { tasks: [ { body: 'Go to the store', completed:false,price:100}, { body: 'Go to the bank', completed:false,price:100 }, { body: 'Go to the doctor', completed:true,price:100 } ] }, components: { taskcomp: { template:'#tasks-template', props:['list'], computed: { remaining: function() { // completedTasks = // inProgress = ! var vm = this; // return this.list.filter(function(task){ // return !vm.isCompleted(task); // }).length; return this.list.filter(this.isInProgress).length; } }, methods: { setPrice: function(task) { task.price = 50; }, isCompleted:function(task) { return task.completed; }, isInProgress:function(task) { return !this.isCompleted(task); }, deleteTask: function(task) { this.list.$remove(task); }, clearCompleted: function(){ this.list = this.list.filter(this.isInProgress); } } } } }); Vue.component('tasks',{ template: '#tasks-template', data: function(){ return { list: [] }; }, created: function() { this.fetchTaskList(); // var vm = this; // $.getJSON('api/tasks', function(tasks){ // console.log(tasks); // // console.log(data); // //assign these data to a list // vm.list = tasks; // }.bind(this)); }, //with api/tasks, no longer pass props through from our sever side. // props:['list'], //when this components is initially created, I want to go ahead and make a AJAX request. 
// created() is shorthand // created() { // // this.list = JSON.parse(this.list); // }, // methods: { fetchTaskList: function(){ var resource = this.$resource('api/tasks{/id}'); resource.get({}).then((response) => { this.list = response.data; }); // resource.update({id:5},{body:'Update task body'}).then((response) =>{ // this.list = response.data; // }); }, // this.$http.get('api/tasks').then((response) => { // console.log(response.); // }, (response) => { // // error callback // }); // } // $.getJSON('api/tasks', function(tasks){ // this.list = tasks; // }.bind(this)); deleteTask: function(task){ this.list.$remove(task); }, getTaskID:function(task) { console.log(task.id); } } }); new Vue({ el:'#app6' }); Vue.filter('jsonIt',function(value){ return JSON.stringify(value); }); Vue.filter('role',function(value,role){ return value.filter(function(item) { return item.role == role; }); });// people | role 'admin' Vue.component('message',{ template:'#message-template', data:function() { return { message :''}; }, methods: { storeMessage: function() { // console.log('Storing ' + this.message); // $dispatch, parent can listen for that event, which is handled this.$dispatch('new-message',this.message); // $broadcast this.message = ''; } } }); // new Vue({ // el:'#app', // data: { // people: [ // {name:'Joe',role:'admin'}, // {name:'Susan',role:'admin'}, // {name:'Frank',role:'student'}, // {name:'Jeffrey',role:'admin'}, // ], // messages: [] // }, // methods:{ // handleNewMessage: function(message) { // this.messages.push(message); // // console.log('Parent is handling ' + message); // } // }, // // events: { // // "new-message": function(message) { // // console.log('Parent is handling ' + message); // // } // // }, // components: { Alert }, // ready() { // // alert('Ready to go!'); // } // }) //Lesson // Vue.directive('ajax',{ // //The way of this work is: // //when you attach this ('ajax') directive to html tag, // //vue will instantly call this bind() method // bind: 
function() { // }, // //Next, as soon as that finishs, the update() method will // //instantly be called, and the way they work is : this will // //repeatly be called, whenever the bind value(value) changes // update: function(value) { // }, // //And finally, as you might expect, when the directive is unbind, // //will trigger this method, where you can remove any of their listeners or same stuffs like that // unbind: function() { // } // }); // // keep it simple // Vue.directive('ajax',function(value){}); // Vue.http.headers.common['X-CSRF-TOKEN'] = document.querySelector('input[name="_token"]').value; Vue.directive('ajax',{ params: ['complete'], bind: function() { // console.log(this.params); //First thing we need to do, directive object, which we can fetch. //In our situation, we need to add a event listener to the form element //if you using jQuery, you could say: $(this.el).on('---') //Let's listen for when the form is submitted, and then will trigger a method called "onSubmit" this.el.addEventListener('submit',this.onSubmit.bind(this)); }, update: function(value) { // alert('update'); }, //when we trigger this, onSubmit:function(e) { //"this"-->will no longer refer to which is bund in the bind() method (this.el.---), //"this" now refer to the form that will be submitted //so if we wanna say: no, within the context of this method, we still wanna this to refer //to it did before, JavaScript did it in a very weird stuffs: just make sure we bind the obeject: //.bind(this) in the above method // //We prevent the default action, so we do not submit the form, and instead we use //view resource to submit (this.vm in the following) e.preventDefault(); // Vue.http.post // // this.vm refers to the viewmodel (new Vue({el:"#app7"})) // Quick note on using the array syntax here: // a lot people don't know this with javaScript, in this example, we could say: // vm.$http.post or your can format it with vm.$http['post'] // this options is very good when you referencing a 
variable like [requestType], // because obviously when you use the dot-synatax: vm.$http.requestType that's will not going to work //this.el.action = 'tasks/1' this.vm .$http[this.getRequestType()](this.el.action) .then(this.onComplete.bind(this)) //change response in app/Exceptions/Handler.php //.success or .fail in jQuery .catch(this.onError.bind(this)); }, onComplete:function () { if(this.params.complete) { alert(this.params.complete); } }, onError: function(response) { alert(response.data.message); //flash message in real life }, //We need to figure out what the RequestType should be for this form. getRequestType: function() { //see the console, and find out the name of the hidden input of DELETE var method = this.el.querySelector('input[name="_method"]'); //if not, we just grab the method attribute of the form //that will be a 'DELETE'->'delete' or 'POST'->'post' return (method ? method.value : this.el.method).toLowerCase(); } }); new Vue({ el:'#app7', http: { headers: { 'X-CSRF-TOKEN': document.querySelector('input[name="_token"]').value } } }); var min = Vue.extend({ template: '#clock-min' }); var hour = Vue.extend({ template: '#clock-hour' }); var sec = Vue.extend({ template: '#clock-sec' }); var time = Vue.extend({ template: '#clock-time', props: ['myMessage'] }); Vue.component('clock', { template: '#clock-template', components: { 'min-component': min, 'hour-component': hour, 'sec-component': sec, 'time-component': time }, data () { return { time: "00:00:00" } }, ready ()
, methods: { startTime: function () { var min = this.$refs.min.$el, sec = this.$refs.sec.$el, hour = this.$refs.hour.$el; var now = new Date(), hValue = now.getHours(), mValue = now.getMinutes(), sValue = now.getSeconds(); setTimeout(this.startTime, 1000); // 数字时间 mValue = this.checkTime(mValue); sValue = this.checkTime(sValue); this.time = hValue + ":" + mValue + ":" + sValue; }, checkTime: function (i) { if (i < 10) {i = "0" + i}; return i; } } }); var parent = new Vue({ el: '#clock' })
{ this.startTime() }
identifier_body
app.js
import Vue from 'vue'; import VueResource from 'vue-resource'; // window.Vue = Vue; Vue.use(VueResource); import Alert from './components/Alert.vue'; // var Vue = require('vue'); // var VueResource = require('vue-resource'); // // window.Vue = Vue; // Vue.use(VueResource); // import Profile from './Profile.vue'; // new Vue({ // el: '#app', // // components: { Profile } // data:{ // message:'Hello Vue.js!' // } // }); // new Vue({ // el: '#app', // data: { // // message: 'Hello Vue.js!', // todos: [ // { text: 'Learn JavaScript' }, // { text: 'Learn Vue.js' }, // { text: 'Build Something Awesome' }, // { text:'QIBU'} // ] // } // }); // new Vue({ // el: '#app', // data: { // message: 'Hello Vue.js!' // }, // methods: { // reverseMessage: function () { // this.message = this.message.split('').reverse().join('') // } // } // }); new Vue({ el: '#app2', data: { message: 'Hello' } }); Vue.component('counter',{ template:'#counter-template', props:['subject'], data:function() { return { count:0 }; } }); new Vue({ el: '#app3', data: { points: 50, first: 'Xiajun', last: 'Yan', fullname: 'Xiajun Yan', }, computed: { skill: function() { if (this.points <= 50) { return 'B'; } return 'A'; }, fullname: function() { return this.first + ' ' + this.last; }, }, watch:{ first:function(first){ this.fullname = first + ' ' + this.last; }, last:function(last){ this.fullname = this.first + ' ' + last; }, }, methods: { handleIt: function() { alert('Handled'); }, minusCount: function() { this.count -= 1; } }, components: { counter: { template: '#counter-template', props: ['subject'], data: function() { return { count: 0 }; } } } }); new Vue({ el: '#app', data: { newTodo: '', todos: [{ text: 'Add some todos' }] }, methods: { addTodo: function() { var text = this.newTodo.trim() if (text) { this.todos.push({ text: text }) this.newTodo = '' } }, removeTodo: function(index) { this.todos.splice(index, 1) } } }); new Vue({ el:'#app4', data: { // store a list of plans plans:[ { name:'Enterprise', 
price:100 }, { name:'Pro', price:50 }, { name:'Personal', price:10 }, { name:'Free', price:0 }, ], active: {} }, components: { plan: { // template: '#plan-template', props:['plan','active'], computed: { isUpgrade : function() { return this.plan.price > this.active.price; //this.plan.price //this.active.price } }, methods: { setActivePlan: function(){ this.active = this.plan; } } } } }); new Vue({ el:"#app5", data: { tasks: [ { body: 'Go to the store', completed:false,price:100}, { body: 'Go to the bank', completed:false,price:100 }, { body: 'Go to the doctor', completed:true,price:100 } ] }, components: { taskcomp: { template:'#tasks-template', props:['list'], computed: { remaining: function() { // completedTasks = // inProgress = ! var vm = this; // return this.list.filter(function(task){ // return !vm.isCompleted(task); // }).length; return this.list.filter(this.isInProgress).length; } }, methods: { setPrice: function(task) { task.price = 50; }, isCompleted:function(task) { return task.completed; }, isInProgress:function(task) { return !this.isCompleted(task); }, deleteTask: function(task) { this.list.$remove(task); }, clearCompleted: function(){ this.list = this.list.filter(this.isInProgress); } } } } }); Vue.component('tasks',{ template: '#tasks-template', data: function(){ return { list: [] }; }, created: function() { this.fetchTaskList(); // var vm = this; // $.getJSON('api/tasks', function(tasks){ // console.log(tasks); // // console.log(data); // //assign these data to a list // vm.list = tasks; // }.bind(this)); }, //with api/tasks, no longer pass props through from our sever side. // props:['list'], //when this components is initially created, I want to go ahead and make a AJAX request. 
// created() is shorthand // created() { // // this.list = JSON.parse(this.list); // }, // methods: { fetchTaskList: function(){ var resource = this.$resource('api/tasks{/id}'); resource.get({}).then((response) => { this.list = response.data; }); // resource.update({id:5},{body:'Update task body'}).then((response) =>{ // this.list = response.data; // }); }, // this.$http.get('api/tasks').then((response) => { // console.log(response.); // }, (response) => { // // error callback // }); // } // $.getJSON('api/tasks', function(tasks){ // this.list = tasks; // }.bind(this)); deleteTask: function(task){ this.list.$remove(task); }, getTaskID:function(task) { console.log(task.id); } } }); new Vue({ el:'#app6' }); Vue.filter('jsonIt',function(value){ return JSON.stringify(value); }); Vue.filter('role',function(value,role){ return value.filter(function(item) { return item.role == role; }); });// people | role 'admin' Vue.component('message',{ template:'#message-template', data:function() { return { message :''}; }, methods: { storeMessage: function() { // console.log('Storing ' + this.message); // $dispatch, parent can listen for that event, which is handled this.$dispatch('new-message',this.message); // $broadcast this.message = ''; } } }); // new Vue({ // el:'#app', // data: { // people: [ // {name:'Joe',role:'admin'}, // {name:'Susan',role:'admin'}, // {name:'Frank',role:'student'}, // {name:'Jeffrey',role:'admin'}, // ], // messages: [] // }, // methods:{ // handleNewMessage: function(message) { // this.messages.push(message); // // console.log('Parent is handling ' + message); // } // }, // // events: { // // "new-message": function(message) { // // console.log('Parent is handling ' + message); // // } // // }, // components: { Alert }, // ready() { // // alert('Ready to go!'); // } // }) //Lesson // Vue.directive('ajax',{ // //The way of this work is: // //when you attach this ('ajax') directive to html tag, // //vue will instantly call this bind() method // bind: 
function() { // }, // //Next, as soon as that finishs, the update() method will // //instantly be called, and the way they work is : this will // //repeatly be called, whenever the bind value(value) changes // update: function(value) { // }, // //And finally, as you might expect, when the directive is unbind, // //will trigger this method, where you can remove any of their listeners or same stuffs like that // unbind: function() { // } // }); // // keep it simple // Vue.directive('ajax',function(value){}); // Vue.http.headers.common['X-CSRF-TOKEN'] = document.querySelector('input[name="_token"]').value; Vue.directive('ajax',{ params: ['complete'], bind: function() { // console.log(this.params); //First thing we need to do, directive object, which we can fetch. //In our situation, we need to add a event listener to the form element //if you using jQuery, you could say: $(this.el).on('---') //Let's listen for when the form is submitted, and then will trigger a method called "onSubmit" this.el.addEventListener('submit',this.onSubmit.bind(this)); }, update: function(value) { // alert('update'); }, //when we trigger this, onSubmit:function(e) { //"this"-->will no longer refer to which is bund in the bind() method (this.el.---), //"this" now refer to the form that will be submitted //so if we wanna say: no, within the context of this method, we still wanna this to refer //to it did before, JavaScript did it in a very weird stuffs: just make sure we bind the obeject: //.bind(this) in the above method // //We prevent the default action, so we do not submit the form, and instead we use //view resource to submit (this.vm in the following) e.preventDefault(); // Vue.http.post // // this.vm refers to the viewmodel (new Vue({el:"#app7"})) // Quick note on using the array syntax here: // a lot people don't know this with javaScript, in this example, we could say: // vm.$http.post or your can format it with vm.$http['post'] // this options is very good when you referencing a 
variable like [requestType], // because obviously when you use the dot-synatax: vm.$http.requestType that's will not going to work //this.el.action = 'tasks/1' this.vm .$http[this.getRequestType()](this.el.action) .then(this.onComplete.bind(this)) //change response in app/Exceptions/Handler.php //.success or .fail in jQuery .catch(this.onError.bind(this)); }, onComplete:function () { if(this.params.complete) { alert(this.params.complete); } }, onError: function(response) { alert(response.data.message); //flash message in real life }, //We need to figure out what the RequestType should be for this form. getRequestType: function() { //see the console, and find out the name of the hidden input of DELETE var method = this.el.querySelector('input[name="_method"]'); //if not, we just grab the method attribute of the form //that will be a 'DELETE'->'delete' or 'POST'->'post' return (method ? method.value : this.el.method).toLowerCase(); } }); new Vue({ el:'#app7', http: { headers: { 'X-CSRF-TOKEN': document.querySelector('input[name="_token"]').value } } }); var min = Vue.extend({ template: '#clock-min' }); var hour = Vue.extend({ template: '#clock-hour' }); var sec = Vue.extend({ template: '#clock-sec' }); var time = Vue.extend({ template: '#clock-time', props: ['myMessage'] }); Vue.component('clock', { template: '#clock-template', components: { 'min-component': min, 'hour-component': hour, 'sec-component': sec, 'time-component': time }, data () { return { time: "00:00:00" } },
() { this.startTime() }, methods: { startTime: function () { var min = this.$refs.min.$el, sec = this.$refs.sec.$el, hour = this.$refs.hour.$el; var now = new Date(), hValue = now.getHours(), mValue = now.getMinutes(), sValue = now.getSeconds(); setTimeout(this.startTime, 1000); // 数字时间 mValue = this.checkTime(mValue); sValue = this.checkTime(sValue); this.time = hValue + ":" + mValue + ":" + sValue; }, checkTime: function (i) { if (i < 10) {i = "0" + i}; return i; } } }); var parent = new Vue({ el: '#clock' })
ready
identifier_name
app.js
import Vue from 'vue'; import VueResource from 'vue-resource'; // window.Vue = Vue; Vue.use(VueResource); import Alert from './components/Alert.vue'; // var Vue = require('vue'); // var VueResource = require('vue-resource'); // // window.Vue = Vue; // Vue.use(VueResource); // import Profile from './Profile.vue'; // new Vue({ // el: '#app', // // components: { Profile } // data:{ // message:'Hello Vue.js!' // } // }); // new Vue({ // el: '#app', // data: { // // message: 'Hello Vue.js!', // todos: [ // { text: 'Learn JavaScript' }, // { text: 'Learn Vue.js' }, // { text: 'Build Something Awesome' }, // { text:'QIBU'} // ] // } // }); // new Vue({ // el: '#app', // data: { // message: 'Hello Vue.js!' // }, // methods: { // reverseMessage: function () { // this.message = this.message.split('').reverse().join('') // } // } // }); new Vue({ el: '#app2', data: { message: 'Hello' } }); Vue.component('counter',{ template:'#counter-template', props:['subject'], data:function() { return { count:0 }; } }); new Vue({ el: '#app3', data: { points: 50, first: 'Xiajun', last: 'Yan', fullname: 'Xiajun Yan', }, computed: { skill: function() { if (this.points <= 50) { return 'B'; } return 'A'; }, fullname: function() { return this.first + ' ' + this.last; }, }, watch:{ first:function(first){ this.fullname = first + ' ' + this.last; }, last:function(last){ this.fullname = this.first + ' ' + last; }, }, methods: { handleIt: function() { alert('Handled'); }, minusCount: function() { this.count -= 1; } }, components: { counter: { template: '#counter-template', props: ['subject'], data: function() { return { count: 0 }; } } } }); new Vue({ el: '#app', data: { newTodo: '', todos: [{ text: 'Add some todos' }] }, methods: { addTodo: function() { var text = this.newTodo.trim() if (text) { this.todos.push({ text: text }) this.newTodo = '' } }, removeTodo: function(index) { this.todos.splice(index, 1) } } }); new Vue({ el:'#app4', data: { // store a list of plans plans:[ { name:'Enterprise', 
price:100 }, { name:'Pro', price:50 }, { name:'Personal', price:10 }, { name:'Free', price:0 }, ], active: {} }, components: { plan: { // template: '#plan-template', props:['plan','active'], computed: { isUpgrade : function() { return this.plan.price > this.active.price; //this.plan.price //this.active.price } }, methods: { setActivePlan: function(){ this.active = this.plan; } } } } }); new Vue({ el:"#app5", data: { tasks: [ { body: 'Go to the store', completed:false,price:100}, { body: 'Go to the bank', completed:false,price:100 }, { body: 'Go to the doctor', completed:true,price:100 } ] }, components: { taskcomp: { template:'#tasks-template', props:['list'], computed: { remaining: function() { // completedTasks = // inProgress = ! var vm = this; // return this.list.filter(function(task){ // return !vm.isCompleted(task); // }).length; return this.list.filter(this.isInProgress).length; } }, methods: { setPrice: function(task) { task.price = 50; }, isCompleted:function(task) { return task.completed; },
deleteTask: function(task) { this.list.$remove(task); }, clearCompleted: function(){ this.list = this.list.filter(this.isInProgress); } } } } }); Vue.component('tasks',{ template: '#tasks-template', data: function(){ return { list: [] }; }, created: function() { this.fetchTaskList(); // var vm = this; // $.getJSON('api/tasks', function(tasks){ // console.log(tasks); // // console.log(data); // //assign these data to a list // vm.list = tasks; // }.bind(this)); }, //with api/tasks, no longer pass props through from our sever side. // props:['list'], //when this components is initially created, I want to go ahead and make a AJAX request. // created() is shorthand // created() { // // this.list = JSON.parse(this.list); // }, // methods: { fetchTaskList: function(){ var resource = this.$resource('api/tasks{/id}'); resource.get({}).then((response) => { this.list = response.data; }); // resource.update({id:5},{body:'Update task body'}).then((response) =>{ // this.list = response.data; // }); }, // this.$http.get('api/tasks').then((response) => { // console.log(response.); // }, (response) => { // // error callback // }); // } // $.getJSON('api/tasks', function(tasks){ // this.list = tasks; // }.bind(this)); deleteTask: function(task){ this.list.$remove(task); }, getTaskID:function(task) { console.log(task.id); } } }); new Vue({ el:'#app6' }); Vue.filter('jsonIt',function(value){ return JSON.stringify(value); }); Vue.filter('role',function(value,role){ return value.filter(function(item) { return item.role == role; }); });// people | role 'admin' Vue.component('message',{ template:'#message-template', data:function() { return { message :''}; }, methods: { storeMessage: function() { // console.log('Storing ' + this.message); // $dispatch, parent can listen for that event, which is handled this.$dispatch('new-message',this.message); // $broadcast this.message = ''; } } }); // new Vue({ // el:'#app', // data: { // people: [ // {name:'Joe',role:'admin'}, // 
{name:'Susan',role:'admin'}, // {name:'Frank',role:'student'}, // {name:'Jeffrey',role:'admin'}, // ], // messages: [] // }, // methods:{ // handleNewMessage: function(message) { // this.messages.push(message); // // console.log('Parent is handling ' + message); // } // }, // // events: { // // "new-message": function(message) { // // console.log('Parent is handling ' + message); // // } // // }, // components: { Alert }, // ready() { // // alert('Ready to go!'); // } // }) //Lesson // Vue.directive('ajax',{ // //The way of this work is: // //when you attach this ('ajax') directive to html tag, // //vue will instantly call this bind() method // bind: function() { // }, // //Next, as soon as that finishs, the update() method will // //instantly be called, and the way they work is : this will // //repeatly be called, whenever the bind value(value) changes // update: function(value) { // }, // //And finally, as you might expect, when the directive is unbind, // //will trigger this method, where you can remove any of their listeners or same stuffs like that // unbind: function() { // } // }); // // keep it simple // Vue.directive('ajax',function(value){}); // Vue.http.headers.common['X-CSRF-TOKEN'] = document.querySelector('input[name="_token"]').value; Vue.directive('ajax',{ params: ['complete'], bind: function() { // console.log(this.params); //First thing we need to do, directive object, which we can fetch. 
//In our situation, we need to add a event listener to the form element //if you using jQuery, you could say: $(this.el).on('---') //Let's listen for when the form is submitted, and then will trigger a method called "onSubmit" this.el.addEventListener('submit',this.onSubmit.bind(this)); }, update: function(value) { // alert('update'); }, //when we trigger this, onSubmit:function(e) { //"this"-->will no longer refer to which is bund in the bind() method (this.el.---), //"this" now refer to the form that will be submitted //so if we wanna say: no, within the context of this method, we still wanna this to refer //to it did before, JavaScript did it in a very weird stuffs: just make sure we bind the obeject: //.bind(this) in the above method // //We prevent the default action, so we do not submit the form, and instead we use //view resource to submit (this.vm in the following) e.preventDefault(); // Vue.http.post // // this.vm refers to the viewmodel (new Vue({el:"#app7"})) // Quick note on using the array syntax here: // a lot people don't know this with javaScript, in this example, we could say: // vm.$http.post or your can format it with vm.$http['post'] // this options is very good when you referencing a variable like [requestType], // because obviously when you use the dot-synatax: vm.$http.requestType that's will not going to work //this.el.action = 'tasks/1' this.vm .$http[this.getRequestType()](this.el.action) .then(this.onComplete.bind(this)) //change response in app/Exceptions/Handler.php //.success or .fail in jQuery .catch(this.onError.bind(this)); }, onComplete:function () { if(this.params.complete) { alert(this.params.complete); } }, onError: function(response) { alert(response.data.message); //flash message in real life }, //We need to figure out what the RequestType should be for this form. 
getRequestType: function() { //see the console, and find out the name of the hidden input of DELETE var method = this.el.querySelector('input[name="_method"]'); //if not, we just grab the method attribute of the form //that will be a 'DELETE'->'delete' or 'POST'->'post' return (method ? method.value : this.el.method).toLowerCase(); } }); new Vue({ el:'#app7', http: { headers: { 'X-CSRF-TOKEN': document.querySelector('input[name="_token"]').value } } }); var min = Vue.extend({ template: '#clock-min' }); var hour = Vue.extend({ template: '#clock-hour' }); var sec = Vue.extend({ template: '#clock-sec' }); var time = Vue.extend({ template: '#clock-time', props: ['myMessage'] }); Vue.component('clock', { template: '#clock-template', components: { 'min-component': min, 'hour-component': hour, 'sec-component': sec, 'time-component': time }, data () { return { time: "00:00:00" } }, ready () { this.startTime() }, methods: { startTime: function () { var min = this.$refs.min.$el, sec = this.$refs.sec.$el, hour = this.$refs.hour.$el; var now = new Date(), hValue = now.getHours(), mValue = now.getMinutes(), sValue = now.getSeconds(); setTimeout(this.startTime, 1000); // 数字时间 mValue = this.checkTime(mValue); sValue = this.checkTime(sValue); this.time = hValue + ":" + mValue + ":" + sValue; }, checkTime: function (i) { if (i < 10) {i = "0" + i}; return i; } } }); var parent = new Vue({ el: '#clock' })
isInProgress:function(task) { return !this.isCompleted(task); },
random_line_split
app.js
import Vue from 'vue'; import VueResource from 'vue-resource'; // window.Vue = Vue; Vue.use(VueResource); import Alert from './components/Alert.vue'; // var Vue = require('vue'); // var VueResource = require('vue-resource'); // // window.Vue = Vue; // Vue.use(VueResource); // import Profile from './Profile.vue'; // new Vue({ // el: '#app', // // components: { Profile } // data:{ // message:'Hello Vue.js!' // } // }); // new Vue({ // el: '#app', // data: { // // message: 'Hello Vue.js!', // todos: [ // { text: 'Learn JavaScript' }, // { text: 'Learn Vue.js' }, // { text: 'Build Something Awesome' }, // { text:'QIBU'} // ] // } // }); // new Vue({ // el: '#app', // data: { // message: 'Hello Vue.js!' // }, // methods: { // reverseMessage: function () { // this.message = this.message.split('').reverse().join('') // } // } // }); new Vue({ el: '#app2', data: { message: 'Hello' } }); Vue.component('counter',{ template:'#counter-template', props:['subject'], data:function() { return { count:0 }; } }); new Vue({ el: '#app3', data: { points: 50, first: 'Xiajun', last: 'Yan', fullname: 'Xiajun Yan', }, computed: { skill: function() { if (this.points <= 50)
return 'A'; }, fullname: function() { return this.first + ' ' + this.last; }, }, watch:{ first:function(first){ this.fullname = first + ' ' + this.last; }, last:function(last){ this.fullname = this.first + ' ' + last; }, }, methods: { handleIt: function() { alert('Handled'); }, minusCount: function() { this.count -= 1; } }, components: { counter: { template: '#counter-template', props: ['subject'], data: function() { return { count: 0 }; } } } }); new Vue({ el: '#app', data: { newTodo: '', todos: [{ text: 'Add some todos' }] }, methods: { addTodo: function() { var text = this.newTodo.trim() if (text) { this.todos.push({ text: text }) this.newTodo = '' } }, removeTodo: function(index) { this.todos.splice(index, 1) } } }); new Vue({ el:'#app4', data: { // store a list of plans plans:[ { name:'Enterprise', price:100 }, { name:'Pro', price:50 }, { name:'Personal', price:10 }, { name:'Free', price:0 }, ], active: {} }, components: { plan: { // template: '#plan-template', props:['plan','active'], computed: { isUpgrade : function() { return this.plan.price > this.active.price; //this.plan.price //this.active.price } }, methods: { setActivePlan: function(){ this.active = this.plan; } } } } }); new Vue({ el:"#app5", data: { tasks: [ { body: 'Go to the store', completed:false,price:100}, { body: 'Go to the bank', completed:false,price:100 }, { body: 'Go to the doctor', completed:true,price:100 } ] }, components: { taskcomp: { template:'#tasks-template', props:['list'], computed: { remaining: function() { // completedTasks = // inProgress = ! 
var vm = this; // return this.list.filter(function(task){ // return !vm.isCompleted(task); // }).length; return this.list.filter(this.isInProgress).length; } }, methods: { setPrice: function(task) { task.price = 50; }, isCompleted:function(task) { return task.completed; }, isInProgress:function(task) { return !this.isCompleted(task); }, deleteTask: function(task) { this.list.$remove(task); }, clearCompleted: function(){ this.list = this.list.filter(this.isInProgress); } } } } }); Vue.component('tasks',{ template: '#tasks-template', data: function(){ return { list: [] }; }, created: function() { this.fetchTaskList(); // var vm = this; // $.getJSON('api/tasks', function(tasks){ // console.log(tasks); // // console.log(data); // //assign these data to a list // vm.list = tasks; // }.bind(this)); }, //with api/tasks, no longer pass props through from our sever side. // props:['list'], //when this components is initially created, I want to go ahead and make a AJAX request. // created() is shorthand // created() { // // this.list = JSON.parse(this.list); // }, // methods: { fetchTaskList: function(){ var resource = this.$resource('api/tasks{/id}'); resource.get({}).then((response) => { this.list = response.data; }); // resource.update({id:5},{body:'Update task body'}).then((response) =>{ // this.list = response.data; // }); }, // this.$http.get('api/tasks').then((response) => { // console.log(response.); // }, (response) => { // // error callback // }); // } // $.getJSON('api/tasks', function(tasks){ // this.list = tasks; // }.bind(this)); deleteTask: function(task){ this.list.$remove(task); }, getTaskID:function(task) { console.log(task.id); } } }); new Vue({ el:'#app6' }); Vue.filter('jsonIt',function(value){ return JSON.stringify(value); }); Vue.filter('role',function(value,role){ return value.filter(function(item) { return item.role == role; }); });// people | role 'admin' Vue.component('message',{ template:'#message-template', data:function() { return { message 
:''}; }, methods: { storeMessage: function() { // console.log('Storing ' + this.message); // $dispatch, parent can listen for that event, which is handled this.$dispatch('new-message',this.message); // $broadcast this.message = ''; } } }); // new Vue({ // el:'#app', // data: { // people: [ // {name:'Joe',role:'admin'}, // {name:'Susan',role:'admin'}, // {name:'Frank',role:'student'}, // {name:'Jeffrey',role:'admin'}, // ], // messages: [] // }, // methods:{ // handleNewMessage: function(message) { // this.messages.push(message); // // console.log('Parent is handling ' + message); // } // }, // // events: { // // "new-message": function(message) { // // console.log('Parent is handling ' + message); // // } // // }, // components: { Alert }, // ready() { // // alert('Ready to go!'); // } // }) //Lesson // Vue.directive('ajax',{ // //The way of this work is: // //when you attach this ('ajax') directive to html tag, // //vue will instantly call this bind() method // bind: function() { // }, // //Next, as soon as that finishs, the update() method will // //instantly be called, and the way they work is : this will // //repeatly be called, whenever the bind value(value) changes // update: function(value) { // }, // //And finally, as you might expect, when the directive is unbind, // //will trigger this method, where you can remove any of their listeners or same stuffs like that // unbind: function() { // } // }); // // keep it simple // Vue.directive('ajax',function(value){}); // Vue.http.headers.common['X-CSRF-TOKEN'] = document.querySelector('input[name="_token"]').value; Vue.directive('ajax',{ params: ['complete'], bind: function() { // console.log(this.params); //First thing we need to do, directive object, which we can fetch. 
//In our situation, we need to add a event listener to the form element //if you using jQuery, you could say: $(this.el).on('---') //Let's listen for when the form is submitted, and then will trigger a method called "onSubmit" this.el.addEventListener('submit',this.onSubmit.bind(this)); }, update: function(value) { // alert('update'); }, //when we trigger this, onSubmit:function(e) { //"this"-->will no longer refer to which is bund in the bind() method (this.el.---), //"this" now refer to the form that will be submitted //so if we wanna say: no, within the context of this method, we still wanna this to refer //to it did before, JavaScript did it in a very weird stuffs: just make sure we bind the obeject: //.bind(this) in the above method // //We prevent the default action, so we do not submit the form, and instead we use //view resource to submit (this.vm in the following) e.preventDefault(); // Vue.http.post // // this.vm refers to the viewmodel (new Vue({el:"#app7"})) // Quick note on using the array syntax here: // a lot people don't know this with javaScript, in this example, we could say: // vm.$http.post or your can format it with vm.$http['post'] // this options is very good when you referencing a variable like [requestType], // because obviously when you use the dot-synatax: vm.$http.requestType that's will not going to work //this.el.action = 'tasks/1' this.vm .$http[this.getRequestType()](this.el.action) .then(this.onComplete.bind(this)) //change response in app/Exceptions/Handler.php //.success or .fail in jQuery .catch(this.onError.bind(this)); }, onComplete:function () { if(this.params.complete) { alert(this.params.complete); } }, onError: function(response) { alert(response.data.message); //flash message in real life }, //We need to figure out what the RequestType should be for this form. 
getRequestType: function() { //see the console, and find out the name of the hidden input of DELETE var method = this.el.querySelector('input[name="_method"]'); //if not, we just grab the method attribute of the form //that will be a 'DELETE'->'delete' or 'POST'->'post' return (method ? method.value : this.el.method).toLowerCase(); } }); new Vue({ el:'#app7', http: { headers: { 'X-CSRF-TOKEN': document.querySelector('input[name="_token"]').value } } }); var min = Vue.extend({ template: '#clock-min' }); var hour = Vue.extend({ template: '#clock-hour' }); var sec = Vue.extend({ template: '#clock-sec' }); var time = Vue.extend({ template: '#clock-time', props: ['myMessage'] }); Vue.component('clock', { template: '#clock-template', components: { 'min-component': min, 'hour-component': hour, 'sec-component': sec, 'time-component': time }, data () { return { time: "00:00:00" } }, ready () { this.startTime() }, methods: { startTime: function () { var min = this.$refs.min.$el, sec = this.$refs.sec.$el, hour = this.$refs.hour.$el; var now = new Date(), hValue = now.getHours(), mValue = now.getMinutes(), sValue = now.getSeconds(); setTimeout(this.startTime, 1000); // 数字时间 mValue = this.checkTime(mValue); sValue = this.checkTime(sValue); this.time = hValue + ":" + mValue + ":" + sValue; }, checkTime: function (i) { if (i < 10) {i = "0" + i}; return i; } } }); var parent = new Vue({ el: '#clock' })
{ return 'B'; }
conditional_block
analyzeMessageLogsRev3.py
import pandas as pd from messageLogs_functions import * from byteUtils import * from podStateAnalysis import * from messagePatternParsing import * from checkAction import * def analyzeMessageLogsRev3(thisPath, thisFile, outFile): # Rev3 uses the new checkAction code # this replaces code used by New (rev2) # deprecated: getPodSuccessfulActions # deprecated: basal_analysis code (assumed perfect message order) # This is time (sec) radio on Pod stays awake once comm is initiated radio_on_time = 30 filename = thisPath + '/' + thisFile # read the MessageLogs from the file commands, podDict = read_file(filename) # add quick and dirty fix for new Issue Reports (Aug 2019) tempRaw = commands[-1]['raw_value'] lastRaw = tempRaw.replace('\nstatus:','') commands[-1]['raw_value'] = lastRaw # add more stuff and return as a DataFrame df = generate_table(commands, radio_on_time) # set up a few reportable values here from df, time is in UTC first_command = df.iloc[0]['time'] last_command = df.iloc[-1]['time'] send_receive_commands = df.groupby(['type']).size() number_of_messages = len(df) thisPerson, thisFinish, thisAntenna = parse_info_from_filename(thisFile) thisFinish2 = 'Success' # default is 'Success' if thisFinish == 'WIP': thisFinish2 = 'WIP' # pod is still running lastDate = last_command.date() # Process df to generate the podState associated with every message # Updates to states occur with pod message (mostly 1d) status # (the state for extended_bolus_active is NOT included (always False)) # Includes values for requested bolus and TB # Note that .iloc for df and podState are identical podState, emptyMessageList, faultProcessedMsg = getPodState(df) # From the podState, extract some values to use in reports msgLogHrs = podState.iloc[-1]['timeCumSec']/3600 radioOnHrs = podState.iloc[-1]['radioOnCumSec']/3600 numberOfAssignID = len(podState[podState.message_type=='0x7']) numberOfSetUpPod = len(podState[podState.message_type=='0x3']) numberOfNonceResync = 
len(podState[podState.message_type=='06']) insulinDelivered = podState.iloc[-1]['insulinDelivered'] sourceString = 'from last 0x1d' # special handling if an 0x02 messages aka fault was received if len(faultProcessedMsg): hasFault = True thisFault = faultProcessedMsg['logged_fault'] checkInsulin = faultProcessedMsg['insulinDelivered'] rawFault = faultProcessedMsg['raw_value'] if checkInsulin >= insulinDelivered: insulinDelivered = checkInsulin sourceString = 'from 0x02 msg' else: hasFault = False rawFault = 'n/a' thisFault = thisFinish # checkAction returns actionFrame with indices and times for every action # completed actions and incomplete requests are separate columns # see also function getActionDict # actionFrame dataframe of processed analysis from podState (by action) # initIdx indices in podState to extract pod initilization actionFrame, initIdx = checkAction(podState) if outFile == 2: # print a few things then returns lot = podDict['lot'] tid = podDict['tid'] piv = podDict['piVersion'] print(f'{thisPerson},{thisAntenna},{thisFault},{first_command},{last_command},{msgLogHrs},{lot},{tid},{piv}') actionSummary = [] return df, podState, actionFrame, actionSummary if True: # print out summary information to command window # need this True to get the actionSummary used to fill csv file print('\n First command in Log :', first_command) print(' Last command in Log :', last_command) print(' Lot and TID :', podDict['lot'], podDict['tid']) print('__________________________________________\n') print(' Summary for {:s} with {:s} ending'.format(thisFile, thisFinish)) print(' Pod Lot: {:s}, PI: {:s}, PM: {:s}'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion'])) print(' Total elapsed time in log (hrs) : {:6.1f}'.format(msgLogHrs)) print(' Radio on estimate : {:6.1f}, {:5.1f}%'.format(radioOnHrs, 100*radioOnHrs/msgLogHrs)) print(' Number of messages : {:6d}'.format(number_of_messages)) print(' Number of nonce resyncs : {:6d}'.format(numberOfNonceResync)) 
print(' Insulin delivered (u) : {:6.2f} ({:s})'.format(insulinDelivered, sourceString)) if hasFault: thisFinish = thisFault thisFinish2 = 'Fault' if thisFault == '0x1C': print(' An 0x0202 message of {:s} reported - 80 hour time limit'.format(thisFault)) thisFinish2 = 'Success' elif thisFault == '0x18': print(' An 0x0202 message of {:s} reported - out of insulin'.format(thisFault)) thisFinish2 = 'Success' elif thisFault == '0x34': print(' An 0x0202 message of {:s} reported - this wipes out registers'.format(thisFault)) else: print(' An 0x0202 message of {:s} reported - details later'.format(thisFault)) print('\n Pod was initialized with {:d} messages, {:d} AssignID, {:d} SetUpPod required'.format(len(initIdx), \ numberOfAssignID, numberOfSetUpPod)) if emptyMessageList: print(' *** Detected {:d} empty message(s) during life of the pod'.format(len(emptyMessageList))) print(' *** indices:', emptyMessageList) # process the action frame (returns a dictionary plus total completed message count) actionSummary, totalCompletedMessages = processActionFrame(actionFrame, podState) printActionSummary(actionSummary) percentCompleted = 100*totalCompletedMessages/number_of_messages print(' #Messages in completed actions : {:5d} : {:.1f}%'.format( \ totalCompletedMessages, percentCompleted)) if hasFault: print('\nFault Details') printDict(faultProcessedMsg) # if an output filename is provided - write statistics to it (csv format) if outFile: # check if file exists isItThere = os.path.isfile(outFile) # now open the file stream_out = open(outFile,mode='at') # write the column headers if this is a new file if not isItThere: # set up a table format order headerString = 'Who, finish State, Finish2, lastMsg Date, podOn (hrs), radioOn (hrs), radioOn (%), ' + \ '#Messages, #Completed, % Completed, #Send, #Recv, ' + \ '#Nonce Resync, #TB, #Bolus, ' \ '#Basal, #Status Check, ' + \ '#Schedule Before TempBasal, #TB Spaced <30s, ' + \ '#Repeat TB Value, #Repeat TB <30s, ' + \ ' #RepTB 30s to 
19min, #incomplete TB, ' + \ 'insulin Delivered, # Initialize Cmds, # AssignID (0x07), ' + \ '# SetUpPod (0x03), Pod Lot, PI Version, PM Version, ' + \ 'raw fault, filename' stream_out.write(headerString) stream_out.write('\n') # Extract items from actionSummary if actionSummary.get('TB'): subDict = actionSummary.get('TB') numberOfTB = subDict['countCompleted'] numberScheduleBeforeTempBasal = subDict['numSchBasalbeforeTB'] numberTBSepLessThan30sec = subDict['numShortTB'] numRepeatedTB = subDict['numRepeatedTB'] numRepeatedShortTB = subDict['numRepeatedShortTB'] numrepeated19MinTB = subDict['numrepeated19MinTB'] else: numberOfTB = 0 numberScheduleBeforeTempBasal = 0 numberTBSepLessThan30sec = 0 numRepeatedTB = 0 if actionSummary.get('Bolus'): subDict = actionSummary.get('Bolus')
if actionSummary.get('Basal'): subDict = actionSummary.get('Basal') numberOfBasal = subDict['countCompleted'] else: numberOfBasal = 0 if actionSummary.get('StatusCheck'): subDict = actionSummary.get('StatusCheck') numberOfStatusRequests = subDict['countCompleted'] else: numberOfStatusRequests = 0 if actionSummary.get('CancelTB'): subDict = actionSummary.get('CancelTB') numIncomplCancelTB = subDict['countIncomplete'] else: numIncomplCancelTB = 0 # write out the information for csv (don't want extra spaces for this ) stream_out.write(f'{thisPerson},{thisFinish},{thisFinish2},{lastDate},') stream_out.write('{:.1f},'.format(msgLogHrs)) stream_out.write('{:.2f},'.format(radioOnHrs)) stream_out.write('{:.2f},'.format(100*radioOnHrs/msgLogHrs)) stream_out.write('{:d},'.format(number_of_messages)) stream_out.write(f'{totalCompletedMessages},') stream_out.write('{:.2f},'.format(percentCompleted)) stream_out.write(f'{send_receive_commands[1]},{send_receive_commands[0]},') stream_out.write(f'{numberOfNonceResync},{numberOfTB},{numberOfBolus},{numberOfBasal},') stream_out.write(f'{numberOfStatusRequests},{numberScheduleBeforeTempBasal},') stream_out.write(f'{numberTBSepLessThan30sec},{numRepeatedTB},{numRepeatedShortTB},') stream_out.write(f'{numrepeated19MinTB},{numIncomplCancelTB},') stream_out.write('{:.2f},'.format(insulinDelivered)) stream_out.write('{:d}, {:d}, {:d},'.format(len(initIdx), numberOfAssignID, numberOfSetUpPod)) stream_out.write('{:s}, {:s}, {:s},'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion'])) stream_out.write(f'{rawFault},{thisFile}') stream_out.write('\n') stream_out.close() return df, podState, actionFrame, actionSummary
numberOfBolus = subDict['countCompleted'] else: numberOfBolus = 0
random_line_split
analyzeMessageLogsRev3.py
import pandas as pd from messageLogs_functions import * from byteUtils import * from podStateAnalysis import * from messagePatternParsing import * from checkAction import * def analyzeMessageLogsRev3(thisPath, thisFile, outFile): # Rev3 uses the new checkAction code # this replaces code used by New (rev2) # deprecated: getPodSuccessfulActions # deprecated: basal_analysis code (assumed perfect message order) # This is time (sec) radio on Pod stays awake once comm is initiated radio_on_time = 30 filename = thisPath + '/' + thisFile # read the MessageLogs from the file commands, podDict = read_file(filename) # add quick and dirty fix for new Issue Reports (Aug 2019) tempRaw = commands[-1]['raw_value'] lastRaw = tempRaw.replace('\nstatus:','') commands[-1]['raw_value'] = lastRaw # add more stuff and return as a DataFrame df = generate_table(commands, radio_on_time) # set up a few reportable values here from df, time is in UTC first_command = df.iloc[0]['time'] last_command = df.iloc[-1]['time'] send_receive_commands = df.groupby(['type']).size() number_of_messages = len(df) thisPerson, thisFinish, thisAntenna = parse_info_from_filename(thisFile) thisFinish2 = 'Success' # default is 'Success' if thisFinish == 'WIP': thisFinish2 = 'WIP' # pod is still running lastDate = last_command.date() # Process df to generate the podState associated with every message # Updates to states occur with pod message (mostly 1d) status # (the state for extended_bolus_active is NOT included (always False)) # Includes values for requested bolus and TB # Note that .iloc for df and podState are identical podState, emptyMessageList, faultProcessedMsg = getPodState(df) # From the podState, extract some values to use in reports msgLogHrs = podState.iloc[-1]['timeCumSec']/3600 radioOnHrs = podState.iloc[-1]['radioOnCumSec']/3600 numberOfAssignID = len(podState[podState.message_type=='0x7']) numberOfSetUpPod = len(podState[podState.message_type=='0x3']) numberOfNonceResync = 
len(podState[podState.message_type=='06']) insulinDelivered = podState.iloc[-1]['insulinDelivered'] sourceString = 'from last 0x1d' # special handling if an 0x02 messages aka fault was received if len(faultProcessedMsg): hasFault = True thisFault = faultProcessedMsg['logged_fault'] checkInsulin = faultProcessedMsg['insulinDelivered'] rawFault = faultProcessedMsg['raw_value'] if checkInsulin >= insulinDelivered: insulinDelivered = checkInsulin sourceString = 'from 0x02 msg' else: hasFault = False rawFault = 'n/a' thisFault = thisFinish # checkAction returns actionFrame with indices and times for every action # completed actions and incomplete requests are separate columns # see also function getActionDict # actionFrame dataframe of processed analysis from podState (by action) # initIdx indices in podState to extract pod initilization actionFrame, initIdx = checkAction(podState) if outFile == 2: # print a few things then returns lot = podDict['lot'] tid = podDict['tid'] piv = podDict['piVersion'] print(f'{thisPerson},{thisAntenna},{thisFault},{first_command},{last_command},{msgLogHrs},{lot},{tid},{piv}') actionSummary = [] return df, podState, actionFrame, actionSummary if True: # print out summary information to command window # need this True to get the actionSummary used to fill csv file print('\n First command in Log :', first_command) print(' Last command in Log :', last_command) print(' Lot and TID :', podDict['lot'], podDict['tid']) print('__________________________________________\n') print(' Summary for {:s} with {:s} ending'.format(thisFile, thisFinish)) print(' Pod Lot: {:s}, PI: {:s}, PM: {:s}'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion'])) print(' Total elapsed time in log (hrs) : {:6.1f}'.format(msgLogHrs)) print(' Radio on estimate : {:6.1f}, {:5.1f}%'.format(radioOnHrs, 100*radioOnHrs/msgLogHrs)) print(' Number of messages : {:6d}'.format(number_of_messages)) print(' Number of nonce resyncs : {:6d}'.format(numberOfNonceResync)) 
print(' Insulin delivered (u) : {:6.2f} ({:s})'.format(insulinDelivered, sourceString)) if hasFault: thisFinish = thisFault thisFinish2 = 'Fault' if thisFault == '0x1C': print(' An 0x0202 message of {:s} reported - 80 hour time limit'.format(thisFault)) thisFinish2 = 'Success' elif thisFault == '0x18': print(' An 0x0202 message of {:s} reported - out of insulin'.format(thisFault)) thisFinish2 = 'Success' elif thisFault == '0x34': print(' An 0x0202 message of {:s} reported - this wipes out registers'.format(thisFault)) else: print(' An 0x0202 message of {:s} reported - details later'.format(thisFault)) print('\n Pod was initialized with {:d} messages, {:d} AssignID, {:d} SetUpPod required'.format(len(initIdx), \ numberOfAssignID, numberOfSetUpPod)) if emptyMessageList: print(' *** Detected {:d} empty message(s) during life of the pod'.format(len(emptyMessageList))) print(' *** indices:', emptyMessageList) # process the action frame (returns a dictionary plus total completed message count) actionSummary, totalCompletedMessages = processActionFrame(actionFrame, podState) printActionSummary(actionSummary) percentCompleted = 100*totalCompletedMessages/number_of_messages print(' #Messages in completed actions : {:5d} : {:.1f}%'.format( \ totalCompletedMessages, percentCompleted)) if hasFault: print('\nFault Details') printDict(faultProcessedMsg) # if an output filename is provided - write statistics to it (csv format) if outFile: # check if file exists isItThere = os.path.isfile(outFile) # now open the file stream_out = open(outFile,mode='at') # write the column headers if this is a new file if not isItThere: # set up a table format order
# Extract items from actionSummary if actionSummary.get('TB'): subDict = actionSummary.get('TB') numberOfTB = subDict['countCompleted'] numberScheduleBeforeTempBasal = subDict['numSchBasalbeforeTB'] numberTBSepLessThan30sec = subDict['numShortTB'] numRepeatedTB = subDict['numRepeatedTB'] numRepeatedShortTB = subDict['numRepeatedShortTB'] numrepeated19MinTB = subDict['numrepeated19MinTB'] else: numberOfTB = 0 numberScheduleBeforeTempBasal = 0 numberTBSepLessThan30sec = 0 numRepeatedTB = 0 if actionSummary.get('Bolus'): subDict = actionSummary.get('Bolus') numberOfBolus = subDict['countCompleted'] else: numberOfBolus = 0 if actionSummary.get('Basal'): subDict = actionSummary.get('Basal') numberOfBasal = subDict['countCompleted'] else: numberOfBasal = 0 if actionSummary.get('StatusCheck'): subDict = actionSummary.get('StatusCheck') numberOfStatusRequests = subDict['countCompleted'] else: numberOfStatusRequests = 0 if actionSummary.get('CancelTB'): subDict = actionSummary.get('CancelTB') numIncomplCancelTB = subDict['countIncomplete'] else: numIncomplCancelTB = 0 # write out the information for csv (don't want extra spaces for this ) stream_out.write(f'{thisPerson},{thisFinish},{thisFinish2},{lastDate},') stream_out.write('{:.1f},'.format(msgLogHrs)) stream_out.write('{:.2f},'.format(radioOnHrs)) stream_out.write('{:.2f},'.format(100*radioOnHrs/msgLogHrs)) stream_out.write('{:d},'.format(number_of_messages)) stream_out.write(f'{totalCompletedMessages},') stream_out.write('{:.2f},'.format(percentCompleted)) stream_out.write(f'{send_receive_commands[1]},{send_receive_commands[0]},') stream_out.write(f'{numberOfNonceResync},{numberOfTB},{numberOfBolus},{numberOfBasal},') stream_out.write(f'{numberOfStatusRequests},{numberScheduleBeforeTempBasal},') stream_out.write(f'{numberTBSepLessThan30sec},{numRepeatedTB},{numRepeatedShortTB},') stream_out.write(f'{numrepeated19MinTB},{numIncomplCancelTB},') stream_out.write('{:.2f},'.format(insulinDelivered)) stream_out.write('{:d}, 
{:d}, {:d},'.format(len(initIdx), numberOfAssignID, numberOfSetUpPod)) stream_out.write('{:s}, {:s}, {:s},'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion'])) stream_out.write(f'{rawFault},{thisFile}') stream_out.write('\n') stream_out.close() return df, podState, actionFrame, actionSummary
headerString = 'Who, finish State, Finish2, lastMsg Date, podOn (hrs), radioOn (hrs), radioOn (%), ' + \ '#Messages, #Completed, % Completed, #Send, #Recv, ' + \ '#Nonce Resync, #TB, #Bolus, ' \ '#Basal, #Status Check, ' + \ '#Schedule Before TempBasal, #TB Spaced <30s, ' + \ '#Repeat TB Value, #Repeat TB <30s, ' + \ ' #RepTB 30s to 19min, #incomplete TB, ' + \ 'insulin Delivered, # Initialize Cmds, # AssignID (0x07), ' + \ '# SetUpPod (0x03), Pod Lot, PI Version, PM Version, ' + \ 'raw fault, filename' stream_out.write(headerString) stream_out.write('\n')
conditional_block
analyzeMessageLogsRev3.py
import pandas as pd from messageLogs_functions import * from byteUtils import * from podStateAnalysis import * from messagePatternParsing import * from checkAction import * def
(thisPath, thisFile, outFile): # Rev3 uses the new checkAction code # this replaces code used by New (rev2) # deprecated: getPodSuccessfulActions # deprecated: basal_analysis code (assumed perfect message order) # This is time (sec) radio on Pod stays awake once comm is initiated radio_on_time = 30 filename = thisPath + '/' + thisFile # read the MessageLogs from the file commands, podDict = read_file(filename) # add quick and dirty fix for new Issue Reports (Aug 2019) tempRaw = commands[-1]['raw_value'] lastRaw = tempRaw.replace('\nstatus:','') commands[-1]['raw_value'] = lastRaw # add more stuff and return as a DataFrame df = generate_table(commands, radio_on_time) # set up a few reportable values here from df, time is in UTC first_command = df.iloc[0]['time'] last_command = df.iloc[-1]['time'] send_receive_commands = df.groupby(['type']).size() number_of_messages = len(df) thisPerson, thisFinish, thisAntenna = parse_info_from_filename(thisFile) thisFinish2 = 'Success' # default is 'Success' if thisFinish == 'WIP': thisFinish2 = 'WIP' # pod is still running lastDate = last_command.date() # Process df to generate the podState associated with every message # Updates to states occur with pod message (mostly 1d) status # (the state for extended_bolus_active is NOT included (always False)) # Includes values for requested bolus and TB # Note that .iloc for df and podState are identical podState, emptyMessageList, faultProcessedMsg = getPodState(df) # From the podState, extract some values to use in reports msgLogHrs = podState.iloc[-1]['timeCumSec']/3600 radioOnHrs = podState.iloc[-1]['radioOnCumSec']/3600 numberOfAssignID = len(podState[podState.message_type=='0x7']) numberOfSetUpPod = len(podState[podState.message_type=='0x3']) numberOfNonceResync = len(podState[podState.message_type=='06']) insulinDelivered = podState.iloc[-1]['insulinDelivered'] sourceString = 'from last 0x1d' # special handling if an 0x02 messages aka fault was received if len(faultProcessedMsg): 
hasFault = True thisFault = faultProcessedMsg['logged_fault'] checkInsulin = faultProcessedMsg['insulinDelivered'] rawFault = faultProcessedMsg['raw_value'] if checkInsulin >= insulinDelivered: insulinDelivered = checkInsulin sourceString = 'from 0x02 msg' else: hasFault = False rawFault = 'n/a' thisFault = thisFinish # checkAction returns actionFrame with indices and times for every action # completed actions and incomplete requests are separate columns # see also function getActionDict # actionFrame dataframe of processed analysis from podState (by action) # initIdx indices in podState to extract pod initilization actionFrame, initIdx = checkAction(podState) if outFile == 2: # print a few things then returns lot = podDict['lot'] tid = podDict['tid'] piv = podDict['piVersion'] print(f'{thisPerson},{thisAntenna},{thisFault},{first_command},{last_command},{msgLogHrs},{lot},{tid},{piv}') actionSummary = [] return df, podState, actionFrame, actionSummary if True: # print out summary information to command window # need this True to get the actionSummary used to fill csv file print('\n First command in Log :', first_command) print(' Last command in Log :', last_command) print(' Lot and TID :', podDict['lot'], podDict['tid']) print('__________________________________________\n') print(' Summary for {:s} with {:s} ending'.format(thisFile, thisFinish)) print(' Pod Lot: {:s}, PI: {:s}, PM: {:s}'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion'])) print(' Total elapsed time in log (hrs) : {:6.1f}'.format(msgLogHrs)) print(' Radio on estimate : {:6.1f}, {:5.1f}%'.format(radioOnHrs, 100*radioOnHrs/msgLogHrs)) print(' Number of messages : {:6d}'.format(number_of_messages)) print(' Number of nonce resyncs : {:6d}'.format(numberOfNonceResync)) print(' Insulin delivered (u) : {:6.2f} ({:s})'.format(insulinDelivered, sourceString)) if hasFault: thisFinish = thisFault thisFinish2 = 'Fault' if thisFault == '0x1C': print(' An 0x0202 message of {:s} reported - 80 hour 
time limit'.format(thisFault)) thisFinish2 = 'Success' elif thisFault == '0x18': print(' An 0x0202 message of {:s} reported - out of insulin'.format(thisFault)) thisFinish2 = 'Success' elif thisFault == '0x34': print(' An 0x0202 message of {:s} reported - this wipes out registers'.format(thisFault)) else: print(' An 0x0202 message of {:s} reported - details later'.format(thisFault)) print('\n Pod was initialized with {:d} messages, {:d} AssignID, {:d} SetUpPod required'.format(len(initIdx), \ numberOfAssignID, numberOfSetUpPod)) if emptyMessageList: print(' *** Detected {:d} empty message(s) during life of the pod'.format(len(emptyMessageList))) print(' *** indices:', emptyMessageList) # process the action frame (returns a dictionary plus total completed message count) actionSummary, totalCompletedMessages = processActionFrame(actionFrame, podState) printActionSummary(actionSummary) percentCompleted = 100*totalCompletedMessages/number_of_messages print(' #Messages in completed actions : {:5d} : {:.1f}%'.format( \ totalCompletedMessages, percentCompleted)) if hasFault: print('\nFault Details') printDict(faultProcessedMsg) # if an output filename is provided - write statistics to it (csv format) if outFile: # check if file exists isItThere = os.path.isfile(outFile) # now open the file stream_out = open(outFile,mode='at') # write the column headers if this is a new file if not isItThere: # set up a table format order headerString = 'Who, finish State, Finish2, lastMsg Date, podOn (hrs), radioOn (hrs), radioOn (%), ' + \ '#Messages, #Completed, % Completed, #Send, #Recv, ' + \ '#Nonce Resync, #TB, #Bolus, ' \ '#Basal, #Status Check, ' + \ '#Schedule Before TempBasal, #TB Spaced <30s, ' + \ '#Repeat TB Value, #Repeat TB <30s, ' + \ ' #RepTB 30s to 19min, #incomplete TB, ' + \ 'insulin Delivered, # Initialize Cmds, # AssignID (0x07), ' + \ '# SetUpPod (0x03), Pod Lot, PI Version, PM Version, ' + \ 'raw fault, filename' stream_out.write(headerString) 
stream_out.write('\n') # Extract items from actionSummary if actionSummary.get('TB'): subDict = actionSummary.get('TB') numberOfTB = subDict['countCompleted'] numberScheduleBeforeTempBasal = subDict['numSchBasalbeforeTB'] numberTBSepLessThan30sec = subDict['numShortTB'] numRepeatedTB = subDict['numRepeatedTB'] numRepeatedShortTB = subDict['numRepeatedShortTB'] numrepeated19MinTB = subDict['numrepeated19MinTB'] else: numberOfTB = 0 numberScheduleBeforeTempBasal = 0 numberTBSepLessThan30sec = 0 numRepeatedTB = 0 if actionSummary.get('Bolus'): subDict = actionSummary.get('Bolus') numberOfBolus = subDict['countCompleted'] else: numberOfBolus = 0 if actionSummary.get('Basal'): subDict = actionSummary.get('Basal') numberOfBasal = subDict['countCompleted'] else: numberOfBasal = 0 if actionSummary.get('StatusCheck'): subDict = actionSummary.get('StatusCheck') numberOfStatusRequests = subDict['countCompleted'] else: numberOfStatusRequests = 0 if actionSummary.get('CancelTB'): subDict = actionSummary.get('CancelTB') numIncomplCancelTB = subDict['countIncomplete'] else: numIncomplCancelTB = 0 # write out the information for csv (don't want extra spaces for this ) stream_out.write(f'{thisPerson},{thisFinish},{thisFinish2},{lastDate},') stream_out.write('{:.1f},'.format(msgLogHrs)) stream_out.write('{:.2f},'.format(radioOnHrs)) stream_out.write('{:.2f},'.format(100*radioOnHrs/msgLogHrs)) stream_out.write('{:d},'.format(number_of_messages)) stream_out.write(f'{totalCompletedMessages},') stream_out.write('{:.2f},'.format(percentCompleted)) stream_out.write(f'{send_receive_commands[1]},{send_receive_commands[0]},') stream_out.write(f'{numberOfNonceResync},{numberOfTB},{numberOfBolus},{numberOfBasal},') stream_out.write(f'{numberOfStatusRequests},{numberScheduleBeforeTempBasal},') stream_out.write(f'{numberTBSepLessThan30sec},{numRepeatedTB},{numRepeatedShortTB},') stream_out.write(f'{numrepeated19MinTB},{numIncomplCancelTB},') stream_out.write('{:.2f},'.format(insulinDelivered)) 
stream_out.write('{:d}, {:d}, {:d},'.format(len(initIdx), numberOfAssignID, numberOfSetUpPod)) stream_out.write('{:s}, {:s}, {:s},'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion'])) stream_out.write(f'{rawFault},{thisFile}') stream_out.write('\n') stream_out.close() return df, podState, actionFrame, actionSummary
analyzeMessageLogsRev3
identifier_name
analyzeMessageLogsRev3.py
import pandas as pd from messageLogs_functions import * from byteUtils import * from podStateAnalysis import * from messagePatternParsing import * from checkAction import * def analyzeMessageLogsRev3(thisPath, thisFile, outFile): # Rev3 uses the new checkAction code # this replaces code used by New (rev2) # deprecated: getPodSuccessfulActions # deprecated: basal_analysis code (assumed perfect message order) # This is time (sec) radio on Pod stays awake once comm is initiated
radio_on_time = 30 filename = thisPath + '/' + thisFile # read the MessageLogs from the file commands, podDict = read_file(filename) # add quick and dirty fix for new Issue Reports (Aug 2019) tempRaw = commands[-1]['raw_value'] lastRaw = tempRaw.replace('\nstatus:','') commands[-1]['raw_value'] = lastRaw # add more stuff and return as a DataFrame df = generate_table(commands, radio_on_time) # set up a few reportable values here from df, time is in UTC first_command = df.iloc[0]['time'] last_command = df.iloc[-1]['time'] send_receive_commands = df.groupby(['type']).size() number_of_messages = len(df) thisPerson, thisFinish, thisAntenna = parse_info_from_filename(thisFile) thisFinish2 = 'Success' # default is 'Success' if thisFinish == 'WIP': thisFinish2 = 'WIP' # pod is still running lastDate = last_command.date() # Process df to generate the podState associated with every message # Updates to states occur with pod message (mostly 1d) status # (the state for extended_bolus_active is NOT included (always False)) # Includes values for requested bolus and TB # Note that .iloc for df and podState are identical podState, emptyMessageList, faultProcessedMsg = getPodState(df) # From the podState, extract some values to use in reports msgLogHrs = podState.iloc[-1]['timeCumSec']/3600 radioOnHrs = podState.iloc[-1]['radioOnCumSec']/3600 numberOfAssignID = len(podState[podState.message_type=='0x7']) numberOfSetUpPod = len(podState[podState.message_type=='0x3']) numberOfNonceResync = len(podState[podState.message_type=='06']) insulinDelivered = podState.iloc[-1]['insulinDelivered'] sourceString = 'from last 0x1d' # special handling if an 0x02 messages aka fault was received if len(faultProcessedMsg): hasFault = True thisFault = faultProcessedMsg['logged_fault'] checkInsulin = faultProcessedMsg['insulinDelivered'] rawFault = faultProcessedMsg['raw_value'] if checkInsulin >= insulinDelivered: insulinDelivered = checkInsulin sourceString = 'from 0x02 msg' else: hasFault = False 
rawFault = 'n/a' thisFault = thisFinish # checkAction returns actionFrame with indices and times for every action # completed actions and incomplete requests are separate columns # see also function getActionDict # actionFrame dataframe of processed analysis from podState (by action) # initIdx indices in podState to extract pod initilization actionFrame, initIdx = checkAction(podState) if outFile == 2: # print a few things then returns lot = podDict['lot'] tid = podDict['tid'] piv = podDict['piVersion'] print(f'{thisPerson},{thisAntenna},{thisFault},{first_command},{last_command},{msgLogHrs},{lot},{tid},{piv}') actionSummary = [] return df, podState, actionFrame, actionSummary if True: # print out summary information to command window # need this True to get the actionSummary used to fill csv file print('\n First command in Log :', first_command) print(' Last command in Log :', last_command) print(' Lot and TID :', podDict['lot'], podDict['tid']) print('__________________________________________\n') print(' Summary for {:s} with {:s} ending'.format(thisFile, thisFinish)) print(' Pod Lot: {:s}, PI: {:s}, PM: {:s}'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion'])) print(' Total elapsed time in log (hrs) : {:6.1f}'.format(msgLogHrs)) print(' Radio on estimate : {:6.1f}, {:5.1f}%'.format(radioOnHrs, 100*radioOnHrs/msgLogHrs)) print(' Number of messages : {:6d}'.format(number_of_messages)) print(' Number of nonce resyncs : {:6d}'.format(numberOfNonceResync)) print(' Insulin delivered (u) : {:6.2f} ({:s})'.format(insulinDelivered, sourceString)) if hasFault: thisFinish = thisFault thisFinish2 = 'Fault' if thisFault == '0x1C': print(' An 0x0202 message of {:s} reported - 80 hour time limit'.format(thisFault)) thisFinish2 = 'Success' elif thisFault == '0x18': print(' An 0x0202 message of {:s} reported - out of insulin'.format(thisFault)) thisFinish2 = 'Success' elif thisFault == '0x34': print(' An 0x0202 message of {:s} reported - this wipes out 
registers'.format(thisFault)) else: print(' An 0x0202 message of {:s} reported - details later'.format(thisFault)) print('\n Pod was initialized with {:d} messages, {:d} AssignID, {:d} SetUpPod required'.format(len(initIdx), \ numberOfAssignID, numberOfSetUpPod)) if emptyMessageList: print(' *** Detected {:d} empty message(s) during life of the pod'.format(len(emptyMessageList))) print(' *** indices:', emptyMessageList) # process the action frame (returns a dictionary plus total completed message count) actionSummary, totalCompletedMessages = processActionFrame(actionFrame, podState) printActionSummary(actionSummary) percentCompleted = 100*totalCompletedMessages/number_of_messages print(' #Messages in completed actions : {:5d} : {:.1f}%'.format( \ totalCompletedMessages, percentCompleted)) if hasFault: print('\nFault Details') printDict(faultProcessedMsg) # if an output filename is provided - write statistics to it (csv format) if outFile: # check if file exists isItThere = os.path.isfile(outFile) # now open the file stream_out = open(outFile,mode='at') # write the column headers if this is a new file if not isItThere: # set up a table format order headerString = 'Who, finish State, Finish2, lastMsg Date, podOn (hrs), radioOn (hrs), radioOn (%), ' + \ '#Messages, #Completed, % Completed, #Send, #Recv, ' + \ '#Nonce Resync, #TB, #Bolus, ' \ '#Basal, #Status Check, ' + \ '#Schedule Before TempBasal, #TB Spaced <30s, ' + \ '#Repeat TB Value, #Repeat TB <30s, ' + \ ' #RepTB 30s to 19min, #incomplete TB, ' + \ 'insulin Delivered, # Initialize Cmds, # AssignID (0x07), ' + \ '# SetUpPod (0x03), Pod Lot, PI Version, PM Version, ' + \ 'raw fault, filename' stream_out.write(headerString) stream_out.write('\n') # Extract items from actionSummary if actionSummary.get('TB'): subDict = actionSummary.get('TB') numberOfTB = subDict['countCompleted'] numberScheduleBeforeTempBasal = subDict['numSchBasalbeforeTB'] numberTBSepLessThan30sec = subDict['numShortTB'] numRepeatedTB = 
subDict['numRepeatedTB'] numRepeatedShortTB = subDict['numRepeatedShortTB'] numrepeated19MinTB = subDict['numrepeated19MinTB'] else: numberOfTB = 0 numberScheduleBeforeTempBasal = 0 numberTBSepLessThan30sec = 0 numRepeatedTB = 0 if actionSummary.get('Bolus'): subDict = actionSummary.get('Bolus') numberOfBolus = subDict['countCompleted'] else: numberOfBolus = 0 if actionSummary.get('Basal'): subDict = actionSummary.get('Basal') numberOfBasal = subDict['countCompleted'] else: numberOfBasal = 0 if actionSummary.get('StatusCheck'): subDict = actionSummary.get('StatusCheck') numberOfStatusRequests = subDict['countCompleted'] else: numberOfStatusRequests = 0 if actionSummary.get('CancelTB'): subDict = actionSummary.get('CancelTB') numIncomplCancelTB = subDict['countIncomplete'] else: numIncomplCancelTB = 0 # write out the information for csv (don't want extra spaces for this ) stream_out.write(f'{thisPerson},{thisFinish},{thisFinish2},{lastDate},') stream_out.write('{:.1f},'.format(msgLogHrs)) stream_out.write('{:.2f},'.format(radioOnHrs)) stream_out.write('{:.2f},'.format(100*radioOnHrs/msgLogHrs)) stream_out.write('{:d},'.format(number_of_messages)) stream_out.write(f'{totalCompletedMessages},') stream_out.write('{:.2f},'.format(percentCompleted)) stream_out.write(f'{send_receive_commands[1]},{send_receive_commands[0]},') stream_out.write(f'{numberOfNonceResync},{numberOfTB},{numberOfBolus},{numberOfBasal},') stream_out.write(f'{numberOfStatusRequests},{numberScheduleBeforeTempBasal},') stream_out.write(f'{numberTBSepLessThan30sec},{numRepeatedTB},{numRepeatedShortTB},') stream_out.write(f'{numrepeated19MinTB},{numIncomplCancelTB},') stream_out.write('{:.2f},'.format(insulinDelivered)) stream_out.write('{:d}, {:d}, {:d},'.format(len(initIdx), numberOfAssignID, numberOfSetUpPod)) stream_out.write('{:s}, {:s}, {:s},'.format(podDict['lot'], podDict['piVersion'], podDict['pmVersion'])) stream_out.write(f'{rawFault},{thisFile}') stream_out.write('\n') stream_out.close() 
return df, podState, actionFrame, actionSummary
identifier_body
cockpit.js
import "./cockpit.html" import "./cockpit_table.html" // import "../common/navbar-position.html" //import { ReactiveVar } from 'meteor/reactive-var'; let loguser; let pro_id; let _changeProject2; let _this_btn; let pagenum; let limit=10; let templ; //显隐提示 function prompt(val, shObj) { if
.Pages = new Meteor.Pagination("Project",{ // perPage: 2, // itemTemplate: "cockpit_table", // //templateName: 'Project', // //itemTemplate: 'cockpit' // //sort: { // // title: 1 // //}, // //filters: { // // count: { // // $gt: 10 // // } // //}, // //availableSettings: { // // perPage: true, // // sort: true // //} //}); //let that_pro; //Meteor.subscribe('cockpitTable'); //Meteor.subscribe('userTable'); Template.cockpit.onDestroyed(function () { _changeProject2.stop(); pro_id = null; //that_pro=null; }); Template.cockpit.rendered = function () { $('#data_1').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); $('#data_2').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); //初始化 //_changeProject2=Meteor.subscribe('project',loguser,function(){ // that_pro=Project.find({}).fetch(); //}); //日期 //$('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false, // forceParse: false, // calendarWeeks: true, // autoclose: true //}); //$('.M-box1').pagination({ // totalData:100, // showData:1, // coping:true, // callback:function(api){ // console.log(api.getCurrent()); // $('.now').text(api.getCurrent()); // } //}); //分页 //this.Pages = new Meteor.Pagination("Project",{ // perPage: 2, // itemTemplate: "cockpit_table", // templateName: 'Project', // //itemTemplate: 'cockpit' // //sort: { // // title: 1 // //}, // //filters: { // // count: { // // $gt: 10 // // } // //}, // //availableSettings: { // // perPage: true, // // sort: true // //} //}); }; Template.cockpit.onCreated(function () { /* * find() 返回值是一个游标。游标是一种从动数据源 *输出内容,可以对游标使用 fetch() 来把游标转换成数组 * */ // //var userPhone=$.cookie('user_phone'); //var userType=$.cookie('user_type'); //得到登录用户的id //var _loguserId=FlowRouter.getParam('_id'); //console.log(_loguserId); loguser = sessionStorage.getItem('loguser'); //Session.get('loguser2'); 
console.log(loguser); if (!loguser) { FlowRouter.go('/login'); } //console.log(loguser); templ=this; //订阅数据 $('#mengban').show(); _changeProject2 = this.subscribe('project', loguser, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); }); //this.subscribe('cockpitTable_user',userPhone,userType); this.subscribe('allusers'); //this.subscribe('dictionaries'); //单条项目 var _data = Project.find({}).fetch(); //ReactiveDict本地变量 this.editorData = new ReactiveVar(_data); //当前页码 this.nowpageData = new ReactiveVar(); //Meteor.call('getnum',_data,function(){ // //}); //页码本地变量 //this.pages = new ReactiveVar(); }); Template.cockpit.helpers({ //项目集合 cockpitTable: function () { var page = Template.instance().nowpageData.get(); var bendiPT = Project.find({},{skip:(page-1)*limit,limit:limit}).fetch(); //Template.instance().searchData.set(that_pro); //var bendiPT=Template.instance().searchData.get(); for (var i = 0; i < bendiPT.length; i++) { bendiPT[i].ordinal = i + 1; //Meteor.call('proProgress',bendiPT[i]._id,function(error,res){ // bendiPT[i].progress=res['result']; //}); if (bendiPT[i].supervisionEngineer) { bendiPT[i].supervisionEngineer = Users.find({ '_id': bendiPT[i].supervisionEngineer }).fetch()[0].username; } if (bendiPT[i].backup == 0) { bendiPT[i].backup = '无'; } else { bendiPT[i].backup = Dictionaries.find({ "ecode": "backUp", 'value': bendiPT[i].backup }).fetch()[0].name; } if (bendiPT[i].weekly) { bendiPT[i].weekly ='开启'; }else{ bendiPT[i].weekly ='关闭'; } if (bendiPT[i].monthly) { bendiPT[i].monthly ='开启'; }else{ bendiPT[i].monthly ='关闭'; } } return bendiPT; }, //cockpitTable: function() { // //return CockpitTable.find(); // var bendiCT=Project.find().fetch(); // for(var i=0;i<bendiCT.length;i++){ // 
bendiCT[i].ordinal=i+1; // } // return bendiCT; //}, //单条项目集合 editorTable: function () { var _data = Template.instance().editorData.get(); return _data; }, //周报是否勾选 isweekly: function (a) { var pro=Project.find({_id:a}).fetch(); if(pro[0]){ if(pro[0].weekly==0) { return false; }else{ return true; } } }, //月报是否勾选 ismonthly: function (a) { var pro=Project.find({_id:a}).fetch(); if(pro[0]){ if(pro[0].monthly==0) { return false; }else{ return true; } } }, //用户表中监理工程师集合 userTableJLG: function () { return Users.find({ "type": 2, 'state': 1 }); }, //工程师选中判断 engSelect: function (a) { //console.log(a); var supervisionEngineer = Template.instance().editorData.get()[0].supervisionEngineer; var engname = Users.find({ '_id': supervisionEngineer }).fetch()[0].username; if (a == engname) { return true; } else { return false; } }, //字典表中备份方案集合 backUp: function () { return Dictionaries.find({ "ecode": "backUp" }); }, //备份方案选中方法 backUpSelect: function (a) { var backup = Template.instance().editorData.get()[0].backup; if (a == backup) { return true; } else { return false; } }, //验收判断 accState: function (a) { //0是已验收,1是未验收 if (a == 1) { return false; } else if (a == 0) { return true; } }, //是否显示操作判断 isHandle: function () { var loguser = sessionStorage.getItem('loguser'); //var _loguserId=FlowRouter.getParam('_id'); //var loguserType=Users.find({'_id':_loguserId}).fetch()[0].type; var user = Users.findOne({ '_id': loguser }); if (user) { var loguserType = user.type; //if(loguserType==1 || loguserType==0){ if (loguserType == 1 || loguserType == 0) { return true; } else { return false; } } return false; } }); Template.cockpit.onRendered(function () { //日期 //$('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false, // forceParse: false, // calendarWeeks: true, // autoclose: true //}); //$('.modal').appendTo("body"); //var proSum=CockpitTable.find().count(); //console.log(1111); }); Template.cockpit.events({ //'click .add': function(e) { // 
//alert(1); // e.preventDefault(); // var addPro=$('#myModalAddpro'); // //var proname=addpro.find('.modal-body').find('input:first-child').val(); // //const target = e.target; // //const text = target.text.value; // //console.log(text); // console.log(pro_id); // CockpitTable.insert({'number':10,'proName':'11'}); // //var proNumber=CockpitTable.find({}).sort({number:-1}).limit(1); // //CockpitTable.find({}, { sort: { number: -1 } }).limit(1); //}, //验收项目 "click .acc": function (e) { e.preventDefault(); pro_id = this._id; var state = this.state; if (state == 0) { state = 1; //Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : null} }); } else if (state == 1) { state = 0; //var timestamp = ((Date.parse(new Date()))/ 1000).toString(); //Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : timestamp} }); } $('#mengban').show(); Meteor.call('accPro', pro_id, state, function (error, res) { $('#mengban').hide(); }); //function base64_decode(base64str, file) { // // create buffer object from base64 encoded string, it is important to tell the constructor that the string is base64 encoded // var bitmap = new Buffer(base64str, 'base64'); // // write buffer to file // fs.writeFileSync(file, bitmap); // console.log('******** File created from base64 encoded string ********'); //} // 
//base64_decode('iVBORw0KGgoAAAANSUhEUgAAADQAAAAlCAYAAAAN8srVAAACTUlEQVR42u3Wv2sTcRiA8VPBxUKwEAxU3NxPIoFAl1bIkkmwYKAKRbqbRSWQCGJ+rMUibjo4FARBl0AgUIh/QXFxFIpKJHAQKA56r0/hDbyEK5VrDH2hBx+ud+Ga9+G+uSQQkVOv0+lMZNBFHoFRwABZb0F9CCITVdRjQd9b0CoOTNSGiRkidBWkljGGINb9CCECd0FqE7GJqkxeMxccK8UbJzppUPGIO5SfR9DCjINsTIR1RDbKXvAakuB9yqAsvuLaDIN6Jqag5/IaIxjYCxaxDzFGyKUMegdBb4ZBGfQmMUaIXeSmLyhDjHspl9wdiPHgJEGlUumf2UGml96HlJ+hRQwhRoSleQfZgfawlDJoB5KgO4OgDLrIT4UUMEA2xdNpro/t6aA+BJGJKuqxoJ9ikLmzQas4MFEbJmYIHz99GNRaxhiCWPcjhAjcBalNxCaqgsBrUPGIO5T3GGRjIqwjslHegnompqDn8hojGHgLyqA3iTFC7CLnLOh4Z0Gn3FnQf2O3ZrN5iZ9aVw81Go3zQfLmI4iIx/gBUXvtdnvNXZDGbEMI2Gf/BFsQPXffVRADr+jgn1hylwPdOL6Bn7w2brVaV9wEMfALBheGDu3QGvVQ79RtT0FvGDyu1WoXE4JWNKjiack916HXEoJecT7GLTdBLLXrDPwbEX+Xq9XqucPHNzFVzv3B93q9fsHbU+4uhAhh/wXfIMaWqyBdXjfxluE/63fQM/Yt8/je9hQ0vdnQpybqJRZcB2nUI4J+QVB2H6RRHzUoTPo/fwGr9gNcek8bXAAAAABJRU5ErkJggg==', 'copy.jpg'); //Meteor.call('imageUpload2'); }, //编辑项目 "click .editor": function (e) { e.preventDefault(); pro_id = this._id; _this_btn = $(e.target); $('#myModalEditorPro .proNameNone').hide(); $('#myModalEditorPro .beginDateNone').hide(); Template.instance().editorData.set(Project.find({ _id: pro_id }).fetch()); }, //编辑项目开始时间控件 "click #data_1":function(){ $('#data_1').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); }, //编辑项目到期时间控件 "click #data_2":function(){ $('#data_2').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); }, //周报复选框 "click .checkboxWeekly": function (e) { e.preventDefault(); var weekly_checked=e.target.checked; $('#mengban').show(); Meteor.call('changeWeekly',pro_id,weekly_checked,function(){ $('#mengban').hide(); }); }, //月报复选框 "click .checkboxMonthly": function (e) { e.preventDefault(); var monthly_checked=e.target.checked; $('#mengban').show(); Meteor.call('changeMonthly',pro_id,monthly_checked,function(){ $('#mengban').hide(); }); }, 
//确认编辑项目 'click .editorpro': function (e) { e.preventDefault(); console.log(pro_id); var editorPro = $('#myModalEditorPro'); var proName = $.trim(editorPro.find('.proName').val()); var beginDate = $.trim(editorPro.find('.beginDate').val()); var endDate = $.trim(editorPro.find('.endDate').val()); var enger = editorPro.find('.enger option:selected').val(); var phone = editorPro.find('.tel').html(); var backup = editorPro.find('.backup option:selected').val(); prompt(proName, $('#myModalEditorPro .proNameNone')); prompt(beginDate, $('#myModalEditorPro .beginDateNone')); prompt(endDate, $('#myModalEditorPro .endDateNone')); if (proName && beginDate && endDate) { $('#mengban').show(); Meteor.call('updateProCK', pro_id, proName, beginDate,endDate, enger, phone, backup, function (error, res) { $('#mengban').hide(); if (typeof error != 'undefined') { console.log(error); } else { if (res['success'] == true) { //$('body').removeClass('modal-open'); //$('body').css("paddingRight",'0'); //$('#myModalEditorPro').removeClass('in'); $('.modal-backdrop').remove(); $('.modal').hide(); _this_btn.click(); //alert(res['msg']); return; } else { alert(res['msg']); } } }); } ////得到选择的监理工程师在用户表中的电话 //var choosedJLG=Users.find({'userName':enger,'type':2}).fetch(); //var supTel=choosedJLG[0].phone; // //Project.update( // {_id:pro_id}, // { $set : { // "proName" : proName, // "beginDate" : beginDate, // "supervisionEngineer" : enger, // "supTel" : supTel, // "backup" : backup, // } // }); }, //监理工程师选择过程中改变下面的电话 'change .enger': function (e) { e.preventDefault(); var editorPro = $('#myModalEditorPro'); var enger = editorPro.find('.enger option:selected').val(); //得到选择的监理工程师在用户表中的电话 if (enger != '请选择') { var choosedJLG = Users.find({ 'username': enger, 'type': 2 }).fetch(); var supTel = choosedJLG[0].phone; } else { var supTel = ''; } editorPro.find('.tel').html(supTel); }, //点击日期input //'click .data_1': function() { // $('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // 
keyboardNavigation: false // //forceParse: false, // //calendarWeeks: true, // //autoclose: true // }); // //$('.data_1 .date').datepicker(); // //}, //点击项目列表的详情按钮,跳到对应的项目 'click .jumpPro': function (e) { e.preventDefault(); pro_id = this._id; sessionStorage.setItem('choosepro', pro_id); Session.set("choosepro", pro_id); FlowRouter.go('/projectOverview'); //FlowRouter.go('/projectBasic'); }, //搜索 //'click .toSearchPro': function(e) { //'change .searchPro': function(e) { // e.preventDefault(); // var searchHTML=$('.searchPro').val(); // var choosePro=$('.choosePro'); // var searchTd=choosePro.find('td:not(:last-child)'); // console.log(searchHTML); // console.log(searchTd); // // if(searchHTML){ // choosePro.css('display','none'); // for(var i=0;i<searchTd.length;i++){ // if($(searchTd[i]).html().indexOf(searchHTML)!=-1){ // $(searchTd[i]).parents('tr').css('display','table-row'); // } // } // }else{ // choosePro.css('display','table-row'); // } //}, 'click .toSearchPro': function (e) { e.preventDefault(); var searchHTML = $('.searchPro').val(); $('#mengban').show(); _changeProject2.stop(); _changeProject2 = Meteor.subscribe('project', loguser, searchHTML, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); //_changeProject2.stop(); //_changeProject2=tem; }); }, 'keydown .searchPro': function (e) { if (e && e.keyCode == 13) { // enter 键 var searchHTML = $('.searchPro').val(); $('#mengban').show(); _changeProject2.stop(); _changeProject2 = Meteor.subscribe('project', loguser, searchHTML, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); 
pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); //_changeProject2.stop(); //_changeProject2=tem; }); } }, //'click.bbb .M-box1 a': function (e) { // console.log('翻页了'); // Template.instance().nowpageData.set(pagenum); //} });
(!val) { shObj.show(); } else { shObj.hide(); } } //this
identifier_body
cockpit.js
import "./cockpit.html" import "./cockpit_table.html" // import "../common/navbar-position.html" //import { ReactiveVar } from 'meteor/reactive-var'; let loguser; let pro_id; let _changeProject2; let _this_btn; let pagenum; let limit=10; let templ; //显隐提示 function prompt(val, shObj) { if (!val) { shObj.show(); } else { shObj.hide(); } } //this.Pages = new Meteor.Pagination("Project",{ // perPage: 2, // itemTemplate: "cockpit_table", // //templateName: 'Project', // //itemTemplate: 'cockpit' // //sort: { // // title: 1 // //}, // //filters: { // // count: { // // $gt: 10 // // } // //}, // //availableSettings: { // // perPage: true, // // sort: true // //} //}); //let that_pro; //Meteor.subscribe('cockpitTable'); //Meteor.subscribe('userTable'); Template.cockpit.onDestroyed(function () { _changeProject2.stop(); pro_id = null; //that_pro=null; }); Template.cockpit.rendered = function () { $('#data_1').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); $('#data_2').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); //初始化 //_changeProject2=Meteor.subscribe('project',loguser,function(){ // that_pro=Project.find({}).fetch(); //}); //日期 //$('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false, // forceParse: false, // calendarWeeks: true, // autoclose: true //}); //$('.M-box1').pagination({ // totalData:100, // showData:1, // coping:true, // callback:function(api){ // console.log(api.getCurrent()); // $('.now').text(api.getCurrent()); // } //}); //分页 //this.Pages = new Meteor.Pagination("Project",{ // perPage: 2, // itemTemplate: "cockpit_table", // templateName: 'Project', // //itemTemplate: 'cockpit' // //sort: { // // title: 1 // //}, // //filters: { // // count: { // // $gt: 10 // // } // //}, // //availableSettings: { // // perPage: true, // // sort: true // //} //}); }; 
Template.cockpit.onCreated(function () { /* * find() 返回值是一个游标。游标是一种从动数据源 *输出内容,可以对游标使用 fetch() 来把游标转换成数组 * */ // //var userPhone=$.cookie('user_phone'); //var userType=$.cookie('user_type'); //得到登录用户的id //var _loguserId=FlowRouter.getParam('_id'); //console.log(_loguserId); loguser = sessionStorage.getItem('loguser'); //Session.get('loguser2'); console.log(loguser); if (!loguser) { FlowRouter.go('/login'); } //console.log(loguser); templ=this; //订阅数据 $('#mengban').show(); _changeProject2 = this.subscribe('project', loguser, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); }); //this.subscribe('cockpitTable_user',userPhone,userType); this.subscribe('allusers'); //this.subscribe('dictionaries'); //单条项目 var _data = Project.find({}).fetch(); //ReactiveDict本地变量 this.editorData = new ReactiveVar(_data); //当前页码 this.nowpageData = new ReactiveVar(); //Meteor.call('getnum',_data,function(){ // //}); //页码本地变量 //this.pages = new ReactiveVar(); }); Template.cockpit.helpers({ //项目集合 cockpitTable: function () { var page = Template.instance().nowpageData.get(); var bendiPT = Project.find({},{skip:(page-1)*limit,limit:limit}).fetch(); //Template.instance().searchData.set(that_pro); //var bendiPT=Template.instance().searchData.get(); for (var i = 0; i < bendiPT.length; i++) { bendiPT[i].ordinal = i + 1; //Meteor.call('proProgress',bendiPT[i]._id,function(error,res){ // bendiPT[i].progress=res['result']; //}); if (bendiPT[i].supervisionEngineer) { bendiPT[i].supervisionEngineer = Users.find({ '_id': bendiPT[i].supervisionEngineer }).fetch()[0].username; } if (bendiPT[i].backup == 0) { bendiPT[i].backup = '无'; } else { bendiPT[i].backup = Dictionaries.find({ "ecode": "backUp", 'value': bendiPT[i].backup }).fetch()[0].name; 
} if (bendiPT[i].weekly) { bendiPT[i].weekly ='开启'; }else{ bendiPT[i].weekly ='关闭'; } if (bendiPT[i].monthly) { bendiPT[i].monthly ='开启'; }else{ bendiPT[i].monthly ='关闭'; } } return bendiPT; }, //cockpitTable: function() { // //return CockpitTable.find(); // var bendiCT=Project.find().fetch(); // for(var i=0;i<bendiCT.length;i++){ // bendiCT[i].ordinal=i+1;
// return bendiCT; //}, //单条项目集合 editorTable: function () { var _data = Template.instance().editorData.get(); return _data; }, //周报是否勾选 isweekly: function (a) { var pro=Project.find({_id:a}).fetch(); if(pro[0]){ if(pro[0].weekly==0) { return false; }else{ return true; } } }, //月报是否勾选 ismonthly: function (a) { var pro=Project.find({_id:a}).fetch(); if(pro[0]){ if(pro[0].monthly==0) { return false; }else{ return true; } } }, //用户表中监理工程师集合 userTableJLG: function () { return Users.find({ "type": 2, 'state': 1 }); }, //工程师选中判断 engSelect: function (a) { //console.log(a); var supervisionEngineer = Template.instance().editorData.get()[0].supervisionEngineer; var engname = Users.find({ '_id': supervisionEngineer }).fetch()[0].username; if (a == engname) { return true; } else { return false; } }, //字典表中备份方案集合 backUp: function () { return Dictionaries.find({ "ecode": "backUp" }); }, //备份方案选中方法 backUpSelect: function (a) { var backup = Template.instance().editorData.get()[0].backup; if (a == backup) { return true; } else { return false; } }, //验收判断 accState: function (a) { //0是已验收,1是未验收 if (a == 1) { return false; } else if (a == 0) { return true; } }, //是否显示操作判断 isHandle: function () { var loguser = sessionStorage.getItem('loguser'); //var _loguserId=FlowRouter.getParam('_id'); //var loguserType=Users.find({'_id':_loguserId}).fetch()[0].type; var user = Users.findOne({ '_id': loguser }); if (user) { var loguserType = user.type; //if(loguserType==1 || loguserType==0){ if (loguserType == 1 || loguserType == 0) { return true; } else { return false; } } return false; } }); Template.cockpit.onRendered(function () { //日期 //$('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false, // forceParse: false, // calendarWeeks: true, // autoclose: true //}); //$('.modal').appendTo("body"); //var proSum=CockpitTable.find().count(); //console.log(1111); }); Template.cockpit.events({ //'click .add': function(e) { // //alert(1); // e.preventDefault(); // 
var addPro=$('#myModalAddpro'); // //var proname=addpro.find('.modal-body').find('input:first-child').val(); // //const target = e.target; // //const text = target.text.value; // //console.log(text); // console.log(pro_id); // CockpitTable.insert({'number':10,'proName':'11'}); // //var proNumber=CockpitTable.find({}).sort({number:-1}).limit(1); // //CockpitTable.find({}, { sort: { number: -1 } }).limit(1); //}, //验收项目 "click .acc": function (e) { e.preventDefault(); pro_id = this._id; var state = this.state; if (state == 0) { state = 1; //Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : null} }); } else if (state == 1) { state = 0; //var timestamp = ((Date.parse(new Date()))/ 1000).toString(); //Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : timestamp} }); } $('#mengban').show(); Meteor.call('accPro', pro_id, state, function (error, res) { $('#mengban').hide(); }); //function base64_decode(base64str, file) { // // create buffer object from base64 encoded string, it is important to tell the constructor that the string is base64 encoded // var bitmap = new Buffer(base64str, 'base64'); // // write buffer to file // fs.writeFileSync(file, bitmap); // console.log('******** File created from base64 encoded string ********'); //} // 
//base64_decode('iVBORw0KGgoAAAANSUhEUgAAADQAAAAlCAYAAAAN8srVAAACTUlEQVR42u3Wv2sTcRiA8VPBxUKwEAxU3NxPIoFAl1bIkkmwYKAKRbqbRSWQCGJ+rMUibjo4FARBl0AgUIh/QXFxFIpKJHAQKA56r0/hDbyEK5VrDH2hBx+ud+Ga9+G+uSQQkVOv0+lMZNBFHoFRwABZb0F9CCITVdRjQd9b0CoOTNSGiRkidBWkljGGINb9CCECd0FqE7GJqkxeMxccK8UbJzppUPGIO5SfR9DCjINsTIR1RDbKXvAakuB9yqAsvuLaDIN6Jqag5/IaIxjYCxaxDzFGyKUMegdBb4ZBGfQmMUaIXeSmLyhDjHspl9wdiPHgJEGlUumf2UGml96HlJ+hRQwhRoSleQfZgfawlDJoB5KgO4OgDLrIT4UUMEA2xdNpro/t6aA+BJGJKuqxoJ9ikLmzQas4MFEbJmYIHz99GNRaxhiCWPcjhAjcBalNxCaqgsBrUPGIO5T3GGRjIqwjslHegnompqDn8hojGHgLyqA3iTFC7CLnLOh4Z0Gn3FnQf2O3ZrN5iZ9aVw81Go3zQfLmI4iIx/gBUXvtdnvNXZDGbEMI2Gf/BFsQPXffVRADr+jgn1hylwPdOL6Bn7w2brVaV9wEMfALBheGDu3QGvVQ79RtT0FvGDyu1WoXE4JWNKjiack916HXEoJecT7GLTdBLLXrDPwbEX+Xq9XqucPHNzFVzv3B93q9fsHbU+4uhAhh/wXfIMaWqyBdXjfxluE/63fQM/Yt8/je9hQ0vdnQpybqJRZcB2nUI4J+QVB2H6RRHzUoTPo/fwGr9gNcek8bXAAAAABJRU5ErkJggg==', 'copy.jpg'); //Meteor.call('imageUpload2'); }, //编辑项目 "click .editor": function (e) { e.preventDefault(); pro_id = this._id; _this_btn = $(e.target); $('#myModalEditorPro .proNameNone').hide(); $('#myModalEditorPro .beginDateNone').hide(); Template.instance().editorData.set(Project.find({ _id: pro_id }).fetch()); }, //编辑项目开始时间控件 "click #data_1":function(){ $('#data_1').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); }, //编辑项目到期时间控件 "click #data_2":function(){ $('#data_2').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); }, //周报复选框 "click .checkboxWeekly": function (e) { e.preventDefault(); var weekly_checked=e.target.checked; $('#mengban').show(); Meteor.call('changeWeekly',pro_id,weekly_checked,function(){ $('#mengban').hide(); }); }, //月报复选框 "click .checkboxMonthly": function (e) { e.preventDefault(); var monthly_checked=e.target.checked; $('#mengban').show(); Meteor.call('changeMonthly',pro_id,monthly_checked,function(){ $('#mengban').hide(); }); }, 
//确认编辑项目 'click .editorpro': function (e) { e.preventDefault(); console.log(pro_id); var editorPro = $('#myModalEditorPro'); var proName = $.trim(editorPro.find('.proName').val()); var beginDate = $.trim(editorPro.find('.beginDate').val()); var endDate = $.trim(editorPro.find('.endDate').val()); var enger = editorPro.find('.enger option:selected').val(); var phone = editorPro.find('.tel').html(); var backup = editorPro.find('.backup option:selected').val(); prompt(proName, $('#myModalEditorPro .proNameNone')); prompt(beginDate, $('#myModalEditorPro .beginDateNone')); prompt(endDate, $('#myModalEditorPro .endDateNone')); if (proName && beginDate && endDate) { $('#mengban').show(); Meteor.call('updateProCK', pro_id, proName, beginDate,endDate, enger, phone, backup, function (error, res) { $('#mengban').hide(); if (typeof error != 'undefined') { console.log(error); } else { if (res['success'] == true) { //$('body').removeClass('modal-open'); //$('body').css("paddingRight",'0'); //$('#myModalEditorPro').removeClass('in'); $('.modal-backdrop').remove(); $('.modal').hide(); _this_btn.click(); //alert(res['msg']); return; } else { alert(res['msg']); } } }); } ////得到选择的监理工程师在用户表中的电话 //var choosedJLG=Users.find({'userName':enger,'type':2}).fetch(); //var supTel=choosedJLG[0].phone; // //Project.update( // {_id:pro_id}, // { $set : { // "proName" : proName, // "beginDate" : beginDate, // "supervisionEngineer" : enger, // "supTel" : supTel, // "backup" : backup, // } // }); }, //监理工程师选择过程中改变下面的电话 'change .enger': function (e) { e.preventDefault(); var editorPro = $('#myModalEditorPro'); var enger = editorPro.find('.enger option:selected').val(); //得到选择的监理工程师在用户表中的电话 if (enger != '请选择') { var choosedJLG = Users.find({ 'username': enger, 'type': 2 }).fetch(); var supTel = choosedJLG[0].phone; } else { var supTel = ''; } editorPro.find('.tel').html(supTel); }, //点击日期input //'click .data_1': function() { // $('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // 
keyboardNavigation: false // //forceParse: false, // //calendarWeeks: true, // //autoclose: true // }); // //$('.data_1 .date').datepicker(); // //}, //点击项目列表的详情按钮,跳到对应的项目 'click .jumpPro': function (e) { e.preventDefault(); pro_id = this._id; sessionStorage.setItem('choosepro', pro_id); Session.set("choosepro", pro_id); FlowRouter.go('/projectOverview'); //FlowRouter.go('/projectBasic'); }, //搜索 //'click .toSearchPro': function(e) { //'change .searchPro': function(e) { // e.preventDefault(); // var searchHTML=$('.searchPro').val(); // var choosePro=$('.choosePro'); // var searchTd=choosePro.find('td:not(:last-child)'); // console.log(searchHTML); // console.log(searchTd); // // if(searchHTML){ // choosePro.css('display','none'); // for(var i=0;i<searchTd.length;i++){ // if($(searchTd[i]).html().indexOf(searchHTML)!=-1){ // $(searchTd[i]).parents('tr').css('display','table-row'); // } // } // }else{ // choosePro.css('display','table-row'); // } //}, 'click .toSearchPro': function (e) { e.preventDefault(); var searchHTML = $('.searchPro').val(); $('#mengban').show(); _changeProject2.stop(); _changeProject2 = Meteor.subscribe('project', loguser, searchHTML, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); //_changeProject2.stop(); //_changeProject2=tem; }); }, 'keydown .searchPro': function (e) { if (e && e.keyCode == 13) { // enter 键 var searchHTML = $('.searchPro').val(); $('#mengban').show(); _changeProject2.stop(); _changeProject2 = Meteor.subscribe('project', loguser, searchHTML, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); 
pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); //_changeProject2.stop(); //_changeProject2=tem; }); } }, //'click.bbb .M-box1 a': function (e) { // console.log('翻页了'); // Template.instance().nowpageData.set(pagenum); //} });
// }
random_line_split
cockpit.js
import "./cockpit.html" import "./cockpit_table.html" // import "../common/navbar-position.html" //import { ReactiveVar } from 'meteor/reactive-var'; let loguser; let pro_id; let _changeProject2; let _this_btn; let pagenum; let limit=10; let templ; //显隐提示 function prompt(val, shObj) { if (!val) { shObj.show(); } else { shObj.hide(); } } //this.Pages = new Meteor.Pagination("Project",{ // perPage: 2, // itemTemplate: "cockpit_table", // //templateName: 'Project', // //itemTemplate: 'cockpit' // //sort: { // // title: 1 // //}, // //filters: { // // count: { // // $gt: 10 // // } // //}, // //availableSettings: { // // perPage: true, // // sort: true // //} //}); //let that_pro; //Meteor.subscribe('cockpitTable'); //Meteor.subscribe('userTable'); Template.cockpit.onDestroyed(function () { _changeProject2.stop(); pro_id = null; //that_pro=null; }); Template.cockpit.rendered = function () { $('#data_1').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); $('#data_2').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); //初始化 //_changeProject2=Meteor.subscribe('project',loguser,function(){ // that_pro=Project.find({}).fetch(); //}); //日期 //$('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false, // forceParse: false, // calendarWeeks: true, // autoclose: true //}); //$('.M-box1').pagination({ // totalData:100, // showData:1, // coping:true, // callback:function(api){ // console.log(api.getCurrent()); // $('.now').text(api.getCurrent()); // } //}); //分页 //this.Pages = new Meteor.Pagination("Project",{ // perPage: 2, // itemTemplate: "cockpit_table", // templateName: 'Project', // //itemTemplate: 'cockpit' // //sort: { // // title: 1 // //}, // //filters: { // // count: { // // $gt: 10 // // } // //}, // //availableSettings: { // // perPage: true, // // sort: true // //} //}); }; 
Template.cockpit.onCreated(function () { /* * find() 返回值是一个游标。游标是一种从动数据源 *输出内容,可以对游标使用 fetch() 来把游标转换成数组 * */ // //var userPhone=$.cookie('user_phone'); //var userType=$.cookie('user_type'); //得到登录用户的id //var _loguserId=FlowRouter.getParam('_id'); //console.log(_loguserId); loguser = sessionStorage.getItem('loguser'); //Session.get('loguser2'); console.log(loguser); if (!loguser) { FlowRouter.go('/login'); } //console.log(loguser); templ=this; //订阅数据 $('#mengban').show(); _changeProject2 = this.subscribe('project', loguser, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); }); //this.subscribe('cockpitTable_user',userPhone,userType); this.subscribe('allusers'); //this.subscribe('dictionaries'); //单条项目 var _data = Project.find({}).fetch(); //ReactiveDict本地变量 this.editorData = new ReactiveVar(_data); //当前页码 this.nowpageData = new ReactiveVar(); //Meteor.call('getnum',_data,function(){ // //}); //页码本地变量 //this.pages = new ReactiveVar(); }); Template.cockpit.helpers({ //项目集合 cockpitTable: function () { var page = Template.instance().nowpageData.get(); var bendiPT = Project.find({},{skip:(page-1)*limit,limit:limit}).fetch(); //Template.instance().searchData.set(that_pro); //var bendiPT=Template.instance().searchData.get(); for (var i = 0; i < bendiPT.length; i++) { bendiPT[i].ordinal = i + 1; //Meteor.call('proProgress',bendiPT[i]._id,function(error,res){ // bendiPT[i].progress=res['result']; //}); if (bendiPT[i].supervisionEngineer) { bendiPT[i].supervisionEngineer = Users.find({ '_id': bendiPT[i].supervisionEngineer }).fetch()[0].username; } if (bendiPT[i].backup == 0) { bendiPT[i].backup = '无'; } else { bendiPT[i].backup = Dictionaries.find({ "ecode": "backUp", 'value': bendiPT[i].backup }).fetch()[0].name; 
} if (bendiPT[i].weekly) { bendiPT[i].weekly ='开启'; }else{ bendiPT[i].weekly ='关闭'; } if (bendiPT[i].monthly) { bendiPT[i].monthly ='开启'; }else{ bendiPT[i].monthly ='关闭'; } } return bendiPT; }, //cockpitTable: function() { // //return CockpitTable.find(); // var bendiCT=Project.find().fetch(); // for(var i=0;i<bendiCT.length;i++){ // bendiCT[i].ordinal=i+1; // } // return bendiCT; //}, //单条项目集合 editorTable: function () { var _data = Template.instance().editorData.get(); return _data; }, //周报是否勾选 isweekly: function (a) { var pro=Project.find({_id:a}).fetch(); if(pro[0]){ if(pro[0].weekly==0) { return false; }else{ return true; } } }, //月报是否勾选 ismonthly: function (a) { var pro=Project.find({_id:a}).fetch(); if(pro[0]){ if(pro[0].monthly==0) { return false; }else{ return true; } } }, //用户表中监理工程师集合 userTableJLG: function () { return Users.find({ "type": 2, 'state': 1 }); }, //工程师选中判断 engSelect: function (a) { //console.log(a); var supervisionEngineer = Template.instance().editorData.get()[0].supervisionEngineer; var engname = Users.find({ '_id': supervisionEngineer }).fetch()[0].username; if (a == engname) { return true; } else { return false; } }, //字典表中备份方案集合 backUp: function () { return Dictionaries.find({ "ecode": "backUp" }); }, //备份方案选中方法 backUpSelect: function (a) { var backup = Template.instance().editorData.get()[0].backup; if (a == backup) { return true; } else { return false; } }, //验收判断 accState: function (a) { //0是已验收,1是未验收 if (a == 1) { return false; } else if (a == 0) { return true; } }, //是否显示操作判断 isHandle: function () { var loguser = sessionStorage.getItem('loguser'); //var _loguserId=FlowRouter.getParam('_id'); //var loguserType=Users.find({'_id':_loguserId}).fetch()[0].type; var user = Users.findOne({ '_id': loguser }); if (user) { var loguserType = user.type; //if(loguserType==1 || loguserType==0){ if (loguserType == 1 || loguserType == 0) { return true; } else { return false; } } return false; } }); Template.cockpit.onRendered(function () { //日期 
//$('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false, // forceParse: false, // calendarWeeks: true, // autoclose: true //}); //$('.modal').appendTo("body"); //var proSum=CockpitTable.find().count(); //console.log(1111); }); Template.cockpit.events({ //'click .add': function(e) { // //alert(1); // e.preventDefault(); // var addPro=$('#myModalAddpro'); // //var proname=addpro.find('.modal-body').find('input:first-child').val(); // //const target = e.target; // //const text = target.text.value; // //console.log(text); // console.log(pro_id); // CockpitTable.insert({'number':10,'proName':'11'}); // //var proNumber=CockpitTable.find({}).sort({number:-1}).limit(1); // //CockpitTable.find({}, { sort: { number: -1 } }).limit(1); //}, //验收项目 "click .acc": function (e) { e.preventDefault(); pro_id = this._id; var state = this.state; if (state == 0) { state = 1; //Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : null} }); } else if (state == 1) { state = 0; //var timestamp = ((Date.parse(new Date()))/ 1000).toString(); //Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : timestamp} });
an').hide(); }); //function base64_decode(base64str, file) { // // create buffer object from base64 encoded string, it is important to tell the constructor that the string is base64 encoded // var bitmap = new Buffer(base64str, 'base64'); // // write buffer to file // fs.writeFileSync(file, bitmap); // console.log('******** File created from base64 encoded string ********'); //} // //base64_decode('iVBORw0KGgoAAAANSUhEUgAAADQAAAAlCAYAAAAN8srVAAACTUlEQVR42u3Wv2sTcRiA8VPBxUKwEAxU3NxPIoFAl1bIkkmwYKAKRbqbRSWQCGJ+rMUibjo4FARBl0AgUIh/QXFxFIpKJHAQKA56r0/hDbyEK5VrDH2hBx+ud+Ga9+G+uSQQkVOv0+lMZNBFHoFRwABZb0F9CCITVdRjQd9b0CoOTNSGiRkidBWkljGGINb9CCECd0FqE7GJqkxeMxccK8UbJzppUPGIO5SfR9DCjINsTIR1RDbKXvAakuB9yqAsvuLaDIN6Jqag5/IaIxjYCxaxDzFGyKUMegdBb4ZBGfQmMUaIXeSmLyhDjHspl9wdiPHgJEGlUumf2UGml96HlJ+hRQwhRoSleQfZgfawlDJoB5KgO4OgDLrIT4UUMEA2xdNpro/t6aA+BJGJKuqxoJ9ikLmzQas4MFEbJmYIHz99GNRaxhiCWPcjhAjcBalNxCaqgsBrUPGIO5T3GGRjIqwjslHegnompqDn8hojGHgLyqA3iTFC7CLnLOh4Z0Gn3FnQf2O3ZrN5iZ9aVw81Go3zQfLmI4iIx/gBUXvtdnvNXZDGbEMI2Gf/BFsQPXffVRADr+jgn1hylwPdOL6Bn7w2brVaV9wEMfALBheGDu3QGvVQ79RtT0FvGDyu1WoXE4JWNKjiack916HXEoJecT7GLTdBLLXrDPwbEX+Xq9XqucPHNzFVzv3B93q9fsHbU+4uhAhh/wXfIMaWqyBdXjfxluE/63fQM/Yt8/je9hQ0vdnQpybqJRZcB2nUI4J+QVB2H6RRHzUoTPo/fwGr9gNcek8bXAAAAABJRU5ErkJggg==', 'copy.jpg'); //Meteor.call('imageUpload2'); }, //编辑项目 "click .editor": function (e) { e.preventDefault(); pro_id = this._id; _this_btn = $(e.target); $('#myModalEditorPro .proNameNone').hide(); $('#myModalEditorPro .beginDateNone').hide(); Template.instance().editorData.set(Project.find({ _id: pro_id }).fetch()); }, //编辑项目开始时间控件 "click #data_1":function(){ $('#data_1').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); }, //编辑项目到期时间控件 "click #data_2":function(){ $('#data_2').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); }, //周报复选框 "click .checkboxWeekly": function (e) { 
e.preventDefault(); var weekly_checked=e.target.checked; $('#mengban').show(); Meteor.call('changeWeekly',pro_id,weekly_checked,function(){ $('#mengban').hide(); }); }, //月报复选框 "click .checkboxMonthly": function (e) { e.preventDefault(); var monthly_checked=e.target.checked; $('#mengban').show(); Meteor.call('changeMonthly',pro_id,monthly_checked,function(){ $('#mengban').hide(); }); }, //确认编辑项目 'click .editorpro': function (e) { e.preventDefault(); console.log(pro_id); var editorPro = $('#myModalEditorPro'); var proName = $.trim(editorPro.find('.proName').val()); var beginDate = $.trim(editorPro.find('.beginDate').val()); var endDate = $.trim(editorPro.find('.endDate').val()); var enger = editorPro.find('.enger option:selected').val(); var phone = editorPro.find('.tel').html(); var backup = editorPro.find('.backup option:selected').val(); prompt(proName, $('#myModalEditorPro .proNameNone')); prompt(beginDate, $('#myModalEditorPro .beginDateNone')); prompt(endDate, $('#myModalEditorPro .endDateNone')); if (proName && beginDate && endDate) { $('#mengban').show(); Meteor.call('updateProCK', pro_id, proName, beginDate,endDate, enger, phone, backup, function (error, res) { $('#mengban').hide(); if (typeof error != 'undefined') { console.log(error); } else { if (res['success'] == true) { //$('body').removeClass('modal-open'); //$('body').css("paddingRight",'0'); //$('#myModalEditorPro').removeClass('in'); $('.modal-backdrop').remove(); $('.modal').hide(); _this_btn.click(); //alert(res['msg']); return; } else { alert(res['msg']); } } }); } ////得到选择的监理工程师在用户表中的电话 //var choosedJLG=Users.find({'userName':enger,'type':2}).fetch(); //var supTel=choosedJLG[0].phone; // //Project.update( // {_id:pro_id}, // { $set : { // "proName" : proName, // "beginDate" : beginDate, // "supervisionEngineer" : enger, // "supTel" : supTel, // "backup" : backup, // } // }); }, //监理工程师选择过程中改变下面的电话 'change .enger': function (e) { e.preventDefault(); var editorPro = $('#myModalEditorPro'); var 
enger = editorPro.find('.enger option:selected').val(); //得到选择的监理工程师在用户表中的电话 if (enger != '请选择') { var choosedJLG = Users.find({ 'username': enger, 'type': 2 }).fetch(); var supTel = choosedJLG[0].phone; } else { var supTel = ''; } editorPro.find('.tel').html(supTel); }, //点击日期input //'click .data_1': function() { // $('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false // //forceParse: false, // //calendarWeeks: true, // //autoclose: true // }); // //$('.data_1 .date').datepicker(); // //}, //点击项目列表的详情按钮,跳到对应的项目 'click .jumpPro': function (e) { e.preventDefault(); pro_id = this._id; sessionStorage.setItem('choosepro', pro_id); Session.set("choosepro", pro_id); FlowRouter.go('/projectOverview'); //FlowRouter.go('/projectBasic'); }, //搜索 //'click .toSearchPro': function(e) { //'change .searchPro': function(e) { // e.preventDefault(); // var searchHTML=$('.searchPro').val(); // var choosePro=$('.choosePro'); // var searchTd=choosePro.find('td:not(:last-child)'); // console.log(searchHTML); // console.log(searchTd); // // if(searchHTML){ // choosePro.css('display','none'); // for(var i=0;i<searchTd.length;i++){ // if($(searchTd[i]).html().indexOf(searchHTML)!=-1){ // $(searchTd[i]).parents('tr').css('display','table-row'); // } // } // }else{ // choosePro.css('display','table-row'); // } //}, 'click .toSearchPro': function (e) { e.preventDefault(); var searchHTML = $('.searchPro').val(); $('#mengban').show(); _changeProject2.stop(); _changeProject2 = Meteor.subscribe('project', loguser, searchHTML, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); //_changeProject2.stop(); //_changeProject2=tem; }); }, 'keydown .searchPro': function (e) { if (e && e.keyCode == 13) { 
// enter 键 var searchHTML = $('.searchPro').val(); $('#mengban').show(); _changeProject2.stop(); _changeProject2 = Meteor.subscribe('project', loguser, searchHTML, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); //_changeProject2.stop(); //_changeProject2=tem; }); } }, //'click.bbb .M-box1 a': function (e) { // console.log('翻页了'); // Template.instance().nowpageData.set(pagenum); //} });
} $('#mengban').show(); Meteor.call('accPro', pro_id, state, function (error, res) { $('#mengb
conditional_block
cockpit.js
import "./cockpit.html" import "./cockpit_table.html" // import "../common/navbar-position.html" //import { ReactiveVar } from 'meteor/reactive-var'; let loguser; let pro_id; let _changeProject2; let _this_btn; let pagenum; let limit=10; let templ; //显隐提示 function prompt(v
Obj) { if (!val) { shObj.show(); } else { shObj.hide(); } } //this.Pages = new Meteor.Pagination("Project",{ // perPage: 2, // itemTemplate: "cockpit_table", // //templateName: 'Project', // //itemTemplate: 'cockpit' // //sort: { // // title: 1 // //}, // //filters: { // // count: { // // $gt: 10 // // } // //}, // //availableSettings: { // // perPage: true, // // sort: true // //} //}); //let that_pro; //Meteor.subscribe('cockpitTable'); //Meteor.subscribe('userTable'); Template.cockpit.onDestroyed(function () { _changeProject2.stop(); pro_id = null; //that_pro=null; }); Template.cockpit.rendered = function () { $('#data_1').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); $('#data_2').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); //初始化 //_changeProject2=Meteor.subscribe('project',loguser,function(){ // that_pro=Project.find({}).fetch(); //}); //日期 //$('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false, // forceParse: false, // calendarWeeks: true, // autoclose: true //}); //$('.M-box1').pagination({ // totalData:100, // showData:1, // coping:true, // callback:function(api){ // console.log(api.getCurrent()); // $('.now').text(api.getCurrent()); // } //}); //分页 //this.Pages = new Meteor.Pagination("Project",{ // perPage: 2, // itemTemplate: "cockpit_table", // templateName: 'Project', // //itemTemplate: 'cockpit' // //sort: { // // title: 1 // //}, // //filters: { // // count: { // // $gt: 10 // // } // //}, // //availableSettings: { // // perPage: true, // // sort: true // //} //}); }; Template.cockpit.onCreated(function () { /* * find() 返回值是一个游标。游标是一种从动数据源 *输出内容,可以对游标使用 fetch() 来把游标转换成数组 * */ // //var userPhone=$.cookie('user_phone'); //var userType=$.cookie('user_type'); //得到登录用户的id //var _loguserId=FlowRouter.getParam('_id'); //console.log(_loguserId); loguser = 
sessionStorage.getItem('loguser'); //Session.get('loguser2'); console.log(loguser); if (!loguser) { FlowRouter.go('/login'); } //console.log(loguser); templ=this; //订阅数据 $('#mengban').show(); _changeProject2 = this.subscribe('project', loguser, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); }); //this.subscribe('cockpitTable_user',userPhone,userType); this.subscribe('allusers'); //this.subscribe('dictionaries'); //单条项目 var _data = Project.find({}).fetch(); //ReactiveDict本地变量 this.editorData = new ReactiveVar(_data); //当前页码 this.nowpageData = new ReactiveVar(); //Meteor.call('getnum',_data,function(){ // //}); //页码本地变量 //this.pages = new ReactiveVar(); }); Template.cockpit.helpers({ //项目集合 cockpitTable: function () { var page = Template.instance().nowpageData.get(); var bendiPT = Project.find({},{skip:(page-1)*limit,limit:limit}).fetch(); //Template.instance().searchData.set(that_pro); //var bendiPT=Template.instance().searchData.get(); for (var i = 0; i < bendiPT.length; i++) { bendiPT[i].ordinal = i + 1; //Meteor.call('proProgress',bendiPT[i]._id,function(error,res){ // bendiPT[i].progress=res['result']; //}); if (bendiPT[i].supervisionEngineer) { bendiPT[i].supervisionEngineer = Users.find({ '_id': bendiPT[i].supervisionEngineer }).fetch()[0].username; } if (bendiPT[i].backup == 0) { bendiPT[i].backup = '无'; } else { bendiPT[i].backup = Dictionaries.find({ "ecode": "backUp", 'value': bendiPT[i].backup }).fetch()[0].name; } if (bendiPT[i].weekly) { bendiPT[i].weekly ='开启'; }else{ bendiPT[i].weekly ='关闭'; } if (bendiPT[i].monthly) { bendiPT[i].monthly ='开启'; }else{ bendiPT[i].monthly ='关闭'; } } return bendiPT; }, //cockpitTable: function() { // //return CockpitTable.find(); // var 
bendiCT=Project.find().fetch(); // for(var i=0;i<bendiCT.length;i++){ // bendiCT[i].ordinal=i+1; // } // return bendiCT; //}, //单条项目集合 editorTable: function () { var _data = Template.instance().editorData.get(); return _data; }, //周报是否勾选 isweekly: function (a) { var pro=Project.find({_id:a}).fetch(); if(pro[0]){ if(pro[0].weekly==0) { return false; }else{ return true; } } }, //月报是否勾选 ismonthly: function (a) { var pro=Project.find({_id:a}).fetch(); if(pro[0]){ if(pro[0].monthly==0) { return false; }else{ return true; } } }, //用户表中监理工程师集合 userTableJLG: function () { return Users.find({ "type": 2, 'state': 1 }); }, //工程师选中判断 engSelect: function (a) { //console.log(a); var supervisionEngineer = Template.instance().editorData.get()[0].supervisionEngineer; var engname = Users.find({ '_id': supervisionEngineer }).fetch()[0].username; if (a == engname) { return true; } else { return false; } }, //字典表中备份方案集合 backUp: function () { return Dictionaries.find({ "ecode": "backUp" }); }, //备份方案选中方法 backUpSelect: function (a) { var backup = Template.instance().editorData.get()[0].backup; if (a == backup) { return true; } else { return false; } }, //验收判断 accState: function (a) { //0是已验收,1是未验收 if (a == 1) { return false; } else if (a == 0) { return true; } }, //是否显示操作判断 isHandle: function () { var loguser = sessionStorage.getItem('loguser'); //var _loguserId=FlowRouter.getParam('_id'); //var loguserType=Users.find({'_id':_loguserId}).fetch()[0].type; var user = Users.findOne({ '_id': loguser }); if (user) { var loguserType = user.type; //if(loguserType==1 || loguserType==0){ if (loguserType == 1 || loguserType == 0) { return true; } else { return false; } } return false; } }); Template.cockpit.onRendered(function () { //日期 //$('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // keyboardNavigation: false, // forceParse: false, // calendarWeeks: true, // autoclose: true //}); //$('.modal').appendTo("body"); //var proSum=CockpitTable.find().count(); //console.log(1111); 
}); Template.cockpit.events({ //'click .add': function(e) { // //alert(1); // e.preventDefault(); // var addPro=$('#myModalAddpro'); // //var proname=addpro.find('.modal-body').find('input:first-child').val(); // //const target = e.target; // //const text = target.text.value; // //console.log(text); // console.log(pro_id); // CockpitTable.insert({'number':10,'proName':'11'}); // //var proNumber=CockpitTable.find({}).sort({number:-1}).limit(1); // //CockpitTable.find({}, { sort: { number: -1 } }).limit(1); //}, //验收项目 "click .acc": function (e) { e.preventDefault(); pro_id = this._id; var state = this.state; if (state == 0) { state = 1; //Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : null} }); } else if (state == 1) { state = 0; //var timestamp = ((Date.parse(new Date()))/ 1000).toString(); //Project.update({_id:pro_id},{ $set : { "state" : state,"accTime" : timestamp} }); } $('#mengban').show(); Meteor.call('accPro', pro_id, state, function (error, res) { $('#mengban').hide(); }); //function base64_decode(base64str, file) { // // create buffer object from base64 encoded string, it is important to tell the constructor that the string is base64 encoded // var bitmap = new Buffer(base64str, 'base64'); // // write buffer to file // fs.writeFileSync(file, bitmap); // console.log('******** File created from base64 encoded string ********'); //} // 
//base64_decode('iVBORw0KGgoAAAANSUhEUgAAADQAAAAlCAYAAAAN8srVAAACTUlEQVR42u3Wv2sTcRiA8VPBxUKwEAxU3NxPIoFAl1bIkkmwYKAKRbqbRSWQCGJ+rMUibjo4FARBl0AgUIh/QXFxFIpKJHAQKA56r0/hDbyEK5VrDH2hBx+ud+Ga9+G+uSQQkVOv0+lMZNBFHoFRwABZb0F9CCITVdRjQd9b0CoOTNSGiRkidBWkljGGINb9CCECd0FqE7GJqkxeMxccK8UbJzppUPGIO5SfR9DCjINsTIR1RDbKXvAakuB9yqAsvuLaDIN6Jqag5/IaIxjYCxaxDzFGyKUMegdBb4ZBGfQmMUaIXeSmLyhDjHspl9wdiPHgJEGlUumf2UGml96HlJ+hRQwhRoSleQfZgfawlDJoB5KgO4OgDLrIT4UUMEA2xdNpro/t6aA+BJGJKuqxoJ9ikLmzQas4MFEbJmYIHz99GNRaxhiCWPcjhAjcBalNxCaqgsBrUPGIO5T3GGRjIqwjslHegnompqDn8hojGHgLyqA3iTFC7CLnLOh4Z0Gn3FnQf2O3ZrN5iZ9aVw81Go3zQfLmI4iIx/gBUXvtdnvNXZDGbEMI2Gf/BFsQPXffVRADr+jgn1hylwPdOL6Bn7w2brVaV9wEMfALBheGDu3QGvVQ79RtT0FvGDyu1WoXE4JWNKjiack916HXEoJecT7GLTdBLLXrDPwbEX+Xq9XqucPHNzFVzv3B93q9fsHbU+4uhAhh/wXfIMaWqyBdXjfxluE/63fQM/Yt8/je9hQ0vdnQpybqJRZcB2nUI4J+QVB2H6RRHzUoTPo/fwGr9gNcek8bXAAAAABJRU5ErkJggg==', 'copy.jpg'); //Meteor.call('imageUpload2'); }, //编辑项目 "click .editor": function (e) { e.preventDefault(); pro_id = this._id; _this_btn = $(e.target); $('#myModalEditorPro .proNameNone').hide(); $('#myModalEditorPro .beginDateNone').hide(); Template.instance().editorData.set(Project.find({ _id: pro_id }).fetch()); }, //编辑项目开始时间控件 "click #data_1":function(){ $('#data_1').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); }, //编辑项目到期时间控件 "click #data_2":function(){ $('#data_2').datepicker({ todayBtn: "linked", keyboardNavigation: false, forceParse: false, calendarWeeks: true, autoclose: true }); }, //周报复选框 "click .checkboxWeekly": function (e) { e.preventDefault(); var weekly_checked=e.target.checked; $('#mengban').show(); Meteor.call('changeWeekly',pro_id,weekly_checked,function(){ $('#mengban').hide(); }); }, //月报复选框 "click .checkboxMonthly": function (e) { e.preventDefault(); var monthly_checked=e.target.checked; $('#mengban').show(); Meteor.call('changeMonthly',pro_id,monthly_checked,function(){ $('#mengban').hide(); }); }, 
//确认编辑项目 'click .editorpro': function (e) { e.preventDefault(); console.log(pro_id); var editorPro = $('#myModalEditorPro'); var proName = $.trim(editorPro.find('.proName').val()); var beginDate = $.trim(editorPro.find('.beginDate').val()); var endDate = $.trim(editorPro.find('.endDate').val()); var enger = editorPro.find('.enger option:selected').val(); var phone = editorPro.find('.tel').html(); var backup = editorPro.find('.backup option:selected').val(); prompt(proName, $('#myModalEditorPro .proNameNone')); prompt(beginDate, $('#myModalEditorPro .beginDateNone')); prompt(endDate, $('#myModalEditorPro .endDateNone')); if (proName && beginDate && endDate) { $('#mengban').show(); Meteor.call('updateProCK', pro_id, proName, beginDate,endDate, enger, phone, backup, function (error, res) { $('#mengban').hide(); if (typeof error != 'undefined') { console.log(error); } else { if (res['success'] == true) { //$('body').removeClass('modal-open'); //$('body').css("paddingRight",'0'); //$('#myModalEditorPro').removeClass('in'); $('.modal-backdrop').remove(); $('.modal').hide(); _this_btn.click(); //alert(res['msg']); return; } else { alert(res['msg']); } } }); } ////得到选择的监理工程师在用户表中的电话 //var choosedJLG=Users.find({'userName':enger,'type':2}).fetch(); //var supTel=choosedJLG[0].phone; // //Project.update( // {_id:pro_id}, // { $set : { // "proName" : proName, // "beginDate" : beginDate, // "supervisionEngineer" : enger, // "supTel" : supTel, // "backup" : backup, // } // }); }, //监理工程师选择过程中改变下面的电话 'change .enger': function (e) { e.preventDefault(); var editorPro = $('#myModalEditorPro'); var enger = editorPro.find('.enger option:selected').val(); //得到选择的监理工程师在用户表中的电话 if (enger != '请选择') { var choosedJLG = Users.find({ 'username': enger, 'type': 2 }).fetch(); var supTel = choosedJLG[0].phone; } else { var supTel = ''; } editorPro.find('.tel').html(supTel); }, //点击日期input //'click .data_1': function() { // $('.data_1 .input-group.date').datepicker({ // todayBtn: "linked", // 
keyboardNavigation: false // //forceParse: false, // //calendarWeeks: true, // //autoclose: true // }); // //$('.data_1 .date').datepicker(); // //}, //点击项目列表的详情按钮,跳到对应的项目 'click .jumpPro': function (e) { e.preventDefault(); pro_id = this._id; sessionStorage.setItem('choosepro', pro_id); Session.set("choosepro", pro_id); FlowRouter.go('/projectOverview'); //FlowRouter.go('/projectBasic'); }, //搜索 //'click .toSearchPro': function(e) { //'change .searchPro': function(e) { // e.preventDefault(); // var searchHTML=$('.searchPro').val(); // var choosePro=$('.choosePro'); // var searchTd=choosePro.find('td:not(:last-child)'); // console.log(searchHTML); // console.log(searchTd); // // if(searchHTML){ // choosePro.css('display','none'); // for(var i=0;i<searchTd.length;i++){ // if($(searchTd[i]).html().indexOf(searchHTML)!=-1){ // $(searchTd[i]).parents('tr').css('display','table-row'); // } // } // }else{ // choosePro.css('display','table-row'); // } //}, 'click .toSearchPro': function (e) { e.preventDefault(); var searchHTML = $('.searchPro').val(); $('#mengban').show(); _changeProject2.stop(); _changeProject2 = Meteor.subscribe('project', loguser, searchHTML, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); //_changeProject2.stop(); //_changeProject2=tem; }); }, 'keydown .searchPro': function (e) { if (e && e.keyCode == 13) { // enter 键 var searchHTML = $('.searchPro').val(); $('#mengban').show(); _changeProject2.stop(); _changeProject2 = Meteor.subscribe('project', loguser, searchHTML, function () { $('#mengban').hide(); var totle = Project.find().count(); console.log(totle); $('.M-box1').pagination({ totalData:totle, showData:limit, coping:true, callback:function(api){ console.log(api.getCurrent()); 
pagenum=api.getCurrent(); console.log(pagenum); templ.nowpageData.set(api.getCurrent()); } }); //_changeProject2.stop(); //_changeProject2=tem; }); } }, //'click.bbb .M-box1 a': function (e) { // console.log('翻页了'); // Template.instance().nowpageData.set(pagenum); //} });
al, sh
identifier_name
config.rs
use crate::utility::location::Location; use crate::exit_on_bad_config; use origen_metal::{config, scrub_path}; use origen_metal::config::{Environment, File}; use std::collections::HashMap; use std::path::{Path, PathBuf}; use crate::om::glob::glob; use std::process::exit; use super::target; const PUBLISHER_OPTIONS: &[&str] = &["system", "package_app", "upload_app"]; const BYPASS_APP_CONFIG_ENV_VAR: &str = "origen_app_bypass_config_lookup"; const APP_CONFIG_PATHS: &str = "origen_app_config_paths"; macro_rules! use_app_config { () => {{ !std::env::var_os($crate::core::application::config::BYPASS_APP_CONFIG_ENV_VAR).is_some() }} } #[derive(Debug, Deserialize)] pub struct CurrentState { pub target: Option<Vec<String>> } impl CurrentState { pub fn build(root: &PathBuf) -> Self { let file = root.join(".origen").join("application.toml"); let mut s = config::Config::builder().set_default("target", None::<Vec<String>>).unwrap(); if file.exists() { s = s.add_source(File::with_name(&format!("{}", file.display()))); } let cb = exit_on_bad_config!(s.build()); let slf: Self = exit_on_bad_config!(cb.try_deserialize()); slf } pub fn apply_to(&mut self, config: &mut Config) { if let Some(t) = self.target.as_ref() { config.target = Some(t.to_owned()) } else { if let Some(t) = &config.target { let clean_defaults = target::set_at_root(t.iter().map( |s| s.as_str() ).collect(), config.root.as_ref().unwrap()); self.target = Some(clean_defaults); } } } pub fn build_and_apply(config: &mut Config) { if use_app_config!() { let mut slf = Self::build(config.root.as_ref().unwrap()); slf.apply_to(config); } } } #[derive(Debug, Deserialize)] // If you add an attribute to this you must also update: // * pyapi/src/lib.rs to convert it to Python // * default function below to define the default value // * add an example of it to src/app_generators/templates/app/config/application.toml pub struct Config { pub name: String, pub target: Option<Vec<String>>, pub mode: String, /// Don't use this unless 
you know what you're doing, use origen::STATUS::output_dir() instead, since /// that accounts for the output directory being overridden by the current command pub output_directory: Option<String>, /// Don't use this unless you know what you're doing, use origen::STATUS::reference_dir() instead, since /// that accounts for the reference directory being overridden by the current command pub reference_directory: Option<String>, pub website_output_directory: Option<String>, pub website_source_directory: Option<String>, pub website_release_location: Option<Location>, pub website_release_name: Option<String>, pub root: Option<PathBuf>, pub revision_control: Option<HashMap<String, String>>, pub unit_tester: Option<HashMap<String, String>>, pub publisher: Option<HashMap<String, String>>, pub linter: Option<HashMap<String, String>>, pub release_scribe: Option<HashMap<String, String>>, pub app_session_root: Option<String>, pub commands: Option<Vec<String>>, } impl Config { pub fn refresh(&mut self) { let latest = Self::build(self.root.as_ref().unwrap(), false); self.name = latest.name; self.target = latest.target; self.mode = latest.mode; self.reference_directory = latest.reference_directory; self.website_output_directory = latest.website_output_directory; self.website_source_directory = latest.website_source_directory; self.website_release_location = latest.website_release_location; self.website_release_name = latest.website_release_name; self.revision_control = latest.revision_control; self.unit_tester = latest.unit_tester; self.publisher = latest.publisher; self.linter = latest.linter; self.release_scribe = latest.release_scribe; self.app_session_root = latest.app_session_root; self.commands = latest.commands; } /// Builds a new config from all application.toml files found at the given app root pub fn build(root: &Path, default_only: bool) -> Config { log_trace!("Building app config"); let mut s = config::Config::builder() .set_default("target", None::<Vec<String>>) 
.unwrap() .set_default("mode", "development".to_string()) .unwrap() .set_default("revision_control", None::<HashMap<String, String>>) .unwrap() .set_default("unit_tester", None::<HashMap<String, String>>) .unwrap() .set_default("publisher", None::<HashMap<String, String>>) .unwrap() .set_default("linter", None::<HashMap<String, String>>) .unwrap() .set_default("release_scribe", None::<HashMap<String, String>>) .unwrap() .set_default("app_session_root", None::<String>) .unwrap() .set_default("commands", None::<Vec<String>>) .unwrap(); let mut files: Vec<PathBuf> = Vec::new(); if let Some(paths) = std::env::var_os(APP_CONFIG_PATHS) { log_trace!("Found custom config paths: {:?}", paths); for path in std::env::split_paths(&paths) { log_trace!("Looking for Origen app config file at '{}'", path.display()); if path.is_file() { if let Some(ext) = path.extension() { if ext == "toml" { files.push(path); } else { log_error!( "Expected file {} to have extension '.toml'. Found '{}'", path.display(), ext.to_string_lossy() ) } } else { // accept a file without an extension. 
will be interpreted as a .toml files.push(path); } } else if path.is_dir() { let f = path.join("application.toml"); if f.exists() { files.push(f); } } else { log_error!( "Config path {} either does not exists or is not accessible", path.display() ); exit(1); } } } if use_app_config!() { let file = root.join("config").join("application.toml"); if file.exists() { files.push(file); } } else { // Bypass Origen's default configuration lookup - use only the enumerated configs log_trace!("Bypassing Origen's App Config Lookup"); } for f in files.iter().rev() { log_trace!("Loading Origen config file from '{}'", f.display()); s = s.add_source(File::with_name(&format!("{}", f.display()))); } s = s.add_source(Environment::with_prefix("origen_app").list_separator(",").with_list_parse_key("target").with_list_parse_key("commands").try_parsing(true)); let cb = exit_on_bad_config!(s.build()); let mut c: Self = exit_on_bad_config!(cb.try_deserialize()); c.root = Some(root.to_path_buf()); // TODO // if let Some(l) = loc { // c.website_release_location = Some(Location::new(&l)); // } log_trace!("Completed building app config"); c.validate_options(); if !default_only { CurrentState::build_and_apply(&mut c); } c } pub fn validate_options(&self) { log_trace!("Validating available options..."); if let Some(targets) = self.target.as_ref() { log_trace!("\tValidating default target..."); for t in targets { target::clean_name(t, "targets", true, self.root.as_ref().unwrap()); } log_trace!("\tValidating default target!"); } log_trace!("\tValidating publisher options..."); for unknown in self.validate_publisher_options() { log_warning!("Unknown Publisher Option '{}'", unknown); } log_trace!("\tFinished validating publisher options"); log_trace!("Finished checking configs!"); } pub fn validate_publisher_options(&self) -> Vec<String>
pub fn cmd_paths(&self) -> Vec<PathBuf> { let mut retn = vec!(); if let Some(cmds) = self.commands.as_ref() { // Load in only the commands explicitly given for cmds_toml in cmds { let ct = self.root.as_ref().unwrap().join("config").join(cmds_toml); if ct.exists() { retn.push(ct.to_owned()); } else { log_error!("Can not locate app commands file '{}'", scrub_path!(ct).display()) } } } else { // Load in any commands from: // 1) app_root/commands.toml // 2) app_root/commands/*/**.toml let commands_toml = self.root.as_ref().unwrap().join("config").join("commands.toml"); // println!("commands toml: {}", commands_toml.display()); if commands_toml.exists() { retn.push(commands_toml); } let mut commands_dir = self.root.as_ref().unwrap().join("config/commands"); if commands_dir.exists() { commands_dir = commands_dir.join("**/*.toml"); for entry in glob(commands_dir.to_str().unwrap()).unwrap() { match entry { Ok(e) => retn.push(e), Err(e) => log_error!("Error processing commands toml: {}", e) } } } } retn } }
{ let mut unknowns: Vec<String> = vec![]; if let Some(p) = &self.publisher { for (opt, _) in p.iter() { if !PUBLISHER_OPTIONS.contains(&opt.as_str()) { unknowns.push(opt.clone()); } } } unknowns }
identifier_body
config.rs
use crate::utility::location::Location; use crate::exit_on_bad_config; use origen_metal::{config, scrub_path}; use origen_metal::config::{Environment, File}; use std::collections::HashMap; use std::path::{Path, PathBuf}; use crate::om::glob::glob; use std::process::exit; use super::target; const PUBLISHER_OPTIONS: &[&str] = &["system", "package_app", "upload_app"]; const BYPASS_APP_CONFIG_ENV_VAR: &str = "origen_app_bypass_config_lookup"; const APP_CONFIG_PATHS: &str = "origen_app_config_paths"; macro_rules! use_app_config { () => {{ !std::env::var_os($crate::core::application::config::BYPASS_APP_CONFIG_ENV_VAR).is_some() }} } #[derive(Debug, Deserialize)] pub struct
{ pub target: Option<Vec<String>> } impl CurrentState { pub fn build(root: &PathBuf) -> Self { let file = root.join(".origen").join("application.toml"); let mut s = config::Config::builder().set_default("target", None::<Vec<String>>).unwrap(); if file.exists() { s = s.add_source(File::with_name(&format!("{}", file.display()))); } let cb = exit_on_bad_config!(s.build()); let slf: Self = exit_on_bad_config!(cb.try_deserialize()); slf } pub fn apply_to(&mut self, config: &mut Config) { if let Some(t) = self.target.as_ref() { config.target = Some(t.to_owned()) } else { if let Some(t) = &config.target { let clean_defaults = target::set_at_root(t.iter().map( |s| s.as_str() ).collect(), config.root.as_ref().unwrap()); self.target = Some(clean_defaults); } } } pub fn build_and_apply(config: &mut Config) { if use_app_config!() { let mut slf = Self::build(config.root.as_ref().unwrap()); slf.apply_to(config); } } } #[derive(Debug, Deserialize)] // If you add an attribute to this you must also update: // * pyapi/src/lib.rs to convert it to Python // * default function below to define the default value // * add an example of it to src/app_generators/templates/app/config/application.toml pub struct Config { pub name: String, pub target: Option<Vec<String>>, pub mode: String, /// Don't use this unless you know what you're doing, use origen::STATUS::output_dir() instead, since /// that accounts for the output directory being overridden by the current command pub output_directory: Option<String>, /// Don't use this unless you know what you're doing, use origen::STATUS::reference_dir() instead, since /// that accounts for the reference directory being overridden by the current command pub reference_directory: Option<String>, pub website_output_directory: Option<String>, pub website_source_directory: Option<String>, pub website_release_location: Option<Location>, pub website_release_name: Option<String>, pub root: Option<PathBuf>, pub revision_control: Option<HashMap<String, 
String>>, pub unit_tester: Option<HashMap<String, String>>, pub publisher: Option<HashMap<String, String>>, pub linter: Option<HashMap<String, String>>, pub release_scribe: Option<HashMap<String, String>>, pub app_session_root: Option<String>, pub commands: Option<Vec<String>>, } impl Config { pub fn refresh(&mut self) { let latest = Self::build(self.root.as_ref().unwrap(), false); self.name = latest.name; self.target = latest.target; self.mode = latest.mode; self.reference_directory = latest.reference_directory; self.website_output_directory = latest.website_output_directory; self.website_source_directory = latest.website_source_directory; self.website_release_location = latest.website_release_location; self.website_release_name = latest.website_release_name; self.revision_control = latest.revision_control; self.unit_tester = latest.unit_tester; self.publisher = latest.publisher; self.linter = latest.linter; self.release_scribe = latest.release_scribe; self.app_session_root = latest.app_session_root; self.commands = latest.commands; } /// Builds a new config from all application.toml files found at the given app root pub fn build(root: &Path, default_only: bool) -> Config { log_trace!("Building app config"); let mut s = config::Config::builder() .set_default("target", None::<Vec<String>>) .unwrap() .set_default("mode", "development".to_string()) .unwrap() .set_default("revision_control", None::<HashMap<String, String>>) .unwrap() .set_default("unit_tester", None::<HashMap<String, String>>) .unwrap() .set_default("publisher", None::<HashMap<String, String>>) .unwrap() .set_default("linter", None::<HashMap<String, String>>) .unwrap() .set_default("release_scribe", None::<HashMap<String, String>>) .unwrap() .set_default("app_session_root", None::<String>) .unwrap() .set_default("commands", None::<Vec<String>>) .unwrap(); let mut files: Vec<PathBuf> = Vec::new(); if let Some(paths) = std::env::var_os(APP_CONFIG_PATHS) { log_trace!("Found custom config paths: {:?}", 
paths); for path in std::env::split_paths(&paths) { log_trace!("Looking for Origen app config file at '{}'", path.display()); if path.is_file() { if let Some(ext) = path.extension() { if ext == "toml" { files.push(path); } else { log_error!( "Expected file {} to have extension '.toml'. Found '{}'", path.display(), ext.to_string_lossy() ) } } else { // accept a file without an extension. will be interpreted as a .toml files.push(path); } } else if path.is_dir() { let f = path.join("application.toml"); if f.exists() { files.push(f); } } else { log_error!( "Config path {} either does not exists or is not accessible", path.display() ); exit(1); } } } if use_app_config!() { let file = root.join("config").join("application.toml"); if file.exists() { files.push(file); } } else { // Bypass Origen's default configuration lookup - use only the enumerated configs log_trace!("Bypassing Origen's App Config Lookup"); } for f in files.iter().rev() { log_trace!("Loading Origen config file from '{}'", f.display()); s = s.add_source(File::with_name(&format!("{}", f.display()))); } s = s.add_source(Environment::with_prefix("origen_app").list_separator(",").with_list_parse_key("target").with_list_parse_key("commands").try_parsing(true)); let cb = exit_on_bad_config!(s.build()); let mut c: Self = exit_on_bad_config!(cb.try_deserialize()); c.root = Some(root.to_path_buf()); // TODO // if let Some(l) = loc { // c.website_release_location = Some(Location::new(&l)); // } log_trace!("Completed building app config"); c.validate_options(); if !default_only { CurrentState::build_and_apply(&mut c); } c } pub fn validate_options(&self) { log_trace!("Validating available options..."); if let Some(targets) = self.target.as_ref() { log_trace!("\tValidating default target..."); for t in targets { target::clean_name(t, "targets", true, self.root.as_ref().unwrap()); } log_trace!("\tValidating default target!"); } log_trace!("\tValidating publisher options..."); for unknown in 
self.validate_publisher_options() { log_warning!("Unknown Publisher Option '{}'", unknown); } log_trace!("\tFinished validating publisher options"); log_trace!("Finished checking configs!"); } pub fn validate_publisher_options(&self) -> Vec<String> { let mut unknowns: Vec<String> = vec![]; if let Some(p) = &self.publisher { for (opt, _) in p.iter() { if !PUBLISHER_OPTIONS.contains(&opt.as_str()) { unknowns.push(opt.clone()); } } } unknowns } pub fn cmd_paths(&self) -> Vec<PathBuf> { let mut retn = vec!(); if let Some(cmds) = self.commands.as_ref() { // Load in only the commands explicitly given for cmds_toml in cmds { let ct = self.root.as_ref().unwrap().join("config").join(cmds_toml); if ct.exists() { retn.push(ct.to_owned()); } else { log_error!("Can not locate app commands file '{}'", scrub_path!(ct).display()) } } } else { // Load in any commands from: // 1) app_root/commands.toml // 2) app_root/commands/*/**.toml let commands_toml = self.root.as_ref().unwrap().join("config").join("commands.toml"); // println!("commands toml: {}", commands_toml.display()); if commands_toml.exists() { retn.push(commands_toml); } let mut commands_dir = self.root.as_ref().unwrap().join("config/commands"); if commands_dir.exists() { commands_dir = commands_dir.join("**/*.toml"); for entry in glob(commands_dir.to_str().unwrap()).unwrap() { match entry { Ok(e) => retn.push(e), Err(e) => log_error!("Error processing commands toml: {}", e) } } } } retn } }
CurrentState
identifier_name
config.rs
use crate::utility::location::Location; use crate::exit_on_bad_config; use origen_metal::{config, scrub_path}; use origen_metal::config::{Environment, File}; use std::collections::HashMap; use std::path::{Path, PathBuf}; use crate::om::glob::glob; use std::process::exit; use super::target; const PUBLISHER_OPTIONS: &[&str] = &["system", "package_app", "upload_app"]; const BYPASS_APP_CONFIG_ENV_VAR: &str = "origen_app_bypass_config_lookup"; const APP_CONFIG_PATHS: &str = "origen_app_config_paths"; macro_rules! use_app_config { () => {{ !std::env::var_os($crate::core::application::config::BYPASS_APP_CONFIG_ENV_VAR).is_some() }} } #[derive(Debug, Deserialize)] pub struct CurrentState { pub target: Option<Vec<String>> } impl CurrentState { pub fn build(root: &PathBuf) -> Self { let file = root.join(".origen").join("application.toml"); let mut s = config::Config::builder().set_default("target", None::<Vec<String>>).unwrap(); if file.exists() { s = s.add_source(File::with_name(&format!("{}", file.display()))); } let cb = exit_on_bad_config!(s.build()); let slf: Self = exit_on_bad_config!(cb.try_deserialize()); slf } pub fn apply_to(&mut self, config: &mut Config) { if let Some(t) = self.target.as_ref() { config.target = Some(t.to_owned()) } else { if let Some(t) = &config.target { let clean_defaults = target::set_at_root(t.iter().map( |s| s.as_str() ).collect(), config.root.as_ref().unwrap()); self.target = Some(clean_defaults); } } } pub fn build_and_apply(config: &mut Config) { if use_app_config!() { let mut slf = Self::build(config.root.as_ref().unwrap()); slf.apply_to(config); } } } #[derive(Debug, Deserialize)] // If you add an attribute to this you must also update: // * pyapi/src/lib.rs to convert it to Python // * default function below to define the default value // * add an example of it to src/app_generators/templates/app/config/application.toml pub struct Config { pub name: String, pub target: Option<Vec<String>>, pub mode: String, /// Don't use this unless 
you know what you're doing, use origen::STATUS::output_dir() instead, since /// that accounts for the output directory being overridden by the current command pub output_directory: Option<String>, /// Don't use this unless you know what you're doing, use origen::STATUS::reference_dir() instead, since /// that accounts for the reference directory being overridden by the current command pub reference_directory: Option<String>, pub website_output_directory: Option<String>, pub website_source_directory: Option<String>, pub website_release_location: Option<Location>, pub website_release_name: Option<String>, pub root: Option<PathBuf>, pub revision_control: Option<HashMap<String, String>>, pub unit_tester: Option<HashMap<String, String>>, pub publisher: Option<HashMap<String, String>>, pub linter: Option<HashMap<String, String>>, pub release_scribe: Option<HashMap<String, String>>, pub app_session_root: Option<String>, pub commands: Option<Vec<String>>, } impl Config { pub fn refresh(&mut self) { let latest = Self::build(self.root.as_ref().unwrap(), false); self.name = latest.name; self.target = latest.target; self.mode = latest.mode; self.reference_directory = latest.reference_directory; self.website_output_directory = latest.website_output_directory; self.website_source_directory = latest.website_source_directory; self.website_release_location = latest.website_release_location; self.website_release_name = latest.website_release_name; self.revision_control = latest.revision_control; self.unit_tester = latest.unit_tester; self.publisher = latest.publisher; self.linter = latest.linter; self.release_scribe = latest.release_scribe; self.app_session_root = latest.app_session_root; self.commands = latest.commands; } /// Builds a new config from all application.toml files found at the given app root pub fn build(root: &Path, default_only: bool) -> Config { log_trace!("Building app config"); let mut s = config::Config::builder() .set_default("target", None::<Vec<String>>) 
.unwrap() .set_default("mode", "development".to_string()) .unwrap() .set_default("revision_control", None::<HashMap<String, String>>) .unwrap() .set_default("unit_tester", None::<HashMap<String, String>>) .unwrap() .set_default("publisher", None::<HashMap<String, String>>) .unwrap() .set_default("linter", None::<HashMap<String, String>>) .unwrap() .set_default("release_scribe", None::<HashMap<String, String>>) .unwrap() .set_default("app_session_root", None::<String>) .unwrap() .set_default("commands", None::<Vec<String>>) .unwrap(); let mut files: Vec<PathBuf> = Vec::new(); if let Some(paths) = std::env::var_os(APP_CONFIG_PATHS) { log_trace!("Found custom config paths: {:?}", paths); for path in std::env::split_paths(&paths) { log_trace!("Looking for Origen app config file at '{}'", path.display()); if path.is_file() { if let Some(ext) = path.extension() { if ext == "toml" { files.push(path); } else { log_error!( "Expected file {} to have extension '.toml'. Found '{}'", path.display(), ext.to_string_lossy() ) } } else { // accept a file without an extension. will be interpreted as a .toml files.push(path); } } else if path.is_dir() { let f = path.join("application.toml"); if f.exists() { files.push(f); } } else
} } if use_app_config!() { let file = root.join("config").join("application.toml"); if file.exists() { files.push(file); } } else { // Bypass Origen's default configuration lookup - use only the enumerated configs log_trace!("Bypassing Origen's App Config Lookup"); } for f in files.iter().rev() { log_trace!("Loading Origen config file from '{}'", f.display()); s = s.add_source(File::with_name(&format!("{}", f.display()))); } s = s.add_source(Environment::with_prefix("origen_app").list_separator(",").with_list_parse_key("target").with_list_parse_key("commands").try_parsing(true)); let cb = exit_on_bad_config!(s.build()); let mut c: Self = exit_on_bad_config!(cb.try_deserialize()); c.root = Some(root.to_path_buf()); // TODO // if let Some(l) = loc { // c.website_release_location = Some(Location::new(&l)); // } log_trace!("Completed building app config"); c.validate_options(); if !default_only { CurrentState::build_and_apply(&mut c); } c } pub fn validate_options(&self) { log_trace!("Validating available options..."); if let Some(targets) = self.target.as_ref() { log_trace!("\tValidating default target..."); for t in targets { target::clean_name(t, "targets", true, self.root.as_ref().unwrap()); } log_trace!("\tValidating default target!"); } log_trace!("\tValidating publisher options..."); for unknown in self.validate_publisher_options() { log_warning!("Unknown Publisher Option '{}'", unknown); } log_trace!("\tFinished validating publisher options"); log_trace!("Finished checking configs!"); } pub fn validate_publisher_options(&self) -> Vec<String> { let mut unknowns: Vec<String> = vec![]; if let Some(p) = &self.publisher { for (opt, _) in p.iter() { if !PUBLISHER_OPTIONS.contains(&opt.as_str()) { unknowns.push(opt.clone()); } } } unknowns } pub fn cmd_paths(&self) -> Vec<PathBuf> { let mut retn = vec!(); if let Some(cmds) = self.commands.as_ref() { // Load in only the commands explicitly given for cmds_toml in cmds { let ct = 
self.root.as_ref().unwrap().join("config").join(cmds_toml); if ct.exists() { retn.push(ct.to_owned()); } else { log_error!("Can not locate app commands file '{}'", scrub_path!(ct).display()) } } } else { // Load in any commands from: // 1) app_root/commands.toml // 2) app_root/commands/*/**.toml let commands_toml = self.root.as_ref().unwrap().join("config").join("commands.toml"); // println!("commands toml: {}", commands_toml.display()); if commands_toml.exists() { retn.push(commands_toml); } let mut commands_dir = self.root.as_ref().unwrap().join("config/commands"); if commands_dir.exists() { commands_dir = commands_dir.join("**/*.toml"); for entry in glob(commands_dir.to_str().unwrap()).unwrap() { match entry { Ok(e) => retn.push(e), Err(e) => log_error!("Error processing commands toml: {}", e) } } } } retn } }
{ log_error!( "Config path {} either does not exists or is not accessible", path.display() ); exit(1); }
conditional_block
config.rs
use crate::utility::location::Location; use crate::exit_on_bad_config; use origen_metal::{config, scrub_path}; use origen_metal::config::{Environment, File}; use std::collections::HashMap; use std::path::{Path, PathBuf}; use crate::om::glob::glob; use std::process::exit; use super::target; const PUBLISHER_OPTIONS: &[&str] = &["system", "package_app", "upload_app"]; const BYPASS_APP_CONFIG_ENV_VAR: &str = "origen_app_bypass_config_lookup"; const APP_CONFIG_PATHS: &str = "origen_app_config_paths"; macro_rules! use_app_config { () => {{ !std::env::var_os($crate::core::application::config::BYPASS_APP_CONFIG_ENV_VAR).is_some() }} } #[derive(Debug, Deserialize)] pub struct CurrentState { pub target: Option<Vec<String>> } impl CurrentState { pub fn build(root: &PathBuf) -> Self { let file = root.join(".origen").join("application.toml"); let mut s = config::Config::builder().set_default("target", None::<Vec<String>>).unwrap(); if file.exists() { s = s.add_source(File::with_name(&format!("{}", file.display()))); } let cb = exit_on_bad_config!(s.build()); let slf: Self = exit_on_bad_config!(cb.try_deserialize()); slf } pub fn apply_to(&mut self, config: &mut Config) { if let Some(t) = self.target.as_ref() { config.target = Some(t.to_owned()) } else { if let Some(t) = &config.target { let clean_defaults = target::set_at_root(t.iter().map( |s| s.as_str() ).collect(), config.root.as_ref().unwrap()); self.target = Some(clean_defaults); }
let mut slf = Self::build(config.root.as_ref().unwrap()); slf.apply_to(config); } } } #[derive(Debug, Deserialize)] // If you add an attribute to this you must also update: // * pyapi/src/lib.rs to convert it to Python // * default function below to define the default value // * add an example of it to src/app_generators/templates/app/config/application.toml pub struct Config { pub name: String, pub target: Option<Vec<String>>, pub mode: String, /// Don't use this unless you know what you're doing, use origen::STATUS::output_dir() instead, since /// that accounts for the output directory being overridden by the current command pub output_directory: Option<String>, /// Don't use this unless you know what you're doing, use origen::STATUS::reference_dir() instead, since /// that accounts for the reference directory being overridden by the current command pub reference_directory: Option<String>, pub website_output_directory: Option<String>, pub website_source_directory: Option<String>, pub website_release_location: Option<Location>, pub website_release_name: Option<String>, pub root: Option<PathBuf>, pub revision_control: Option<HashMap<String, String>>, pub unit_tester: Option<HashMap<String, String>>, pub publisher: Option<HashMap<String, String>>, pub linter: Option<HashMap<String, String>>, pub release_scribe: Option<HashMap<String, String>>, pub app_session_root: Option<String>, pub commands: Option<Vec<String>>, } impl Config { pub fn refresh(&mut self) { let latest = Self::build(self.root.as_ref().unwrap(), false); self.name = latest.name; self.target = latest.target; self.mode = latest.mode; self.reference_directory = latest.reference_directory; self.website_output_directory = latest.website_output_directory; self.website_source_directory = latest.website_source_directory; self.website_release_location = latest.website_release_location; self.website_release_name = latest.website_release_name; self.revision_control = latest.revision_control; self.unit_tester = 
latest.unit_tester; self.publisher = latest.publisher; self.linter = latest.linter; self.release_scribe = latest.release_scribe; self.app_session_root = latest.app_session_root; self.commands = latest.commands; } /// Builds a new config from all application.toml files found at the given app root pub fn build(root: &Path, default_only: bool) -> Config { log_trace!("Building app config"); let mut s = config::Config::builder() .set_default("target", None::<Vec<String>>) .unwrap() .set_default("mode", "development".to_string()) .unwrap() .set_default("revision_control", None::<HashMap<String, String>>) .unwrap() .set_default("unit_tester", None::<HashMap<String, String>>) .unwrap() .set_default("publisher", None::<HashMap<String, String>>) .unwrap() .set_default("linter", None::<HashMap<String, String>>) .unwrap() .set_default("release_scribe", None::<HashMap<String, String>>) .unwrap() .set_default("app_session_root", None::<String>) .unwrap() .set_default("commands", None::<Vec<String>>) .unwrap(); let mut files: Vec<PathBuf> = Vec::new(); if let Some(paths) = std::env::var_os(APP_CONFIG_PATHS) { log_trace!("Found custom config paths: {:?}", paths); for path in std::env::split_paths(&paths) { log_trace!("Looking for Origen app config file at '{}'", path.display()); if path.is_file() { if let Some(ext) = path.extension() { if ext == "toml" { files.push(path); } else { log_error!( "Expected file {} to have extension '.toml'. Found '{}'", path.display(), ext.to_string_lossy() ) } } else { // accept a file without an extension. 
will be interpreted as a .toml files.push(path); } } else if path.is_dir() { let f = path.join("application.toml"); if f.exists() { files.push(f); } } else { log_error!( "Config path {} either does not exists or is not accessible", path.display() ); exit(1); } } } if use_app_config!() { let file = root.join("config").join("application.toml"); if file.exists() { files.push(file); } } else { // Bypass Origen's default configuration lookup - use only the enumerated configs log_trace!("Bypassing Origen's App Config Lookup"); } for f in files.iter().rev() { log_trace!("Loading Origen config file from '{}'", f.display()); s = s.add_source(File::with_name(&format!("{}", f.display()))); } s = s.add_source(Environment::with_prefix("origen_app").list_separator(",").with_list_parse_key("target").with_list_parse_key("commands").try_parsing(true)); let cb = exit_on_bad_config!(s.build()); let mut c: Self = exit_on_bad_config!(cb.try_deserialize()); c.root = Some(root.to_path_buf()); // TODO // if let Some(l) = loc { // c.website_release_location = Some(Location::new(&l)); // } log_trace!("Completed building app config"); c.validate_options(); if !default_only { CurrentState::build_and_apply(&mut c); } c } pub fn validate_options(&self) { log_trace!("Validating available options..."); if let Some(targets) = self.target.as_ref() { log_trace!("\tValidating default target..."); for t in targets { target::clean_name(t, "targets", true, self.root.as_ref().unwrap()); } log_trace!("\tValidating default target!"); } log_trace!("\tValidating publisher options..."); for unknown in self.validate_publisher_options() { log_warning!("Unknown Publisher Option '{}'", unknown); } log_trace!("\tFinished validating publisher options"); log_trace!("Finished checking configs!"); } pub fn validate_publisher_options(&self) -> Vec<String> { let mut unknowns: Vec<String> = vec![]; if let Some(p) = &self.publisher { for (opt, _) in p.iter() { if !PUBLISHER_OPTIONS.contains(&opt.as_str()) { 
unknowns.push(opt.clone()); } } } unknowns } pub fn cmd_paths(&self) -> Vec<PathBuf> { let mut retn = vec!(); if let Some(cmds) = self.commands.as_ref() { // Load in only the commands explicitly given for cmds_toml in cmds { let ct = self.root.as_ref().unwrap().join("config").join(cmds_toml); if ct.exists() { retn.push(ct.to_owned()); } else { log_error!("Can not locate app commands file '{}'", scrub_path!(ct).display()) } } } else { // Load in any commands from: // 1) app_root/commands.toml // 2) app_root/commands/*/**.toml let commands_toml = self.root.as_ref().unwrap().join("config").join("commands.toml"); // println!("commands toml: {}", commands_toml.display()); if commands_toml.exists() { retn.push(commands_toml); } let mut commands_dir = self.root.as_ref().unwrap().join("config/commands"); if commands_dir.exists() { commands_dir = commands_dir.join("**/*.toml"); for entry in glob(commands_dir.to_str().unwrap()).unwrap() { match entry { Ok(e) => retn.push(e), Err(e) => log_error!("Error processing commands toml: {}", e) } } } } retn } }
} } pub fn build_and_apply(config: &mut Config) { if use_app_config!() {
random_line_split
client_handler.go
// Copyright 2019 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package les import ( "math/big" "sync" "time" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/mclock" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/eth/downloader" "github.com/ethereum/go-ethereum/light" "github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/p2p" "github.com/ethereum/go-ethereum/params" ) // clientHandler is responsible for receiving and processing all incoming server // responses. type clientHandler struct { ulc *ulc checkpoint *params.TrustedCheckpoint fetcher *lightFetcher downloader *downloader.Downloader backend *LightEthereum closeCh chan struct{} wg sync.WaitGroup // WaitGroup used to track all connected peers. syncDone func() // Test hooks when syncing is done. 
} func newClientHandler(ulcServers []string, ulcFraction int, checkpoint *params.TrustedCheckpoint, backend *LightEthereum) *clientHandler { handler := &clientHandler{ checkpoint: checkpoint, backend: backend, closeCh: make(chan struct{}), } if ulcServers != nil { ulc, err := newULC(ulcServers, ulcFraction) if err != nil { log.Error("Failed to initialize ultra light client") } handler.ulc = ulc log.Info("Enable ultra light client mode") } var height uint64 if checkpoint != nil { height = (checkpoint.SectionIndex+1)*params.CHTFrequency - 1 } handler.fetcher = newLightFetcher(handler) handler.downloader = downloader.New(height, backend.chainDb, nil, backend.eventMux, nil, backend.blockchain, handler.removePeer) handler.backend.peers.notify((*downloaderPeerNotify)(handler)) return handler } func (h *clientHandler) stop() { close(h.closeCh) h.downloader.Terminate() h.fetcher.close() h.wg.Wait() } // runPeer is the p2p protocol run function for the given version. func (h *clientHandler) runPeer(version uint, p *p2p.Peer, rw p2p.MsgReadWriter) error { trusted := false if h.ulc != nil { trusted = h.ulc.trusted(p.ID()) } peer := newPeer(int(version), h.backend.config.NetworkId, trusted, p, newMeteredMsgWriter(rw, int(version))) peer.poolEntry = h.backend.serverPool.connect(peer, peer.Node()) if peer.poolEntry == nil { return p2p.DiscRequested } h.wg.Add(1) defer h.wg.Done() err := h.handle(peer) h.backend.serverPool.disconnect(peer.poolEntry) return err } func (h *clientHandler) handle(p *peer) error { if h.backend.peers.Len() >= h.backend.config.LightPeers && !p.Peer.Info().Network.Trusted { return p2p.DiscTooManyPeers } p.Log().Debug("Light Ethereum peer connected", "name", p.Name()) // Execute the LES handshake var ( head = h.backend.blockchain.CurrentHeader() hash = head.Hash() number = head.Number.Uint64() td = h.backend.blockchain.GetTd(hash, number) ) if err := p.Handshake(td, hash, number, h.backend.blockchain.Genesis().Hash(), nil); err != nil { 
p.Log().Debug("Light Ethereum handshake failed", "err", err) return err } // Register the peer locally if err := h.backend.peers.Register(p); err != nil { p.Log().Error("Light Ethereum peer registration failed", "err", err) return err } serverConnectionGauge.Update(int64(h.backend.peers.Len())) connectedAt := mclock.Now() defer func() { h.backend.peers.Unregister(p.id) connectionTimer.Update(time.Duration(mclock.Now() - connectedAt)) serverConnectionGauge.Update(int64(h.backend.peers.Len())) }() h.fetcher.announce(p, p.headInfo) // pool entry can be nil during the unit test. if p.poolEntry != nil { h.backend.serverPool.registered(p.poolEntry) } // Spawn a main loop to handle all incoming messages. for { if err := h.handleMsg(p); err != nil { p.Log().Debug("Light Ethereum message handling failed", "err", err) p.fcServer.DumpLogs() return err } } } // handleMsg is invoked whenever an inbound message is received from a remote // peer. The remote connection is torn down upon returning any error. 
func (h *clientHandler) handleMsg(p *peer) error { // Read the next message from the remote peer, and ensure it's fully consumed msg, err := p.rw.ReadMsg() if err != nil { return err } p.Log().Trace("Light Ethereum message arrived", "code", msg.Code, "bytes", msg.Size) if msg.Size > ProtocolMaxMsgSize { return errResp(ErrMsgTooLarge, "%v > %v", msg.Size, ProtocolMaxMsgSize) } defer msg.Discard() var deliverMsg *Msg // Handle the message depending on its contents switch msg.Code { case AnnounceMsg: p.Log().Trace("Received announce message") var req announceData if err := msg.Decode(&req); err != nil { return errResp(ErrDecode, "%v: %v", msg, err) } if err := req.sanityCheck(); err != nil { return err } update, size := req.Update.decode() if p.rejectUpdate(size) { return errResp(ErrRequestRejected, "") } p.updateFlowControl(update) if req.Hash != (common.Hash{}) { if p.announceType == announceTypeNone { return errResp(ErrUnexpectedResponse, "") } if p.announceType == announceTypeSigned { if err := req.checkSignature(p.ID(), update); err != nil { p.Log().Trace("Invalid announcement signature", "err", err) return err } p.Log().Trace("Valid announcement signature") } p.Log().Trace("Announce message content", "number", req.Number, "hash", req.Hash, "td", req.Td, "reorg", req.ReorgDepth) h.fetcher.announce(p, &req) } case BlockHeadersMsg: p.Log().Trace("Received block header response message") var resp struct { ReqID, BV uint64 Headers []*types.Header } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) if h.fetcher.requestedID(resp.ReqID) { h.fetcher.deliverHeaders(p, resp.ReqID, resp.Headers) } else { if err := h.downloader.DeliverHeaders(p.id, resp.Headers); err != nil { log.Debug("Failed to deliver headers", "err", err) } } case BlockBodiesMsg: p.Log().Trace("Received block bodies response") var resp struct { ReqID, BV uint64 Data []*types.Body } if err := msg.Decode(&resp); err 
!= nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgBlockBodies, ReqID: resp.ReqID, Obj: resp.Data, } case CodeMsg: p.Log().Trace("Received code response") var resp struct { ReqID, BV uint64 Data [][]byte } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgCode, ReqID: resp.ReqID, Obj: resp.Data, } case ReceiptsMsg: p.Log().Trace("Received receipts response") var resp struct { ReqID, BV uint64 Receipts []types.Receipts
} p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgReceipts, ReqID: resp.ReqID, Obj: resp.Receipts, } case ProofsV2Msg: p.Log().Trace("Received les/2 proofs response") var resp struct { ReqID, BV uint64 Data light.NodeList } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgProofsV2, ReqID: resp.ReqID, Obj: resp.Data, } case HelperTrieProofsMsg: p.Log().Trace("Received helper trie proof response") var resp struct { ReqID, BV uint64 Data HelperTrieResps } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgHelperTrieProofs, ReqID: resp.ReqID, Obj: resp.Data, } case TxStatusMsg: p.Log().Trace("Received tx status response") var resp struct { ReqID, BV uint64 Status []light.TxStatus } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgTxStatus, ReqID: resp.ReqID, Obj: resp.Status, } case StopMsg: p.freezeServer(true) h.backend.retriever.frozen(p) p.Log().Debug("Service stopped") case ResumeMsg: var bv uint64 if err := msg.Decode(&bv); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ResumeFreeze(bv) p.freezeServer(false) p.Log().Debug("Service resumed") default: p.Log().Trace("Received invalid message", "code", msg.Code) return errResp(ErrInvalidMsgCode, "%v", msg.Code) } // Deliver the received response to retriever. 
if deliverMsg != nil { if err := h.backend.retriever.deliver(p, deliverMsg); err != nil { p.responseErrors++ if p.responseErrors > maxResponseErrors { return err } } } return nil } func (h *clientHandler) removePeer(id string) { h.backend.peers.Unregister(id) } type peerConnection struct { handler *clientHandler peer *peer } func (pc *peerConnection) Head() (common.Hash, *big.Int) { return pc.peer.HeadAndTd() } func (pc *peerConnection) RequestHeadersByHash(origin common.Hash, amount int, skip int, reverse bool) error { rq := &distReq{ getCost: func(dp distPeer) uint64 { peer := dp.(*peer) return peer.GetRequestCost(GetBlockHeadersMsg, amount) }, canSend: func(dp distPeer) bool { return dp.(*peer) == pc.peer }, request: func(dp distPeer) func() { reqID := genReqID() peer := dp.(*peer) cost := peer.GetRequestCost(GetBlockHeadersMsg, amount) peer.fcServer.QueuedRequest(reqID, cost) return func() { peer.RequestHeadersByHash(reqID, cost, origin, amount, skip, reverse) } }, } _, ok := <-pc.handler.backend.reqDist.queue(rq) if !ok { return light.ErrNoPeers } return nil } func (pc *peerConnection) RequestHeadersByNumber(origin uint64, amount int, skip int, reverse bool) error { rq := &distReq{ getCost: func(dp distPeer) uint64 { peer := dp.(*peer) return peer.GetRequestCost(GetBlockHeadersMsg, amount) }, canSend: func(dp distPeer) bool { return dp.(*peer) == pc.peer }, request: func(dp distPeer) func() { reqID := genReqID() peer := dp.(*peer) cost := peer.GetRequestCost(GetBlockHeadersMsg, amount) peer.fcServer.QueuedRequest(reqID, cost) return func() { peer.RequestHeadersByNumber(reqID, cost, origin, amount, skip, reverse) } }, } _, ok := <-pc.handler.backend.reqDist.queue(rq) if !ok { return light.ErrNoPeers } return nil } // downloaderPeerNotify implements peerSetNotify type downloaderPeerNotify clientHandler func (d *downloaderPeerNotify) registerPeer(p *peer) { h := (*clientHandler)(d) pc := &peerConnection{ handler: h, peer: p, } h.downloader.RegisterLightPeer(p.id, 
ethVersion, pc) } func (d *downloaderPeerNotify) unregisterPeer(p *peer) { h := (*clientHandler)(d) h.downloader.UnregisterPeer(p.id) }
} if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err)
random_line_split
client_handler.go
// Copyright 2019 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package les import ( "math/big" "sync" "time" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/mclock" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/eth/downloader" "github.com/ethereum/go-ethereum/light" "github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/p2p" "github.com/ethereum/go-ethereum/params" ) // clientHandler is responsible for receiving and processing all incoming server // responses. type clientHandler struct { ulc *ulc checkpoint *params.TrustedCheckpoint fetcher *lightFetcher downloader *downloader.Downloader backend *LightEthereum closeCh chan struct{} wg sync.WaitGroup // WaitGroup used to track all connected peers. syncDone func() // Test hooks when syncing is done. 
} func newClientHandler(ulcServers []string, ulcFraction int, checkpoint *params.TrustedCheckpoint, backend *LightEthereum) *clientHandler { handler := &clientHandler{ checkpoint: checkpoint, backend: backend, closeCh: make(chan struct{}), } if ulcServers != nil { ulc, err := newULC(ulcServers, ulcFraction) if err != nil { log.Error("Failed to initialize ultra light client") } handler.ulc = ulc log.Info("Enable ultra light client mode") } var height uint64 if checkpoint != nil { height = (checkpoint.SectionIndex+1)*params.CHTFrequency - 1 } handler.fetcher = newLightFetcher(handler) handler.downloader = downloader.New(height, backend.chainDb, nil, backend.eventMux, nil, backend.blockchain, handler.removePeer) handler.backend.peers.notify((*downloaderPeerNotify)(handler)) return handler } func (h *clientHandler) stop()
// runPeer is the p2p protocol run function for the given version. func (h *clientHandler) runPeer(version uint, p *p2p.Peer, rw p2p.MsgReadWriter) error { trusted := false if h.ulc != nil { trusted = h.ulc.trusted(p.ID()) } peer := newPeer(int(version), h.backend.config.NetworkId, trusted, p, newMeteredMsgWriter(rw, int(version))) peer.poolEntry = h.backend.serverPool.connect(peer, peer.Node()) if peer.poolEntry == nil { return p2p.DiscRequested } h.wg.Add(1) defer h.wg.Done() err := h.handle(peer) h.backend.serverPool.disconnect(peer.poolEntry) return err } func (h *clientHandler) handle(p *peer) error { if h.backend.peers.Len() >= h.backend.config.LightPeers && !p.Peer.Info().Network.Trusted { return p2p.DiscTooManyPeers } p.Log().Debug("Light Ethereum peer connected", "name", p.Name()) // Execute the LES handshake var ( head = h.backend.blockchain.CurrentHeader() hash = head.Hash() number = head.Number.Uint64() td = h.backend.blockchain.GetTd(hash, number) ) if err := p.Handshake(td, hash, number, h.backend.blockchain.Genesis().Hash(), nil); err != nil { p.Log().Debug("Light Ethereum handshake failed", "err", err) return err } // Register the peer locally if err := h.backend.peers.Register(p); err != nil { p.Log().Error("Light Ethereum peer registration failed", "err", err) return err } serverConnectionGauge.Update(int64(h.backend.peers.Len())) connectedAt := mclock.Now() defer func() { h.backend.peers.Unregister(p.id) connectionTimer.Update(time.Duration(mclock.Now() - connectedAt)) serverConnectionGauge.Update(int64(h.backend.peers.Len())) }() h.fetcher.announce(p, p.headInfo) // pool entry can be nil during the unit test. if p.poolEntry != nil { h.backend.serverPool.registered(p.poolEntry) } // Spawn a main loop to handle all incoming messages. 
for { if err := h.handleMsg(p); err != nil { p.Log().Debug("Light Ethereum message handling failed", "err", err) p.fcServer.DumpLogs() return err } } } // handleMsg is invoked whenever an inbound message is received from a remote // peer. The remote connection is torn down upon returning any error. func (h *clientHandler) handleMsg(p *peer) error { // Read the next message from the remote peer, and ensure it's fully consumed msg, err := p.rw.ReadMsg() if err != nil { return err } p.Log().Trace("Light Ethereum message arrived", "code", msg.Code, "bytes", msg.Size) if msg.Size > ProtocolMaxMsgSize { return errResp(ErrMsgTooLarge, "%v > %v", msg.Size, ProtocolMaxMsgSize) } defer msg.Discard() var deliverMsg *Msg // Handle the message depending on its contents switch msg.Code { case AnnounceMsg: p.Log().Trace("Received announce message") var req announceData if err := msg.Decode(&req); err != nil { return errResp(ErrDecode, "%v: %v", msg, err) } if err := req.sanityCheck(); err != nil { return err } update, size := req.Update.decode() if p.rejectUpdate(size) { return errResp(ErrRequestRejected, "") } p.updateFlowControl(update) if req.Hash != (common.Hash{}) { if p.announceType == announceTypeNone { return errResp(ErrUnexpectedResponse, "") } if p.announceType == announceTypeSigned { if err := req.checkSignature(p.ID(), update); err != nil { p.Log().Trace("Invalid announcement signature", "err", err) return err } p.Log().Trace("Valid announcement signature") } p.Log().Trace("Announce message content", "number", req.Number, "hash", req.Hash, "td", req.Td, "reorg", req.ReorgDepth) h.fetcher.announce(p, &req) } case BlockHeadersMsg: p.Log().Trace("Received block header response message") var resp struct { ReqID, BV uint64 Headers []*types.Header } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) if h.fetcher.requestedID(resp.ReqID) { h.fetcher.deliverHeaders(p, resp.ReqID, 
resp.Headers) } else { if err := h.downloader.DeliverHeaders(p.id, resp.Headers); err != nil { log.Debug("Failed to deliver headers", "err", err) } } case BlockBodiesMsg: p.Log().Trace("Received block bodies response") var resp struct { ReqID, BV uint64 Data []*types.Body } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgBlockBodies, ReqID: resp.ReqID, Obj: resp.Data, } case CodeMsg: p.Log().Trace("Received code response") var resp struct { ReqID, BV uint64 Data [][]byte } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgCode, ReqID: resp.ReqID, Obj: resp.Data, } case ReceiptsMsg: p.Log().Trace("Received receipts response") var resp struct { ReqID, BV uint64 Receipts []types.Receipts } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgReceipts, ReqID: resp.ReqID, Obj: resp.Receipts, } case ProofsV2Msg: p.Log().Trace("Received les/2 proofs response") var resp struct { ReqID, BV uint64 Data light.NodeList } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgProofsV2, ReqID: resp.ReqID, Obj: resp.Data, } case HelperTrieProofsMsg: p.Log().Trace("Received helper trie proof response") var resp struct { ReqID, BV uint64 Data HelperTrieResps } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgHelperTrieProofs, ReqID: resp.ReqID, Obj: resp.Data, } case TxStatusMsg: p.Log().Trace("Received tx status response") var resp struct { ReqID, BV uint64 Status []light.TxStatus } if err := 
msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgTxStatus, ReqID: resp.ReqID, Obj: resp.Status, } case StopMsg: p.freezeServer(true) h.backend.retriever.frozen(p) p.Log().Debug("Service stopped") case ResumeMsg: var bv uint64 if err := msg.Decode(&bv); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ResumeFreeze(bv) p.freezeServer(false) p.Log().Debug("Service resumed") default: p.Log().Trace("Received invalid message", "code", msg.Code) return errResp(ErrInvalidMsgCode, "%v", msg.Code) } // Deliver the received response to retriever. if deliverMsg != nil { if err := h.backend.retriever.deliver(p, deliverMsg); err != nil { p.responseErrors++ if p.responseErrors > maxResponseErrors { return err } } } return nil } func (h *clientHandler) removePeer(id string) { h.backend.peers.Unregister(id) } type peerConnection struct { handler *clientHandler peer *peer } func (pc *peerConnection) Head() (common.Hash, *big.Int) { return pc.peer.HeadAndTd() } func (pc *peerConnection) RequestHeadersByHash(origin common.Hash, amount int, skip int, reverse bool) error { rq := &distReq{ getCost: func(dp distPeer) uint64 { peer := dp.(*peer) return peer.GetRequestCost(GetBlockHeadersMsg, amount) }, canSend: func(dp distPeer) bool { return dp.(*peer) == pc.peer }, request: func(dp distPeer) func() { reqID := genReqID() peer := dp.(*peer) cost := peer.GetRequestCost(GetBlockHeadersMsg, amount) peer.fcServer.QueuedRequest(reqID, cost) return func() { peer.RequestHeadersByHash(reqID, cost, origin, amount, skip, reverse) } }, } _, ok := <-pc.handler.backend.reqDist.queue(rq) if !ok { return light.ErrNoPeers } return nil } func (pc *peerConnection) RequestHeadersByNumber(origin uint64, amount int, skip int, reverse bool) error { rq := &distReq{ getCost: func(dp distPeer) uint64 { peer := dp.(*peer) return peer.GetRequestCost(GetBlockHeadersMsg, amount) }, 
canSend: func(dp distPeer) bool { return dp.(*peer) == pc.peer }, request: func(dp distPeer) func() { reqID := genReqID() peer := dp.(*peer) cost := peer.GetRequestCost(GetBlockHeadersMsg, amount) peer.fcServer.QueuedRequest(reqID, cost) return func() { peer.RequestHeadersByNumber(reqID, cost, origin, amount, skip, reverse) } }, } _, ok := <-pc.handler.backend.reqDist.queue(rq) if !ok { return light.ErrNoPeers } return nil } // downloaderPeerNotify implements peerSetNotify type downloaderPeerNotify clientHandler func (d *downloaderPeerNotify) registerPeer(p *peer) { h := (*clientHandler)(d) pc := &peerConnection{ handler: h, peer: p, } h.downloader.RegisterLightPeer(p.id, ethVersion, pc) } func (d *downloaderPeerNotify) unregisterPeer(p *peer) { h := (*clientHandler)(d) h.downloader.UnregisterPeer(p.id) }
{ close(h.closeCh) h.downloader.Terminate() h.fetcher.close() h.wg.Wait() }
identifier_body
client_handler.go
// Copyright 2019 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package les import ( "math/big" "sync" "time" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/mclock" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/eth/downloader" "github.com/ethereum/go-ethereum/light" "github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/p2p" "github.com/ethereum/go-ethereum/params" ) // clientHandler is responsible for receiving and processing all incoming server // responses. type clientHandler struct { ulc *ulc checkpoint *params.TrustedCheckpoint fetcher *lightFetcher downloader *downloader.Downloader backend *LightEthereum closeCh chan struct{} wg sync.WaitGroup // WaitGroup used to track all connected peers. syncDone func() // Test hooks when syncing is done. 
} func newClientHandler(ulcServers []string, ulcFraction int, checkpoint *params.TrustedCheckpoint, backend *LightEthereum) *clientHandler { handler := &clientHandler{ checkpoint: checkpoint, backend: backend, closeCh: make(chan struct{}), } if ulcServers != nil { ulc, err := newULC(ulcServers, ulcFraction) if err != nil { log.Error("Failed to initialize ultra light client") } handler.ulc = ulc log.Info("Enable ultra light client mode") } var height uint64 if checkpoint != nil { height = (checkpoint.SectionIndex+1)*params.CHTFrequency - 1 } handler.fetcher = newLightFetcher(handler) handler.downloader = downloader.New(height, backend.chainDb, nil, backend.eventMux, nil, backend.blockchain, handler.removePeer) handler.backend.peers.notify((*downloaderPeerNotify)(handler)) return handler } func (h *clientHandler) stop() { close(h.closeCh) h.downloader.Terminate() h.fetcher.close() h.wg.Wait() } // runPeer is the p2p protocol run function for the given version. func (h *clientHandler) runPeer(version uint, p *p2p.Peer, rw p2p.MsgReadWriter) error { trusted := false if h.ulc != nil { trusted = h.ulc.trusted(p.ID()) } peer := newPeer(int(version), h.backend.config.NetworkId, trusted, p, newMeteredMsgWriter(rw, int(version))) peer.poolEntry = h.backend.serverPool.connect(peer, peer.Node()) if peer.poolEntry == nil { return p2p.DiscRequested } h.wg.Add(1) defer h.wg.Done() err := h.handle(peer) h.backend.serverPool.disconnect(peer.poolEntry) return err } func (h *clientHandler) handle(p *peer) error { if h.backend.peers.Len() >= h.backend.config.LightPeers && !p.Peer.Info().Network.Trusted { return p2p.DiscTooManyPeers } p.Log().Debug("Light Ethereum peer connected", "name", p.Name()) // Execute the LES handshake var ( head = h.backend.blockchain.CurrentHeader() hash = head.Hash() number = head.Number.Uint64() td = h.backend.blockchain.GetTd(hash, number) ) if err := p.Handshake(td, hash, number, h.backend.blockchain.Genesis().Hash(), nil); err != nil { 
p.Log().Debug("Light Ethereum handshake failed", "err", err) return err } // Register the peer locally if err := h.backend.peers.Register(p); err != nil { p.Log().Error("Light Ethereum peer registration failed", "err", err) return err } serverConnectionGauge.Update(int64(h.backend.peers.Len())) connectedAt := mclock.Now() defer func() { h.backend.peers.Unregister(p.id) connectionTimer.Update(time.Duration(mclock.Now() - connectedAt)) serverConnectionGauge.Update(int64(h.backend.peers.Len())) }() h.fetcher.announce(p, p.headInfo) // pool entry can be nil during the unit test. if p.poolEntry != nil { h.backend.serverPool.registered(p.poolEntry) } // Spawn a main loop to handle all incoming messages. for { if err := h.handleMsg(p); err != nil { p.Log().Debug("Light Ethereum message handling failed", "err", err) p.fcServer.DumpLogs() return err } } } // handleMsg is invoked whenever an inbound message is received from a remote // peer. The remote connection is torn down upon returning any error. 
func (h *clientHandler) handleMsg(p *peer) error { // Read the next message from the remote peer, and ensure it's fully consumed msg, err := p.rw.ReadMsg() if err != nil { return err } p.Log().Trace("Light Ethereum message arrived", "code", msg.Code, "bytes", msg.Size) if msg.Size > ProtocolMaxMsgSize { return errResp(ErrMsgTooLarge, "%v > %v", msg.Size, ProtocolMaxMsgSize) } defer msg.Discard() var deliverMsg *Msg // Handle the message depending on its contents switch msg.Code { case AnnounceMsg: p.Log().Trace("Received announce message") var req announceData if err := msg.Decode(&req); err != nil { return errResp(ErrDecode, "%v: %v", msg, err) } if err := req.sanityCheck(); err != nil { return err } update, size := req.Update.decode() if p.rejectUpdate(size) { return errResp(ErrRequestRejected, "") } p.updateFlowControl(update) if req.Hash != (common.Hash{}) { if p.announceType == announceTypeNone { return errResp(ErrUnexpectedResponse, "") } if p.announceType == announceTypeSigned { if err := req.checkSignature(p.ID(), update); err != nil { p.Log().Trace("Invalid announcement signature", "err", err) return err } p.Log().Trace("Valid announcement signature") } p.Log().Trace("Announce message content", "number", req.Number, "hash", req.Hash, "td", req.Td, "reorg", req.ReorgDepth) h.fetcher.announce(p, &req) } case BlockHeadersMsg: p.Log().Trace("Received block header response message") var resp struct { ReqID, BV uint64 Headers []*types.Header } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) if h.fetcher.requestedID(resp.ReqID) { h.fetcher.deliverHeaders(p, resp.ReqID, resp.Headers) } else { if err := h.downloader.DeliverHeaders(p.id, resp.Headers); err != nil { log.Debug("Failed to deliver headers", "err", err) } } case BlockBodiesMsg: p.Log().Trace("Received block bodies response") var resp struct { ReqID, BV uint64 Data []*types.Body } if err := msg.Decode(&resp); err 
!= nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgBlockBodies, ReqID: resp.ReqID, Obj: resp.Data, } case CodeMsg: p.Log().Trace("Received code response") var resp struct { ReqID, BV uint64 Data [][]byte } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgCode, ReqID: resp.ReqID, Obj: resp.Data, } case ReceiptsMsg: p.Log().Trace("Received receipts response") var resp struct { ReqID, BV uint64 Receipts []types.Receipts } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgReceipts, ReqID: resp.ReqID, Obj: resp.Receipts, } case ProofsV2Msg: p.Log().Trace("Received les/2 proofs response") var resp struct { ReqID, BV uint64 Data light.NodeList } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgProofsV2, ReqID: resp.ReqID, Obj: resp.Data, } case HelperTrieProofsMsg: p.Log().Trace("Received helper trie proof response") var resp struct { ReqID, BV uint64 Data HelperTrieResps } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgHelperTrieProofs, ReqID: resp.ReqID, Obj: resp.Data, } case TxStatusMsg: p.Log().Trace("Received tx status response") var resp struct { ReqID, BV uint64 Status []light.TxStatus } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgTxStatus, ReqID: resp.ReqID, Obj: resp.Status, } case StopMsg: p.freezeServer(true) h.backend.retriever.frozen(p) p.Log().Debug("Service stopped") 
case ResumeMsg: var bv uint64 if err := msg.Decode(&bv); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ResumeFreeze(bv) p.freezeServer(false) p.Log().Debug("Service resumed") default: p.Log().Trace("Received invalid message", "code", msg.Code) return errResp(ErrInvalidMsgCode, "%v", msg.Code) } // Deliver the received response to retriever. if deliverMsg != nil { if err := h.backend.retriever.deliver(p, deliverMsg); err != nil { p.responseErrors++ if p.responseErrors > maxResponseErrors { return err } } } return nil } func (h *clientHandler) removePeer(id string) { h.backend.peers.Unregister(id) } type peerConnection struct { handler *clientHandler peer *peer } func (pc *peerConnection) Head() (common.Hash, *big.Int) { return pc.peer.HeadAndTd() } func (pc *peerConnection)
(origin common.Hash, amount int, skip int, reverse bool) error { rq := &distReq{ getCost: func(dp distPeer) uint64 { peer := dp.(*peer) return peer.GetRequestCost(GetBlockHeadersMsg, amount) }, canSend: func(dp distPeer) bool { return dp.(*peer) == pc.peer }, request: func(dp distPeer) func() { reqID := genReqID() peer := dp.(*peer) cost := peer.GetRequestCost(GetBlockHeadersMsg, amount) peer.fcServer.QueuedRequest(reqID, cost) return func() { peer.RequestHeadersByHash(reqID, cost, origin, amount, skip, reverse) } }, } _, ok := <-pc.handler.backend.reqDist.queue(rq) if !ok { return light.ErrNoPeers } return nil } func (pc *peerConnection) RequestHeadersByNumber(origin uint64, amount int, skip int, reverse bool) error { rq := &distReq{ getCost: func(dp distPeer) uint64 { peer := dp.(*peer) return peer.GetRequestCost(GetBlockHeadersMsg, amount) }, canSend: func(dp distPeer) bool { return dp.(*peer) == pc.peer }, request: func(dp distPeer) func() { reqID := genReqID() peer := dp.(*peer) cost := peer.GetRequestCost(GetBlockHeadersMsg, amount) peer.fcServer.QueuedRequest(reqID, cost) return func() { peer.RequestHeadersByNumber(reqID, cost, origin, amount, skip, reverse) } }, } _, ok := <-pc.handler.backend.reqDist.queue(rq) if !ok { return light.ErrNoPeers } return nil } // downloaderPeerNotify implements peerSetNotify type downloaderPeerNotify clientHandler func (d *downloaderPeerNotify) registerPeer(p *peer) { h := (*clientHandler)(d) pc := &peerConnection{ handler: h, peer: p, } h.downloader.RegisterLightPeer(p.id, ethVersion, pc) } func (d *downloaderPeerNotify) unregisterPeer(p *peer) { h := (*clientHandler)(d) h.downloader.UnregisterPeer(p.id) }
RequestHeadersByHash
identifier_name
client_handler.go
// Copyright 2019 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package les import ( "math/big" "sync" "time" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/mclock" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/eth/downloader" "github.com/ethereum/go-ethereum/light" "github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/p2p" "github.com/ethereum/go-ethereum/params" ) // clientHandler is responsible for receiving and processing all incoming server // responses. type clientHandler struct { ulc *ulc checkpoint *params.TrustedCheckpoint fetcher *lightFetcher downloader *downloader.Downloader backend *LightEthereum closeCh chan struct{} wg sync.WaitGroup // WaitGroup used to track all connected peers. syncDone func() // Test hooks when syncing is done. } func newClientHandler(ulcServers []string, ulcFraction int, checkpoint *params.TrustedCheckpoint, backend *LightEthereum) *clientHandler { handler := &clientHandler{ checkpoint: checkpoint, backend: backend, closeCh: make(chan struct{}), } if ulcServers != nil { ulc, err := newULC(ulcServers, ulcFraction) if err != nil
handler.ulc = ulc log.Info("Enable ultra light client mode") } var height uint64 if checkpoint != nil { height = (checkpoint.SectionIndex+1)*params.CHTFrequency - 1 } handler.fetcher = newLightFetcher(handler) handler.downloader = downloader.New(height, backend.chainDb, nil, backend.eventMux, nil, backend.blockchain, handler.removePeer) handler.backend.peers.notify((*downloaderPeerNotify)(handler)) return handler } func (h *clientHandler) stop() { close(h.closeCh) h.downloader.Terminate() h.fetcher.close() h.wg.Wait() } // runPeer is the p2p protocol run function for the given version. func (h *clientHandler) runPeer(version uint, p *p2p.Peer, rw p2p.MsgReadWriter) error { trusted := false if h.ulc != nil { trusted = h.ulc.trusted(p.ID()) } peer := newPeer(int(version), h.backend.config.NetworkId, trusted, p, newMeteredMsgWriter(rw, int(version))) peer.poolEntry = h.backend.serverPool.connect(peer, peer.Node()) if peer.poolEntry == nil { return p2p.DiscRequested } h.wg.Add(1) defer h.wg.Done() err := h.handle(peer) h.backend.serverPool.disconnect(peer.poolEntry) return err } func (h *clientHandler) handle(p *peer) error { if h.backend.peers.Len() >= h.backend.config.LightPeers && !p.Peer.Info().Network.Trusted { return p2p.DiscTooManyPeers } p.Log().Debug("Light Ethereum peer connected", "name", p.Name()) // Execute the LES handshake var ( head = h.backend.blockchain.CurrentHeader() hash = head.Hash() number = head.Number.Uint64() td = h.backend.blockchain.GetTd(hash, number) ) if err := p.Handshake(td, hash, number, h.backend.blockchain.Genesis().Hash(), nil); err != nil { p.Log().Debug("Light Ethereum handshake failed", "err", err) return err } // Register the peer locally if err := h.backend.peers.Register(p); err != nil { p.Log().Error("Light Ethereum peer registration failed", "err", err) return err } serverConnectionGauge.Update(int64(h.backend.peers.Len())) connectedAt := mclock.Now() defer func() { h.backend.peers.Unregister(p.id) 
connectionTimer.Update(time.Duration(mclock.Now() - connectedAt)) serverConnectionGauge.Update(int64(h.backend.peers.Len())) }() h.fetcher.announce(p, p.headInfo) // pool entry can be nil during the unit test. if p.poolEntry != nil { h.backend.serverPool.registered(p.poolEntry) } // Spawn a main loop to handle all incoming messages. for { if err := h.handleMsg(p); err != nil { p.Log().Debug("Light Ethereum message handling failed", "err", err) p.fcServer.DumpLogs() return err } } } // handleMsg is invoked whenever an inbound message is received from a remote // peer. The remote connection is torn down upon returning any error. func (h *clientHandler) handleMsg(p *peer) error { // Read the next message from the remote peer, and ensure it's fully consumed msg, err := p.rw.ReadMsg() if err != nil { return err } p.Log().Trace("Light Ethereum message arrived", "code", msg.Code, "bytes", msg.Size) if msg.Size > ProtocolMaxMsgSize { return errResp(ErrMsgTooLarge, "%v > %v", msg.Size, ProtocolMaxMsgSize) } defer msg.Discard() var deliverMsg *Msg // Handle the message depending on its contents switch msg.Code { case AnnounceMsg: p.Log().Trace("Received announce message") var req announceData if err := msg.Decode(&req); err != nil { return errResp(ErrDecode, "%v: %v", msg, err) } if err := req.sanityCheck(); err != nil { return err } update, size := req.Update.decode() if p.rejectUpdate(size) { return errResp(ErrRequestRejected, "") } p.updateFlowControl(update) if req.Hash != (common.Hash{}) { if p.announceType == announceTypeNone { return errResp(ErrUnexpectedResponse, "") } if p.announceType == announceTypeSigned { if err := req.checkSignature(p.ID(), update); err != nil { p.Log().Trace("Invalid announcement signature", "err", err) return err } p.Log().Trace("Valid announcement signature") } p.Log().Trace("Announce message content", "number", req.Number, "hash", req.Hash, "td", req.Td, "reorg", req.ReorgDepth) h.fetcher.announce(p, &req) } case BlockHeadersMsg: 
p.Log().Trace("Received block header response message") var resp struct { ReqID, BV uint64 Headers []*types.Header } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) if h.fetcher.requestedID(resp.ReqID) { h.fetcher.deliverHeaders(p, resp.ReqID, resp.Headers) } else { if err := h.downloader.DeliverHeaders(p.id, resp.Headers); err != nil { log.Debug("Failed to deliver headers", "err", err) } } case BlockBodiesMsg: p.Log().Trace("Received block bodies response") var resp struct { ReqID, BV uint64 Data []*types.Body } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgBlockBodies, ReqID: resp.ReqID, Obj: resp.Data, } case CodeMsg: p.Log().Trace("Received code response") var resp struct { ReqID, BV uint64 Data [][]byte } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgCode, ReqID: resp.ReqID, Obj: resp.Data, } case ReceiptsMsg: p.Log().Trace("Received receipts response") var resp struct { ReqID, BV uint64 Receipts []types.Receipts } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgReceipts, ReqID: resp.ReqID, Obj: resp.Receipts, } case ProofsV2Msg: p.Log().Trace("Received les/2 proofs response") var resp struct { ReqID, BV uint64 Data light.NodeList } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgProofsV2, ReqID: resp.ReqID, Obj: resp.Data, } case HelperTrieProofsMsg: p.Log().Trace("Received helper trie proof response") var resp struct { ReqID, BV uint64 Data HelperTrieResps } if err := 
msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgHelperTrieProofs, ReqID: resp.ReqID, Obj: resp.Data, } case TxStatusMsg: p.Log().Trace("Received tx status response") var resp struct { ReqID, BV uint64 Status []light.TxStatus } if err := msg.Decode(&resp); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ReceivedReply(resp.ReqID, resp.BV) deliverMsg = &Msg{ MsgType: MsgTxStatus, ReqID: resp.ReqID, Obj: resp.Status, } case StopMsg: p.freezeServer(true) h.backend.retriever.frozen(p) p.Log().Debug("Service stopped") case ResumeMsg: var bv uint64 if err := msg.Decode(&bv); err != nil { return errResp(ErrDecode, "msg %v: %v", msg, err) } p.fcServer.ResumeFreeze(bv) p.freezeServer(false) p.Log().Debug("Service resumed") default: p.Log().Trace("Received invalid message", "code", msg.Code) return errResp(ErrInvalidMsgCode, "%v", msg.Code) } // Deliver the received response to retriever. 
if deliverMsg != nil { if err := h.backend.retriever.deliver(p, deliverMsg); err != nil { p.responseErrors++ if p.responseErrors > maxResponseErrors { return err } } } return nil } func (h *clientHandler) removePeer(id string) { h.backend.peers.Unregister(id) } type peerConnection struct { handler *clientHandler peer *peer } func (pc *peerConnection) Head() (common.Hash, *big.Int) { return pc.peer.HeadAndTd() } func (pc *peerConnection) RequestHeadersByHash(origin common.Hash, amount int, skip int, reverse bool) error { rq := &distReq{ getCost: func(dp distPeer) uint64 { peer := dp.(*peer) return peer.GetRequestCost(GetBlockHeadersMsg, amount) }, canSend: func(dp distPeer) bool { return dp.(*peer) == pc.peer }, request: func(dp distPeer) func() { reqID := genReqID() peer := dp.(*peer) cost := peer.GetRequestCost(GetBlockHeadersMsg, amount) peer.fcServer.QueuedRequest(reqID, cost) return func() { peer.RequestHeadersByHash(reqID, cost, origin, amount, skip, reverse) } }, } _, ok := <-pc.handler.backend.reqDist.queue(rq) if !ok { return light.ErrNoPeers } return nil } func (pc *peerConnection) RequestHeadersByNumber(origin uint64, amount int, skip int, reverse bool) error { rq := &distReq{ getCost: func(dp distPeer) uint64 { peer := dp.(*peer) return peer.GetRequestCost(GetBlockHeadersMsg, amount) }, canSend: func(dp distPeer) bool { return dp.(*peer) == pc.peer }, request: func(dp distPeer) func() { reqID := genReqID() peer := dp.(*peer) cost := peer.GetRequestCost(GetBlockHeadersMsg, amount) peer.fcServer.QueuedRequest(reqID, cost) return func() { peer.RequestHeadersByNumber(reqID, cost, origin, amount, skip, reverse) } }, } _, ok := <-pc.handler.backend.reqDist.queue(rq) if !ok { return light.ErrNoPeers } return nil } // downloaderPeerNotify implements peerSetNotify type downloaderPeerNotify clientHandler func (d *downloaderPeerNotify) registerPeer(p *peer) { h := (*clientHandler)(d) pc := &peerConnection{ handler: h, peer: p, } h.downloader.RegisterLightPeer(p.id, 
ethVersion, pc) } func (d *downloaderPeerNotify) unregisterPeer(p *peer) { h := (*clientHandler)(d) h.downloader.UnregisterPeer(p.id) }
{ log.Error("Failed to initialize ultra light client") }
conditional_block
files_test.go
/* Tests for files.go MIT licenced, please see LICENCE RCL January 2020 */ package files import ( "io/ioutil" "os" "strings" "testing" "time" "github.com/google/go-cmp/cmp" ) func ptime(ti string) time.Time { tp, err := time.Parse("2006-01-02 15:04:05 -0700 MST", ti) if err != nil { panic(err) } return tp } // TestFilesXochitlWithPDF tests the xochitl file format for a test with // a backing pdf func TestFilesXochitlWithPDF(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", identifier: "cc8313bb-5fab-4ab5-af39-46e6d4160df3", }, Version: 17, VisibleName: "tpl", LastModified: ptime("2019-12-28 23:17:19 +0000 GMT"), PageCount: 2, Pages: []RMPage{ { PageNo: 0, Identifier: "da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224", rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224.rm"}, LayerNames: []string{"Layer 1", "Layer 2 is empty"}, }, { PageNo: 1, Identifier: "7794dbce-2506-4fb0-99fd-9ec031426d57", rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/7794dbce-2506-4fb0-99fd-9ec031426d57.rm"}, LayerNames: []string{"Layer 1", "Layer 2"}, }, }, } if rmf.pdfPath != expected.pdfPath { t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath) } if rmf.identifier != expected.identifier { t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier) } if rmf.Version != expected.Version { t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version) } if rmf.VisibleName != expected.VisibleName { t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName) } if rmf.PageCount != expected.PageCount { t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount) } if rmf.Pages[1].PageNo != expected.Pages[1].PageNo { t.Errorf("Page two PageNo got %v 
wanted %v", rmf.Pages[1].PageNo, expected.Pages[1].PageNo) } if rmf.Pages[1].Identifier != expected.Pages[1].Identifier { t.Errorf("Page two Identifier got %v wanted %v", rmf.Pages[1].Identifier, expected.Pages[1].Identifier) } if rmf.Pages[1].rmPath != expected.Pages[1].rmPath { t.Errorf("Page two rmPath got %v wanted %v", rmf.Pages[1].rmPath, expected.Pages[1].rmPath) } if rmf.Pages[1].LayerNames[1] != expected.Pages[1].LayerNames[1] { t.Error("Page two second layer names not the same") } // https://stackoverflow.com/a/29339052 redirStdOut := func(log string) string { oldStdout := os.Stdout r, w, _ := os.Pipe() os.Stdout = w // debug! rmf.Debug(log) w.Close() s, _ := ioutil.ReadAll(r) r.Close() os.Stdout = oldStdout return string(s) } rmf.Debugging = false s := redirStdOut("hi") if s != "" { t.Error("debug should be nil") } rmf.Debugging = true s = redirStdOut("hi") if s != "hi\n" { t.Errorf("debug got %s not %s", s, "hi") } } // TestFilesXochitlWithoutPDF tests xochitl format files without a pdf func TestFilesXochitlWithoutPDF(t *testing.T) { template := "../templates/A4.pdf" rmf, err := RMFiler("../testfiles/d34df12d-e72b-4939-a791-5b34b3a810e7", template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "", // no pdf identifier: "d34df12d-e72b-4939-a791-5b34b3a810e7", }, Version: 0, VisibleName: "toolbox", LastModified: ptime("2020-01-05 13:03:52 +0000 GMT"), PageCount: 1, Pages: []RMPage{ { PageNo: 0, Identifier: "2c277cdb-79a5-4f69-b583-4901d944e77e", rmFileDesc: &rmFileDesc{rmPath: "d34df12d-e72b-4939-a791-5b34b3a810e7/2c277cdb-79a5-4f69-b583-4901d944e77e.rm"}, LayerNames: []string{"Layer 1"}, }, }, } if rmf.pdfPath != expected.pdfPath { t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath) } if rmf.identifier != expected.identifier { t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier) } if rmf.Version != expected.Version { t.Errorf("Version got %v wanted %v", 
rmf.Version, expected.Version) } if rmf.VisibleName != expected.VisibleName { t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName) } if rmf.PageCount != expected.PageCount { t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount) } if rmf.Pages[0].PageNo != expected.Pages[0].PageNo { t.Errorf("Page one PageNo got %v wanted %v", rmf.Pages[0].PageNo, expected.Pages[0].PageNo) } if rmf.Pages[0].Identifier != expected.Pages[0].Identifier { t.Errorf("Page one Identifier got %v wanted %v", rmf.Pages[0].Identifier, expected.Pages[0].Identifier) } if rmf.Pages[0].rmPath != expected.Pages[0].rmPath { t.Errorf("Page one rmPath got %v wanted %v", rmf.Pages[0].rmPath, expected.Pages[0].rmPath) } if rmf.Pages[0].LayerNames[0] != expected.Pages[0].LayerNames[0] { t.Error("Page one second layer names not the same") } } // TestInsertedPage checks if an inserted page is detected correctly func TestInsertedPage(t *testing.T) { testUUID := "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c" template := "../templates/A4.pdf" rmf, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Fatalf("Could not open file %v", err) }
expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c.pdf", identifier: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c", }, Version: 0, VisibleName: "insert-pages", LastModified: ptime("2022-09-09 14:13:39 +0100 BST"), Orientation: "portrait", OriginalPageCount: 2, PageCount: 3, Pages: []RMPage{ { PageNo: 0, Identifier: "fa678373-8530-465d-a988-a0b158d957e4", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/fa678373-8530-465d-a988-a0b158d957e4.rm"}, LayerNames: []string{"Layer 1"}, }, { PageNo: 1, Identifier: "0b8b6e65-926c-4269-9109-36fca8718c94", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/0b8b6e65-926c-4269-9109-36fca8718c94.rm"}, LayerNames: []string{"Layer 1"}, }, { PageNo: 2, Identifier: "e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf.rm"}, LayerNames: []string{"Layer 1"}, }, }, RedirectionPageMap: []int{0, -1, 1}, Debugging: false, } opt := cmp.Comparer(func(x, y RMFileInfo) bool { if x.pdfPath != y.pdfPath { t.Errorf("path %s != %s", x.pdfPath, y.pdfPath) return false } if x.identifier != y.identifier { t.Errorf("identifier %s != %s", x.pdfPath, y.pdfPath) return false } if x.Version != y.Version || x.VisibleName != y.VisibleName || x.Orientation != y.Orientation || x.OriginalPageCount != y.OriginalPageCount || x.PageCount != y.PageCount { t.Error("version, visiblename, orientation, originalpagecount or pagecount differ") return false } if len(x.RedirectionPageMap) != len(y.RedirectionPageMap) { t.Errorf("redirection length %d != %d", len(x.RedirectionPageMap), len(y.RedirectionPageMap)) return false } for i, rpm := range x.RedirectionPageMap { if rpm != y.RedirectionPageMap[i] { t.Errorf("redirection page map %d %d != %d", i, rpm, y.RedirectionPageMap[i]) return false } } if len(x.Pages) != len(y.Pages) { t.Errorf("page lengths different %d != %d", len(x.Pages), len(y.Pages)) return false } 
for i, xPage := range x.Pages { yPage := y.Pages[i] if xPage.PageNo != yPage.PageNo { t.Errorf("page %d != %d", xPage.PageNo, yPage.PageNo) return false } if xPage.Identifier != yPage.Identifier { t.Errorf("identifier %s != %s", xPage.Identifier, yPage.Identifier) return false } if xPage.rmPath != yPage.rmPath { t.Errorf("rmpath %x != %s", xPage.rmPath, yPage.rmPath) return false } if len(xPage.LayerNames) != len(yPage.LayerNames) { t.Errorf("layer len %d != %d", len(xPage.LayerNames), len(yPage.LayerNames)) return false } } return true }) // if !cmp.Equal(rmf, expected, cmpopts.IgnoreUnexported(rmf), cmpopts.IgnoreInterfaces(struct{ io.Reader }{})) { if !cmp.Equal(rmf, expected, opt) { t.Errorf("rmf != expected for insert page test") } if len(expected.Pages) != rmf.PageCount { t.Errorf("expected pages %d != rmf pages %d", len(expected.Pages), rmf.PageCount) } if len(rmf.insertedPages) != 1 || rmf.insertedPages[0] != 1 { t.Errorf( "inserted pages %v should equal [1]", rmf.insertedPages, ) } if !cmp.Equal(rmf.insertedPages.insertedPageNos(), []int{2}) { t.Errorf( "human inserted pages %v should equal {2}", rmf.insertedPages.insertedPageNos(), ) } if rmf.insertedPages.insertedPageNumbers() != "2" { t.Errorf( "human inserted pages as string %v should equal '2'", rmf.insertedPages.insertedPageNumbers(), ) } type iterExpected struct { pageNo int pdfPageNo int inserted bool isTemplate bool } iExpectArray := []iterExpected{ {0, 0, false, false}, {1, 0, true, true}, {2, 1, false, false}, } for i := 0; i < rmf.PageCount; i++ { // ignore filehandle in last assignment pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() j := iterExpected{pageNo, pdfPageNo, inserted, isTemplate} e := iExpectArray[i] if j.pageNo != e.pageNo || j.pdfPageNo != e.pdfPageNo || j.inserted != e.inserted || j.isTemplate != e.isTemplate { t.Errorf("iter i %d expected %+v got %+v", i, e, j) } } } // TestHorizontal checks if a horizontal PDF is detected correctly func TestHorizontal(t 
*testing.T) { testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee" template := "" rmf, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Errorf("Could not open file %v", err) } if rmf.Orientation != "landscape" { t.Errorf("Expected landscape orientation, got %s", rmf.Orientation) } } // TestExtensionIgnored checks that when providing an input with an extension // the extension is ignored func TestExtensionIgnored(t *testing.T) { testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee.arbitrary" template := "" _, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Errorf("Could not open file %v", err) } } // TestZipWithNoMetadata tests a zip file with no metadata // note that this older rmapi zip format uses 0-indexed page numbers // // ../testfiles/no-metadata.zip // ddae88d1-7514-43b6-b7de-dcdd18eeb69a.content // ddae88d1-7514-43b6-b7de-dcdd18eeb69a.pagedata // ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0-metadata.json // ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0.rm // // in comparison, see ../testfiles/horizontal_rmapi.zip // e724bba2-266f-434d-aaf2-935d2b405aee.content // e724bba2-266f-434d-aaf2-935d2b405aee.metadata // e724bba2-266f-434d-aaf2-935d2b405aee.pagedata // e724bba2-266f-434d-aaf2-935d2b405aee.pdf // e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6-metadata.json // e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6.rm func TestZipWithNoMetadata(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/no-metadata.zip", template) if err != nil { t.Errorf("Could not open file %v", err) } for i := 0; i < rmf.PageCount; i++ { pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() t.Logf( "pageno %d pdfpageno %d inserted %t istpl %t\n", pageNo, pdfPageNo, inserted, isTemplate, ) } } // TestZipVersion3 tests a remarkable v3.0.4 zip file made by rmapi // // ../testfiles/version3.zip // 701cdc43-04aa-410c-bc6f-3c773105a74d // 701cdc43-04aa-410c-bc6f-3c773105a74d.content // 
701cdc43-04aa-410c-bc6f-3c773105a74d.metadata // 701cdc43-04aa-410c-bc6f-3c773105a74d.pdf func TestZipV3(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/version3.zip", template) expected := "software version 3 not supported -- no rm metadata file found" if !strings.Contains(err.Error(), expected) { t.Errorf("v3 file should error with %s, not %v", expected, err) } pages := 0 for i := 0; i < rmf.PageCount; i++ { pages++ pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() t.Logf( "pageno %d pdfpageno %d inserted %t istpl %t\n", pageNo, pdfPageNo, inserted, isTemplate, ) } if pages != 2 { t.Errorf("page no %d != 2", pages) } }
random_line_split
files_test.go
/* Tests for files.go MIT licenced, please see LICENCE RCL January 2020 */ package files import ( "io/ioutil" "os" "strings" "testing" "time" "github.com/google/go-cmp/cmp" ) func ptime(ti string) time.Time { tp, err := time.Parse("2006-01-02 15:04:05 -0700 MST", ti) if err != nil { panic(err) } return tp } // TestFilesXochitlWithPDF tests the xochitl file format for a test with // a backing pdf func
(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", identifier: "cc8313bb-5fab-4ab5-af39-46e6d4160df3", }, Version: 17, VisibleName: "tpl", LastModified: ptime("2019-12-28 23:17:19 +0000 GMT"), PageCount: 2, Pages: []RMPage{ { PageNo: 0, Identifier: "da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224", rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224.rm"}, LayerNames: []string{"Layer 1", "Layer 2 is empty"}, }, { PageNo: 1, Identifier: "7794dbce-2506-4fb0-99fd-9ec031426d57", rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/7794dbce-2506-4fb0-99fd-9ec031426d57.rm"}, LayerNames: []string{"Layer 1", "Layer 2"}, }, }, } if rmf.pdfPath != expected.pdfPath { t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath) } if rmf.identifier != expected.identifier { t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier) } if rmf.Version != expected.Version { t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version) } if rmf.VisibleName != expected.VisibleName { t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName) } if rmf.PageCount != expected.PageCount { t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount) } if rmf.Pages[1].PageNo != expected.Pages[1].PageNo { t.Errorf("Page two PageNo got %v wanted %v", rmf.Pages[1].PageNo, expected.Pages[1].PageNo) } if rmf.Pages[1].Identifier != expected.Pages[1].Identifier { t.Errorf("Page two Identifier got %v wanted %v", rmf.Pages[1].Identifier, expected.Pages[1].Identifier) } if rmf.Pages[1].rmPath != expected.Pages[1].rmPath { t.Errorf("Page two rmPath got %v wanted %v", rmf.Pages[1].rmPath, expected.Pages[1].rmPath) } if rmf.Pages[1].LayerNames[1] != 
expected.Pages[1].LayerNames[1] { t.Error("Page two second layer names not the same") } // https://stackoverflow.com/a/29339052 redirStdOut := func(log string) string { oldStdout := os.Stdout r, w, _ := os.Pipe() os.Stdout = w // debug! rmf.Debug(log) w.Close() s, _ := ioutil.ReadAll(r) r.Close() os.Stdout = oldStdout return string(s) } rmf.Debugging = false s := redirStdOut("hi") if s != "" { t.Error("debug should be nil") } rmf.Debugging = true s = redirStdOut("hi") if s != "hi\n" { t.Errorf("debug got %s not %s", s, "hi") } } // TestFilesXochitlWithoutPDF tests xochitl format files without a pdf func TestFilesXochitlWithoutPDF(t *testing.T) { template := "../templates/A4.pdf" rmf, err := RMFiler("../testfiles/d34df12d-e72b-4939-a791-5b34b3a810e7", template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "", // no pdf identifier: "d34df12d-e72b-4939-a791-5b34b3a810e7", }, Version: 0, VisibleName: "toolbox", LastModified: ptime("2020-01-05 13:03:52 +0000 GMT"), PageCount: 1, Pages: []RMPage{ { PageNo: 0, Identifier: "2c277cdb-79a5-4f69-b583-4901d944e77e", rmFileDesc: &rmFileDesc{rmPath: "d34df12d-e72b-4939-a791-5b34b3a810e7/2c277cdb-79a5-4f69-b583-4901d944e77e.rm"}, LayerNames: []string{"Layer 1"}, }, }, } if rmf.pdfPath != expected.pdfPath { t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath) } if rmf.identifier != expected.identifier { t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier) } if rmf.Version != expected.Version { t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version) } if rmf.VisibleName != expected.VisibleName { t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName) } if rmf.PageCount != expected.PageCount { t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount) } if rmf.Pages[0].PageNo != expected.Pages[0].PageNo { t.Errorf("Page one PageNo got %v wanted %v", rmf.Pages[0].PageNo, 
expected.Pages[0].PageNo) } if rmf.Pages[0].Identifier != expected.Pages[0].Identifier { t.Errorf("Page one Identifier got %v wanted %v", rmf.Pages[0].Identifier, expected.Pages[0].Identifier) } if rmf.Pages[0].rmPath != expected.Pages[0].rmPath { t.Errorf("Page one rmPath got %v wanted %v", rmf.Pages[0].rmPath, expected.Pages[0].rmPath) } if rmf.Pages[0].LayerNames[0] != expected.Pages[0].LayerNames[0] { t.Error("Page one second layer names not the same") } } // TestInsertedPage checks if an inserted page is detected correctly func TestInsertedPage(t *testing.T) { testUUID := "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c" template := "../templates/A4.pdf" rmf, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c.pdf", identifier: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c", }, Version: 0, VisibleName: "insert-pages", LastModified: ptime("2022-09-09 14:13:39 +0100 BST"), Orientation: "portrait", OriginalPageCount: 2, PageCount: 3, Pages: []RMPage{ { PageNo: 0, Identifier: "fa678373-8530-465d-a988-a0b158d957e4", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/fa678373-8530-465d-a988-a0b158d957e4.rm"}, LayerNames: []string{"Layer 1"}, }, { PageNo: 1, Identifier: "0b8b6e65-926c-4269-9109-36fca8718c94", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/0b8b6e65-926c-4269-9109-36fca8718c94.rm"}, LayerNames: []string{"Layer 1"}, }, { PageNo: 2, Identifier: "e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf.rm"}, LayerNames: []string{"Layer 1"}, }, }, RedirectionPageMap: []int{0, -1, 1}, Debugging: false, } opt := cmp.Comparer(func(x, y RMFileInfo) bool { if x.pdfPath != y.pdfPath { t.Errorf("path %s != %s", x.pdfPath, y.pdfPath) return false } if x.identifier != y.identifier { t.Errorf("identifier %s != %s", 
x.pdfPath, y.pdfPath) return false } if x.Version != y.Version || x.VisibleName != y.VisibleName || x.Orientation != y.Orientation || x.OriginalPageCount != y.OriginalPageCount || x.PageCount != y.PageCount { t.Error("version, visiblename, orientation, originalpagecount or pagecount differ") return false } if len(x.RedirectionPageMap) != len(y.RedirectionPageMap) { t.Errorf("redirection length %d != %d", len(x.RedirectionPageMap), len(y.RedirectionPageMap)) return false } for i, rpm := range x.RedirectionPageMap { if rpm != y.RedirectionPageMap[i] { t.Errorf("redirection page map %d %d != %d", i, rpm, y.RedirectionPageMap[i]) return false } } if len(x.Pages) != len(y.Pages) { t.Errorf("page lengths different %d != %d", len(x.Pages), len(y.Pages)) return false } for i, xPage := range x.Pages { yPage := y.Pages[i] if xPage.PageNo != yPage.PageNo { t.Errorf("page %d != %d", xPage.PageNo, yPage.PageNo) return false } if xPage.Identifier != yPage.Identifier { t.Errorf("identifier %s != %s", xPage.Identifier, yPage.Identifier) return false } if xPage.rmPath != yPage.rmPath { t.Errorf("rmpath %x != %s", xPage.rmPath, yPage.rmPath) return false } if len(xPage.LayerNames) != len(yPage.LayerNames) { t.Errorf("layer len %d != %d", len(xPage.LayerNames), len(yPage.LayerNames)) return false } } return true }) // if !cmp.Equal(rmf, expected, cmpopts.IgnoreUnexported(rmf), cmpopts.IgnoreInterfaces(struct{ io.Reader }{})) { if !cmp.Equal(rmf, expected, opt) { t.Errorf("rmf != expected for insert page test") } if len(expected.Pages) != rmf.PageCount { t.Errorf("expected pages %d != rmf pages %d", len(expected.Pages), rmf.PageCount) } if len(rmf.insertedPages) != 1 || rmf.insertedPages[0] != 1 { t.Errorf( "inserted pages %v should equal [1]", rmf.insertedPages, ) } if !cmp.Equal(rmf.insertedPages.insertedPageNos(), []int{2}) { t.Errorf( "human inserted pages %v should equal {2}", rmf.insertedPages.insertedPageNos(), ) } if rmf.insertedPages.insertedPageNumbers() != "2" { t.Errorf( 
"human inserted pages as string %v should equal '2'", rmf.insertedPages.insertedPageNumbers(), ) } type iterExpected struct { pageNo int pdfPageNo int inserted bool isTemplate bool } iExpectArray := []iterExpected{ {0, 0, false, false}, {1, 0, true, true}, {2, 1, false, false}, } for i := 0; i < rmf.PageCount; i++ { // ignore filehandle in last assignment pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() j := iterExpected{pageNo, pdfPageNo, inserted, isTemplate} e := iExpectArray[i] if j.pageNo != e.pageNo || j.pdfPageNo != e.pdfPageNo || j.inserted != e.inserted || j.isTemplate != e.isTemplate { t.Errorf("iter i %d expected %+v got %+v", i, e, j) } } } // TestHorizontal checks if a horizontal PDF is detected correctly func TestHorizontal(t *testing.T) { testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee" template := "" rmf, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Errorf("Could not open file %v", err) } if rmf.Orientation != "landscape" { t.Errorf("Expected landscape orientation, got %s", rmf.Orientation) } } // TestExtensionIgnored checks that when providing an input with an extension // the extension is ignored func TestExtensionIgnored(t *testing.T) { testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee.arbitrary" template := "" _, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Errorf("Could not open file %v", err) } } // TestZipWithNoMetadata tests a zip file with no metadata // note that this older rmapi zip format uses 0-indexed page numbers // // ../testfiles/no-metadata.zip // ddae88d1-7514-43b6-b7de-dcdd18eeb69a.content // ddae88d1-7514-43b6-b7de-dcdd18eeb69a.pagedata // ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0-metadata.json // ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0.rm // // in comparison, see ../testfiles/horizontal_rmapi.zip // e724bba2-266f-434d-aaf2-935d2b405aee.content // e724bba2-266f-434d-aaf2-935d2b405aee.metadata // e724bba2-266f-434d-aaf2-935d2b405aee.pagedata // 
e724bba2-266f-434d-aaf2-935d2b405aee.pdf // e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6-metadata.json // e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6.rm func TestZipWithNoMetadata(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/no-metadata.zip", template) if err != nil { t.Errorf("Could not open file %v", err) } for i := 0; i < rmf.PageCount; i++ { pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() t.Logf( "pageno %d pdfpageno %d inserted %t istpl %t\n", pageNo, pdfPageNo, inserted, isTemplate, ) } } // TestZipVersion3 tests a remarkable v3.0.4 zip file made by rmapi // // ../testfiles/version3.zip // 701cdc43-04aa-410c-bc6f-3c773105a74d // 701cdc43-04aa-410c-bc6f-3c773105a74d.content // 701cdc43-04aa-410c-bc6f-3c773105a74d.metadata // 701cdc43-04aa-410c-bc6f-3c773105a74d.pdf func TestZipV3(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/version3.zip", template) expected := "software version 3 not supported -- no rm metadata file found" if !strings.Contains(err.Error(), expected) { t.Errorf("v3 file should error with %s, not %v", expected, err) } pages := 0 for i := 0; i < rmf.PageCount; i++ { pages++ pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() t.Logf( "pageno %d pdfpageno %d inserted %t istpl %t\n", pageNo, pdfPageNo, inserted, isTemplate, ) } if pages != 2 { t.Errorf("page no %d != 2", pages) } }
TestFilesXochitlWithPDF
identifier_name
files_test.go
/* Tests for files.go MIT licenced, please see LICENCE RCL January 2020 */ package files import ( "io/ioutil" "os" "strings" "testing" "time" "github.com/google/go-cmp/cmp" ) func ptime(ti string) time.Time { tp, err := time.Parse("2006-01-02 15:04:05 -0700 MST", ti) if err != nil { panic(err) } return tp } // TestFilesXochitlWithPDF tests the xochitl file format for a test with // a backing pdf func TestFilesXochitlWithPDF(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", identifier: "cc8313bb-5fab-4ab5-af39-46e6d4160df3", }, Version: 17, VisibleName: "tpl", LastModified: ptime("2019-12-28 23:17:19 +0000 GMT"), PageCount: 2, Pages: []RMPage{ { PageNo: 0, Identifier: "da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224", rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224.rm"}, LayerNames: []string{"Layer 1", "Layer 2 is empty"}, }, { PageNo: 1, Identifier: "7794dbce-2506-4fb0-99fd-9ec031426d57", rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/7794dbce-2506-4fb0-99fd-9ec031426d57.rm"}, LayerNames: []string{"Layer 1", "Layer 2"}, }, }, } if rmf.pdfPath != expected.pdfPath { t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath) } if rmf.identifier != expected.identifier { t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier) } if rmf.Version != expected.Version { t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version) } if rmf.VisibleName != expected.VisibleName { t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName) } if rmf.PageCount != expected.PageCount { t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount) } if rmf.Pages[1].PageNo != expected.Pages[1].PageNo { t.Errorf("Page two PageNo got %v 
wanted %v", rmf.Pages[1].PageNo, expected.Pages[1].PageNo) } if rmf.Pages[1].Identifier != expected.Pages[1].Identifier { t.Errorf("Page two Identifier got %v wanted %v", rmf.Pages[1].Identifier, expected.Pages[1].Identifier) } if rmf.Pages[1].rmPath != expected.Pages[1].rmPath { t.Errorf("Page two rmPath got %v wanted %v", rmf.Pages[1].rmPath, expected.Pages[1].rmPath) } if rmf.Pages[1].LayerNames[1] != expected.Pages[1].LayerNames[1]
// https://stackoverflow.com/a/29339052 redirStdOut := func(log string) string { oldStdout := os.Stdout r, w, _ := os.Pipe() os.Stdout = w // debug! rmf.Debug(log) w.Close() s, _ := ioutil.ReadAll(r) r.Close() os.Stdout = oldStdout return string(s) } rmf.Debugging = false s := redirStdOut("hi") if s != "" { t.Error("debug should be nil") } rmf.Debugging = true s = redirStdOut("hi") if s != "hi\n" { t.Errorf("debug got %s not %s", s, "hi") } } // TestFilesXochitlWithoutPDF tests xochitl format files without a pdf func TestFilesXochitlWithoutPDF(t *testing.T) { template := "../templates/A4.pdf" rmf, err := RMFiler("../testfiles/d34df12d-e72b-4939-a791-5b34b3a810e7", template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "", // no pdf identifier: "d34df12d-e72b-4939-a791-5b34b3a810e7", }, Version: 0, VisibleName: "toolbox", LastModified: ptime("2020-01-05 13:03:52 +0000 GMT"), PageCount: 1, Pages: []RMPage{ { PageNo: 0, Identifier: "2c277cdb-79a5-4f69-b583-4901d944e77e", rmFileDesc: &rmFileDesc{rmPath: "d34df12d-e72b-4939-a791-5b34b3a810e7/2c277cdb-79a5-4f69-b583-4901d944e77e.rm"}, LayerNames: []string{"Layer 1"}, }, }, } if rmf.pdfPath != expected.pdfPath { t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath) } if rmf.identifier != expected.identifier { t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier) } if rmf.Version != expected.Version { t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version) } if rmf.VisibleName != expected.VisibleName { t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName) } if rmf.PageCount != expected.PageCount { t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount) } if rmf.Pages[0].PageNo != expected.Pages[0].PageNo { t.Errorf("Page one PageNo got %v wanted %v", rmf.Pages[0].PageNo, expected.Pages[0].PageNo) } if rmf.Pages[0].Identifier != expected.Pages[0].Identifier { t.Errorf("Page 
one Identifier got %v wanted %v", rmf.Pages[0].Identifier, expected.Pages[0].Identifier) } if rmf.Pages[0].rmPath != expected.Pages[0].rmPath { t.Errorf("Page one rmPath got %v wanted %v", rmf.Pages[0].rmPath, expected.Pages[0].rmPath) } if rmf.Pages[0].LayerNames[0] != expected.Pages[0].LayerNames[0] { t.Error("Page one second layer names not the same") } } // TestInsertedPage checks if an inserted page is detected correctly func TestInsertedPage(t *testing.T) { testUUID := "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c" template := "../templates/A4.pdf" rmf, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c.pdf", identifier: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c", }, Version: 0, VisibleName: "insert-pages", LastModified: ptime("2022-09-09 14:13:39 +0100 BST"), Orientation: "portrait", OriginalPageCount: 2, PageCount: 3, Pages: []RMPage{ { PageNo: 0, Identifier: "fa678373-8530-465d-a988-a0b158d957e4", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/fa678373-8530-465d-a988-a0b158d957e4.rm"}, LayerNames: []string{"Layer 1"}, }, { PageNo: 1, Identifier: "0b8b6e65-926c-4269-9109-36fca8718c94", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/0b8b6e65-926c-4269-9109-36fca8718c94.rm"}, LayerNames: []string{"Layer 1"}, }, { PageNo: 2, Identifier: "e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf.rm"}, LayerNames: []string{"Layer 1"}, }, }, RedirectionPageMap: []int{0, -1, 1}, Debugging: false, } opt := cmp.Comparer(func(x, y RMFileInfo) bool { if x.pdfPath != y.pdfPath { t.Errorf("path %s != %s", x.pdfPath, y.pdfPath) return false } if x.identifier != y.identifier { t.Errorf("identifier %s != %s", x.pdfPath, y.pdfPath) return false } if x.Version != y.Version || x.VisibleName != y.VisibleName || 
x.Orientation != y.Orientation || x.OriginalPageCount != y.OriginalPageCount || x.PageCount != y.PageCount { t.Error("version, visiblename, orientation, originalpagecount or pagecount differ") return false } if len(x.RedirectionPageMap) != len(y.RedirectionPageMap) { t.Errorf("redirection length %d != %d", len(x.RedirectionPageMap), len(y.RedirectionPageMap)) return false } for i, rpm := range x.RedirectionPageMap { if rpm != y.RedirectionPageMap[i] { t.Errorf("redirection page map %d %d != %d", i, rpm, y.RedirectionPageMap[i]) return false } } if len(x.Pages) != len(y.Pages) { t.Errorf("page lengths different %d != %d", len(x.Pages), len(y.Pages)) return false } for i, xPage := range x.Pages { yPage := y.Pages[i] if xPage.PageNo != yPage.PageNo { t.Errorf("page %d != %d", xPage.PageNo, yPage.PageNo) return false } if xPage.Identifier != yPage.Identifier { t.Errorf("identifier %s != %s", xPage.Identifier, yPage.Identifier) return false } if xPage.rmPath != yPage.rmPath { t.Errorf("rmpath %x != %s", xPage.rmPath, yPage.rmPath) return false } if len(xPage.LayerNames) != len(yPage.LayerNames) { t.Errorf("layer len %d != %d", len(xPage.LayerNames), len(yPage.LayerNames)) return false } } return true }) // if !cmp.Equal(rmf, expected, cmpopts.IgnoreUnexported(rmf), cmpopts.IgnoreInterfaces(struct{ io.Reader }{})) { if !cmp.Equal(rmf, expected, opt) { t.Errorf("rmf != expected for insert page test") } if len(expected.Pages) != rmf.PageCount { t.Errorf("expected pages %d != rmf pages %d", len(expected.Pages), rmf.PageCount) } if len(rmf.insertedPages) != 1 || rmf.insertedPages[0] != 1 { t.Errorf( "inserted pages %v should equal [1]", rmf.insertedPages, ) } if !cmp.Equal(rmf.insertedPages.insertedPageNos(), []int{2}) { t.Errorf( "human inserted pages %v should equal {2}", rmf.insertedPages.insertedPageNos(), ) } if rmf.insertedPages.insertedPageNumbers() != "2" { t.Errorf( "human inserted pages as string %v should equal '2'", rmf.insertedPages.insertedPageNumbers(), ) } 
type iterExpected struct { pageNo int pdfPageNo int inserted bool isTemplate bool } iExpectArray := []iterExpected{ {0, 0, false, false}, {1, 0, true, true}, {2, 1, false, false}, } for i := 0; i < rmf.PageCount; i++ { // ignore filehandle in last assignment pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() j := iterExpected{pageNo, pdfPageNo, inserted, isTemplate} e := iExpectArray[i] if j.pageNo != e.pageNo || j.pdfPageNo != e.pdfPageNo || j.inserted != e.inserted || j.isTemplate != e.isTemplate { t.Errorf("iter i %d expected %+v got %+v", i, e, j) } } } // TestHorizontal checks if a horizontal PDF is detected correctly func TestHorizontal(t *testing.T) { testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee" template := "" rmf, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Errorf("Could not open file %v", err) } if rmf.Orientation != "landscape" { t.Errorf("Expected landscape orientation, got %s", rmf.Orientation) } } // TestExtensionIgnored checks that when providing an input with an extension // the extension is ignored func TestExtensionIgnored(t *testing.T) { testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee.arbitrary" template := "" _, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Errorf("Could not open file %v", err) } } // TestZipWithNoMetadata tests a zip file with no metadata // note that this older rmapi zip format uses 0-indexed page numbers // // ../testfiles/no-metadata.zip // ddae88d1-7514-43b6-b7de-dcdd18eeb69a.content // ddae88d1-7514-43b6-b7de-dcdd18eeb69a.pagedata // ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0-metadata.json // ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0.rm // // in comparison, see ../testfiles/horizontal_rmapi.zip // e724bba2-266f-434d-aaf2-935d2b405aee.content // e724bba2-266f-434d-aaf2-935d2b405aee.metadata // e724bba2-266f-434d-aaf2-935d2b405aee.pagedata // e724bba2-266f-434d-aaf2-935d2b405aee.pdf // 
e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6-metadata.json // e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6.rm func TestZipWithNoMetadata(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/no-metadata.zip", template) if err != nil { t.Errorf("Could not open file %v", err) } for i := 0; i < rmf.PageCount; i++ { pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() t.Logf( "pageno %d pdfpageno %d inserted %t istpl %t\n", pageNo, pdfPageNo, inserted, isTemplate, ) } } // TestZipVersion3 tests a remarkable v3.0.4 zip file made by rmapi // // ../testfiles/version3.zip // 701cdc43-04aa-410c-bc6f-3c773105a74d // 701cdc43-04aa-410c-bc6f-3c773105a74d.content // 701cdc43-04aa-410c-bc6f-3c773105a74d.metadata // 701cdc43-04aa-410c-bc6f-3c773105a74d.pdf func TestZipV3(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/version3.zip", template) expected := "software version 3 not supported -- no rm metadata file found" if !strings.Contains(err.Error(), expected) { t.Errorf("v3 file should error with %s, not %v", expected, err) } pages := 0 for i := 0; i < rmf.PageCount; i++ { pages++ pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() t.Logf( "pageno %d pdfpageno %d inserted %t istpl %t\n", pageNo, pdfPageNo, inserted, isTemplate, ) } if pages != 2 { t.Errorf("page no %d != 2", pages) } }
{ t.Error("Page two second layer names not the same") }
conditional_block
files_test.go
/* Tests for files.go MIT licenced, please see LICENCE RCL January 2020 */ package files import ( "io/ioutil" "os" "strings" "testing" "time" "github.com/google/go-cmp/cmp" ) func ptime(ti string) time.Time { tp, err := time.Parse("2006-01-02 15:04:05 -0700 MST", ti) if err != nil { panic(err) } return tp } // TestFilesXochitlWithPDF tests the xochitl file format for a test with // a backing pdf func TestFilesXochitlWithPDF(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3.pdf", identifier: "cc8313bb-5fab-4ab5-af39-46e6d4160df3", }, Version: 17, VisibleName: "tpl", LastModified: ptime("2019-12-28 23:17:19 +0000 GMT"), PageCount: 2, Pages: []RMPage{ { PageNo: 0, Identifier: "da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224", rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/da7f9a41-c2b2-4cbc-9c1b-5a20b5d54224.rm"}, LayerNames: []string{"Layer 1", "Layer 2 is empty"}, }, { PageNo: 1, Identifier: "7794dbce-2506-4fb0-99fd-9ec031426d57", rmFileDesc: &rmFileDesc{rmPath: "cc8313bb-5fab-4ab5-af39-46e6d4160df3/7794dbce-2506-4fb0-99fd-9ec031426d57.rm"}, LayerNames: []string{"Layer 1", "Layer 2"}, }, }, } if rmf.pdfPath != expected.pdfPath { t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath) } if rmf.identifier != expected.identifier { t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier) } if rmf.Version != expected.Version { t.Errorf("Version got %v wanted %v", rmf.Version, expected.Version) } if rmf.VisibleName != expected.VisibleName { t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName) } if rmf.PageCount != expected.PageCount { t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount) } if rmf.Pages[1].PageNo != expected.Pages[1].PageNo { t.Errorf("Page two PageNo got %v 
wanted %v", rmf.Pages[1].PageNo, expected.Pages[1].PageNo) } if rmf.Pages[1].Identifier != expected.Pages[1].Identifier { t.Errorf("Page two Identifier got %v wanted %v", rmf.Pages[1].Identifier, expected.Pages[1].Identifier) } if rmf.Pages[1].rmPath != expected.Pages[1].rmPath { t.Errorf("Page two rmPath got %v wanted %v", rmf.Pages[1].rmPath, expected.Pages[1].rmPath) } if rmf.Pages[1].LayerNames[1] != expected.Pages[1].LayerNames[1] { t.Error("Page two second layer names not the same") } // https://stackoverflow.com/a/29339052 redirStdOut := func(log string) string { oldStdout := os.Stdout r, w, _ := os.Pipe() os.Stdout = w // debug! rmf.Debug(log) w.Close() s, _ := ioutil.ReadAll(r) r.Close() os.Stdout = oldStdout return string(s) } rmf.Debugging = false s := redirStdOut("hi") if s != "" { t.Error("debug should be nil") } rmf.Debugging = true s = redirStdOut("hi") if s != "hi\n" { t.Errorf("debug got %s not %s", s, "hi") } } // TestFilesXochitlWithoutPDF tests xochitl format files without a pdf func TestFilesXochitlWithoutPDF(t *testing.T) { template := "../templates/A4.pdf" rmf, err := RMFiler("../testfiles/d34df12d-e72b-4939-a791-5b34b3a810e7", template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "", // no pdf identifier: "d34df12d-e72b-4939-a791-5b34b3a810e7", }, Version: 0, VisibleName: "toolbox", LastModified: ptime("2020-01-05 13:03:52 +0000 GMT"), PageCount: 1, Pages: []RMPage{ { PageNo: 0, Identifier: "2c277cdb-79a5-4f69-b583-4901d944e77e", rmFileDesc: &rmFileDesc{rmPath: "d34df12d-e72b-4939-a791-5b34b3a810e7/2c277cdb-79a5-4f69-b583-4901d944e77e.rm"}, LayerNames: []string{"Layer 1"}, }, }, } if rmf.pdfPath != expected.pdfPath { t.Errorf("pdfPath got %v wanted %v", rmf.pdfPath, expected.pdfPath) } if rmf.identifier != expected.identifier { t.Errorf("identifier got %v wanted %v", rmf.identifier, expected.identifier) } if rmf.Version != expected.Version { t.Errorf("Version got %v wanted %v", 
rmf.Version, expected.Version) } if rmf.VisibleName != expected.VisibleName { t.Errorf("VisibleName got %v wanted %v", rmf.VisibleName, expected.VisibleName) } if rmf.PageCount != expected.PageCount { t.Errorf("PageCount got %v wanted %v", rmf.PageCount, expected.PageCount) } if rmf.Pages[0].PageNo != expected.Pages[0].PageNo { t.Errorf("Page one PageNo got %v wanted %v", rmf.Pages[0].PageNo, expected.Pages[0].PageNo) } if rmf.Pages[0].Identifier != expected.Pages[0].Identifier { t.Errorf("Page one Identifier got %v wanted %v", rmf.Pages[0].Identifier, expected.Pages[0].Identifier) } if rmf.Pages[0].rmPath != expected.Pages[0].rmPath { t.Errorf("Page one rmPath got %v wanted %v", rmf.Pages[0].rmPath, expected.Pages[0].rmPath) } if rmf.Pages[0].LayerNames[0] != expected.Pages[0].LayerNames[0] { t.Error("Page one second layer names not the same") } } // TestInsertedPage checks if an inserted page is detected correctly func TestInsertedPage(t *testing.T) { testUUID := "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c" template := "../templates/A4.pdf" rmf, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Fatalf("Could not open file %v", err) } expected := RMFileInfo{ RmFS: &RmFS{ pdfPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c.pdf", identifier: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c", }, Version: 0, VisibleName: "insert-pages", LastModified: ptime("2022-09-09 14:13:39 +0100 BST"), Orientation: "portrait", OriginalPageCount: 2, PageCount: 3, Pages: []RMPage{ { PageNo: 0, Identifier: "fa678373-8530-465d-a988-a0b158d957e4", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/fa678373-8530-465d-a988-a0b158d957e4.rm"}, LayerNames: []string{"Layer 1"}, }, { PageNo: 1, Identifier: "0b8b6e65-926c-4269-9109-36fca8718c94", rmFileDesc: &rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/0b8b6e65-926c-4269-9109-36fca8718c94.rm"}, LayerNames: []string{"Layer 1"}, }, { PageNo: 2, Identifier: "e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf", rmFileDesc: 
&rmFileDesc{rmPath: "fbe9f971-03ba-4c21-a0e8-78dd921f9c4c/e2a69ab6-5c11-42d1-8d2d-9ce6569d9fdf.rm"}, LayerNames: []string{"Layer 1"}, }, }, RedirectionPageMap: []int{0, -1, 1}, Debugging: false, } opt := cmp.Comparer(func(x, y RMFileInfo) bool { if x.pdfPath != y.pdfPath { t.Errorf("path %s != %s", x.pdfPath, y.pdfPath) return false } if x.identifier != y.identifier { t.Errorf("identifier %s != %s", x.pdfPath, y.pdfPath) return false } if x.Version != y.Version || x.VisibleName != y.VisibleName || x.Orientation != y.Orientation || x.OriginalPageCount != y.OriginalPageCount || x.PageCount != y.PageCount { t.Error("version, visiblename, orientation, originalpagecount or pagecount differ") return false } if len(x.RedirectionPageMap) != len(y.RedirectionPageMap) { t.Errorf("redirection length %d != %d", len(x.RedirectionPageMap), len(y.RedirectionPageMap)) return false } for i, rpm := range x.RedirectionPageMap { if rpm != y.RedirectionPageMap[i] { t.Errorf("redirection page map %d %d != %d", i, rpm, y.RedirectionPageMap[i]) return false } } if len(x.Pages) != len(y.Pages) { t.Errorf("page lengths different %d != %d", len(x.Pages), len(y.Pages)) return false } for i, xPage := range x.Pages { yPage := y.Pages[i] if xPage.PageNo != yPage.PageNo { t.Errorf("page %d != %d", xPage.PageNo, yPage.PageNo) return false } if xPage.Identifier != yPage.Identifier { t.Errorf("identifier %s != %s", xPage.Identifier, yPage.Identifier) return false } if xPage.rmPath != yPage.rmPath { t.Errorf("rmpath %x != %s", xPage.rmPath, yPage.rmPath) return false } if len(xPage.LayerNames) != len(yPage.LayerNames) { t.Errorf("layer len %d != %d", len(xPage.LayerNames), len(yPage.LayerNames)) return false } } return true }) // if !cmp.Equal(rmf, expected, cmpopts.IgnoreUnexported(rmf), cmpopts.IgnoreInterfaces(struct{ io.Reader }{})) { if !cmp.Equal(rmf, expected, opt) { t.Errorf("rmf != expected for insert page test") } if len(expected.Pages) != rmf.PageCount { t.Errorf("expected pages %d != rmf 
pages %d", len(expected.Pages), rmf.PageCount) } if len(rmf.insertedPages) != 1 || rmf.insertedPages[0] != 1 { t.Errorf( "inserted pages %v should equal [1]", rmf.insertedPages, ) } if !cmp.Equal(rmf.insertedPages.insertedPageNos(), []int{2}) { t.Errorf( "human inserted pages %v should equal {2}", rmf.insertedPages.insertedPageNos(), ) } if rmf.insertedPages.insertedPageNumbers() != "2" { t.Errorf( "human inserted pages as string %v should equal '2'", rmf.insertedPages.insertedPageNumbers(), ) } type iterExpected struct { pageNo int pdfPageNo int inserted bool isTemplate bool } iExpectArray := []iterExpected{ {0, 0, false, false}, {1, 0, true, true}, {2, 1, false, false}, } for i := 0; i < rmf.PageCount; i++ { // ignore filehandle in last assignment pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() j := iterExpected{pageNo, pdfPageNo, inserted, isTemplate} e := iExpectArray[i] if j.pageNo != e.pageNo || j.pdfPageNo != e.pdfPageNo || j.inserted != e.inserted || j.isTemplate != e.isTemplate { t.Errorf("iter i %d expected %+v got %+v", i, e, j) } } } // TestHorizontal checks if a horizontal PDF is detected correctly func TestHorizontal(t *testing.T) { testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee" template := "" rmf, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Errorf("Could not open file %v", err) } if rmf.Orientation != "landscape" { t.Errorf("Expected landscape orientation, got %s", rmf.Orientation) } } // TestExtensionIgnored checks that when providing an input with an extension // the extension is ignored func TestExtensionIgnored(t *testing.T) { testUUID := "e724bba2-266f-434d-aaf2-935d2b405aee.arbitrary" template := "" _, err := RMFiler("../testfiles/"+testUUID, template) if err != nil { t.Errorf("Could not open file %v", err) } } // TestZipWithNoMetadata tests a zip file with no metadata // note that this older rmapi zip format uses 0-indexed page numbers // // ../testfiles/no-metadata.zip // 
ddae88d1-7514-43b6-b7de-dcdd18eeb69a.content // ddae88d1-7514-43b6-b7de-dcdd18eeb69a.pagedata // ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0-metadata.json // ddae88d1-7514-43b6-b7de-dcdd18eeb69a/0.rm // // in comparison, see ../testfiles/horizontal_rmapi.zip // e724bba2-266f-434d-aaf2-935d2b405aee.content // e724bba2-266f-434d-aaf2-935d2b405aee.metadata // e724bba2-266f-434d-aaf2-935d2b405aee.pagedata // e724bba2-266f-434d-aaf2-935d2b405aee.pdf // e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6-metadata.json // e724bba2-266f-434d-aaf2-935d2b405aee/1a9ef8e1-8009-4c84-bbe8-ba2885a137e6.rm func TestZipWithNoMetadata(t *testing.T) { template := "" rmf, err := RMFiler("../testfiles/no-metadata.zip", template) if err != nil { t.Errorf("Could not open file %v", err) } for i := 0; i < rmf.PageCount; i++ { pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() t.Logf( "pageno %d pdfpageno %d inserted %t istpl %t\n", pageNo, pdfPageNo, inserted, isTemplate, ) } } // TestZipVersion3 tests a remarkable v3.0.4 zip file made by rmapi // // ../testfiles/version3.zip // 701cdc43-04aa-410c-bc6f-3c773105a74d // 701cdc43-04aa-410c-bc6f-3c773105a74d.content // 701cdc43-04aa-410c-bc6f-3c773105a74d.metadata // 701cdc43-04aa-410c-bc6f-3c773105a74d.pdf func TestZipV3(t *testing.T)
{ template := "" rmf, err := RMFiler("../testfiles/version3.zip", template) expected := "software version 3 not supported -- no rm metadata file found" if !strings.Contains(err.Error(), expected) { t.Errorf("v3 file should error with %s, not %v", expected, err) } pages := 0 for i := 0; i < rmf.PageCount; i++ { pages++ pageNo, pdfPageNo, inserted, isTemplate, _ := rmf.PageIterate() t.Logf( "pageno %d pdfpageno %d inserted %t istpl %t\n", pageNo, pdfPageNo, inserted, isTemplate, ) } if pages != 2 { t.Errorf("page no %d != 2", pages) } }
identifier_body
pars.py
from tkinter import * from tkinter.ttk import * from selenium import webdriver from selenium.webdriver.chrome.options import Options from bs4 import BeautifulSoup import re import datetime import openpyxl dict_regions_russia = { 'Планета Земля':-1, 'Без учета региона': 0, 'Европа': 111, 'СНГ': 166, 'Универсальное': 318, 'Азия': 183, 'Россия': 225, 'Северо-Западный федеральный округ': 17, 'Калининградская область': 10857, 'Калининград': 22, 'Мурманская область': 10897, 'Мурманск': 24, 'Республика Карелия': 10933, 'Петрозаводск': 18, 'Санкт-Петербург и Ленинградская область': 10174, 'Санкт-Петербург' :2, 'Псковская область' :10926, 'Псков' :25, 'Великие Луки' :10928, 'Новгородская область' :10904, 'Великий Новгород' :24, 'Центральный федеральный округ' :3, 'Тверская область' :10819, 'Тверь' :14, 'Смоленская область' :10795, 'Смоленск' :12, 'Брянская область' :10650, 'Брянск' :191, 'Калужская область' :10693, 'Калуга' :6, 'Обнинск' :967, 'Курская область' :10705, 'Курск' :8, 'Орловская область' :10772, 'Орёл' :10, 'Тульская область' :10832, 'Тула' :15, 'Москва и Московская область' :1, 'Москва' :213, 'Долгопрудный' :214, 'Дубна' :215, 'Зеленоград' :216, 'Пущино' :217, 'Белгородская область' :10645, 'Белгород' :4, 'Липецкая область' :10712, 'Липецк' :9, 'Ярославская область' :10841, 'Ярославль' :16, 'Владимирская область' :10658, 'Владимир' :192, 'Александров' :10656, 'Гусь-Хрустальный' :10661, 'Муром' :10668, 'Ивановская область' :10687, 'Иваново' :5, 'Рязанская область' :10776, 'Рязань' :11, 'Тамбовская область' :10802, 'Тамбов' :13, 'Воронежская область' :10672, 'Воронеж' :193, 'Южный федеральный округ' :26, 'Ростовская область' :11029, 'Ростов-на-Дону' :39, 'Шахты' :11053, 'Таганрог' :971, 'Новочеркасск' :238, 'Волгодонск' :11036, 'Краснодарский край' :10995, 'Краснодар' :35, 'Анапа' :1107, 'Новороссийск' :970, 'Сочи' :239, 'Туапсе' :1058, 'Геленджик' :10990, 'Армавир' :10987, 'Ейск' :10993, 'Республика Адыгея' :11004, 'Майкоп' :1093, 'Карачаево-Черкесская 
республика' :11020, 'Черкесск' :1104, 'Кабардино-Балкарская республика' :11013, 'Нальчик' :30, 'Северная Осетия' :11021, 'Владикавказ' :33, 'Республика Ингушетия' :11012, 'Чеченская республика' :11024, 'Грозный' :1106, 'Республика Дагестан' :11010, 'Махачкала' :28, 'Ставропольский край' :11069, 'Ставрополь' :36, 'Каменск-Шахтинский' :11043, 'Пятигорск' :11067, 'Минеральные Воды' :11063, 'Ессентуки' :11057, 'Кисловодск' :11062, 'Республика Калмыкия' :11015, 'Элиста' :1094, 'Астраханская область' :10946, 'Астрахань' :37, 'Волгоградская область' :10950, 'Волгоград' :38, 'Поволжье' :40, 'Саратовская область' :11146, 'Саратов' :194, 'Жигулевск' :11132, 'Балаково' :11143, 'Пензенская область' :11095, 'Пенза' :49, 'Республика Мордовия' :11117, 'Саранск' :42, 'Ульяновская область' :11153, 'Ульяновск' :195, 'Самарская область' :11131, 'Самара' :51, 'Тольятти' :240, 'Сызрань' :11139, 'Чувашская республика' :11156, 'Чебоксары' :45, 'Республика Марий Эл' :11077, 'Йошкар-Ола' :41, 'Нижегородская область' :11079, 'Нижний Новгород' :47, 'Саров' :11083, 'Кировская область' :11070, 'Киров' :46, 'Костромская область' :10699, 'Кострома' :7, 'Вологодская область' :10853, 'Вологда' :21, 'Архангельская область' :10842, 'Архангельск' :20, 'Северодвинск' :10849, 'Ненецкий автономный округ' :10176, 'Республика Коми' :10939, 'Сыктывкар' :19, 'Удмуртская республика' :11148, 'Ижевск' :44, 'Республика Татарстан' :11119, 'Казань' :43, 'Набережные Челны' :236, 'Нижнекамск' :11127, 'Пермский край' :11108, 'Пермь' :50, 'Республика Башкортостан' :11111, 'Уфа' :172, 'Нефтекамск' :11114, 'Салават' :11115, 'Стерлитамак' :11116, 'Оренбургская область' :11084, 'Оренбург' :48, 'Дзержинск' :972, 'Урал' :52, 'Челябинская область' :11225, 'Челябинск' :56, 'Магнитогорск' :235, 'Снежинск' :11218, 'Курганская область' :11158, 'Курган' :53, 'Свердловская область' :11162, 'Екатеринбург' :54, 'Каменск-Уральский' :11164, 'Нижний Тагил' :11168, 'Новоуральск' :11170, 'Первоуральск' :11171, 'Тюменская область' 
:11176, 'Тюмень' :55, 'Тобольск' :11175, 'Ханты-Мансийский автономный округ' :11193, 'Ханты-Мансийск' :57, 'Сургут' :973, 'Нижневартовск' :1091, 'Сибирь' :59, 'Омская область' :11318, 'Омск' :66, 'Новосибирская область' :11316, 'Новосибирск' :65, 'Бердск' :11314, 'Томская область' :11353, 'Томск' :67, 'Ямало-Ненецкий автономный округ' :11232, 'Салехард' :58, 'Алтайский край' :11235, 'Барнаул' :197, 'Бийск' :975, 'Рубцовск' :11251, 'Республика Алтай' :10231, 'Горно-Алтайск' :11319, 'Кемеровская область' :11282, 'Кемерово' :64, 'Междуреченск' :11287, 'Новокузнецк' :237, 'Прокопьевск' :11291, 'Республика Хакасия' :11340, 'Абакан' :1095, 'Республика Тыва' :10233, 'Кызыл' :11333, 'Красноярский край' :11309, 'Красноярск' :62, 'Ачинск' :11302, 'Норильск' :11311, 'Железногорск' :20086, 'Иркутская область' :11266, 'Иркутск' :63, 'Братск' :976, 'Республика Бурятия' :11330, 'Улан-Удэ' :198, 'Забайкальский край' :21949, 'Чита' :68, 'Дальневосточный федеральный округ' :73, 'Республика Саха (Якутия)' :11443, 'Якутск' :74, 'Амурская область' :11375, 'Благовещенск' :77, 'Еврейская автономная область' :10243, 'Биробиджан' :11393, 'Приморский край' :11409, 'Владивосток' :75, 'Находка' :974, 'Уссурийск' :11426, 'Чукотский автономный округ' :10251, 'Анадырь' :11458, 'Камчатский край' :11398, 'Петропавловск-Камчатский' :78, 'Магаданская область' :11403, 'Магадан' :79, 'Сахалинская область' :11450, 'Южно-Сахалинск' :80, 'Хабаровский край' :11457, 'Хабаровск' :76, 'Комсомольск-на-Амуре' :11453 } exceptions = [ '2gis.ru', 'yandex.ru', 'wikipedia', 'pulscen', 'blizko.ru', 'Avito.ru', 'avito.ru', 'edadeal.ru' ] def stop_pars(event): print('Stop Pars') def output(event): button_1.config(state="disabled") # button_2.config(state="normal") root.update() # получаю список запросов inquiries_text = text.get(1.0, END) inquiries_text = inquiries_text.split("\n") inquiries = [] for val in inquiries_text: if len(val) != 0: inquiries.append(val.strip()) # получаю регион region = 
dict_regions_russia[combo.get()] # получаю глубину парсинга deep_pars = spin.get() try: deep_pars = int(deep_pars) # отрабатывает исклчюение на то что ввели не цифру except ValueError: deep_pars = 1 # максимальная глубина парсинга 10 страниц if deep_pars > 10: deep_pars = 10 # если ввели ноль if deep_pars == 0: deep_pars = 1 progress = 0 main_simple_progress = ( 100 / len(inquiries) ) / int(deep_pars) # Запускаю selenium options = Options() # Запускаем драйвер без графической оболочки браузера options.headless = True # Убираем логирование в консоль options.add_argument('--log-level=3') # Инициализируем драйвер хром driver = webdriver.Chrome(chrome_options=options, executable_path='drivers/chromedriver.exe') for inquirie in inquiries: title_list = [] description_list = [] keywords_list = [] h1_list = [] h2_list = [] h3_list = [] for i in range(1, deep_pars + 1): # получаю страницу яндекс поиска q = 'https://yandex.ru/search/?text='+str(inquirie)+'&lr='+str(region)+'&p='+str(i) driver.get(q) soup = BeautifulSoup (driver.page_source, features="html.parser") links = [] # обрабатываю полученную страницу for link in soup.select('.serp-item .organic__url'): # делаю сравнение со списком исключений для ссылок check_link = True for exception_val in exceptions: result = re.search(exception_val, link.attrs["href"]) if result : check_link = False break # заполняю список собранными ссылками if check_link: links.append(link.attrs["href"]) one_part_progress = round( main_simple_progress / len(links), 1 ) for link in links: driver.get(link) soup_site = BeautifulSoup (driver.page_source, features="html.parser") if soup_site.title != None: title_list.append(soup_site.title.string) h3 = soup_site.find_all('h3') if h3 != None: for tag in h3: h3_list.append(tag.text) h2 = soup_site.find_all('h2') if h2 != None: for tag in h2: h2_list.append(tag.text) h1 = soup_site.find_all('h1') if h1 != None: for tag in h1: h1_list.append(tag.text) description = soup_site.find('meta', 
{'name':'description'}) if description != None: description_list.append(description.get('content')) keywords = soup_site.find('meta', {'name':'keywords'}) if keywords != None: keywords_list.append(keywords.get('content')) # создаю новую книгу workboo
yxl.Workbook() title_sheet = workbook.active # filename = datetime.datetime.today().strftime("%Y-%m-%d-%H-%M-%S") # выбираем активный лист и меняем ему название title_sheet.title = "title" if title_check.get() == True: i = 1 for word in title_list: cellref = title_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if h3_check.get() == True: # добавляю новую страницу h3_sheet = workbook.create_sheet('H3', 1) i = 1 for word in h3_list: cellref = h3_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if h2_check.get() == True: # добавляю новую страницу h2_sheet = workbook.create_sheet('H2', 1) i = 1 for word in h2_list: cellref = h2_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 # добавляю новую страницу if h1_check.get() == True: h1_sheet = workbook.create_sheet('H1', 1) i = 1 for word in h1_list: cellref = h1_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if keywords_check.get() == True: # добавляю новую страницу keywords_sheet = workbook.create_sheet('Keywords', 1) i = 1 for word in keywords_list: cellref = keywords_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if desc_check.get() == True: # добавляю новую страницу description_sheet = workbook.create_sheet('Description', 1) i = 1 for word in description_list: cellref = description_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 # сохраняю данные в exel workbook.save(filename = inquirie+'.xlsx') progress = progress + one_part_progress print('Прогресс '+ str(progress)) barVar.set(round(progress)) root.update() button_1.config(state="normal") # button_2.config(state="disabled") barVar.set(100) root.update() print('Done') # --------------------------------------------------------------------------------- # рисую интерфейс root = Tk() root.title("Парсер мета данных сайтов по запросам") root.geometry('400x450') root.resizable(width=False, height=False) frame_1 = Frame() frame_2 = Frame() frame_3 = Frame() frame_4 = Frame() frame_5 = Frame() frame_6 = Frame() 
frame_7 = Frame() frame_8 = Frame() frame_1.pack() frame_2.pack() frame_3.pack() frame_4.pack() frame_5.pack() frame_6.pack() frame_7.pack() frame_8.pack() lable_1 = Label(frame_1, text="Что собрать:") lable_1.pack() h1_check = BooleanVar() h1_check.set(1) c4 = Checkbutton(frame_1, text="h1", variable=h1_check, onvalue=1, offvalue=0) c4.pack(side=LEFT) title_check = BooleanVar() title_check.set(1) c1 = Checkbutton(frame_1, text="title", variable=title_check, onvalue=1, offvalue=0) c1.pack(side=RIGHT) h2_check = BooleanVar() h2_check.set(1) c5 = Checkbutton(frame_2, text="h2", variable=h2_check, onvalue=1, offvalue=0) c5.pack(side=LEFT) desc_check = BooleanVar() desc_check.set(1) c2 = Checkbutton(frame_2, text="desc", variable=desc_check, onvalue=1, offvalue=0) c2.pack(side=RIGHT) h3_check = BooleanVar() h3_check.set(1) c6 = Checkbutton(frame_3, text="h3", variable=h3_check, onvalue=1, offvalue=0) c6.pack(side=LEFT) keywords_check = BooleanVar() keywords_check.set(1) c3 = Checkbutton(frame_3, text="keys", variable=keywords_check, onvalue=1, offvalue=0) c3.pack(side=RIGHT) lable_2 = Label(frame_4, text="Регион:") lable_2.pack() combo = Combobox(frame_4) combo['values'] = ( 'Республика Татарстан', 'Казань', 'Москва и Московская область', 'Москва', 'Набережные Челны', 'Нижнекамск', 'Санкт-Петербург и Ленинградская область', 'Санкт-Петербург', 'Планета Земля', 'Без учета региона', 'Европа', 'СНГ', 'Универсальное', 'Азия', 'Россия', 'Северо-Западный федеральный округ', 'Калининградская область', 'Калининград', 'Мурманская область', 'Мурманск', 'Республика Карелия', 'Петрозаводск', 'Псковская область', 'Псков', 'Великие Луки', 'Новгородская область', 'Великий Новгород', 'Центральный федеральный округ', 'Тверская область', 'Тверь', 'Смоленская область', 'Смоленск', 'Брянская область', 'Брянск', 'Калужская область', 'Калуга', 'Обнинск', 'Курская область', 'Курск', 'Орловская область', 'Орёл', 'Тульская область', 'Тула', 'Долгопрудный', 'Дубна', 'Зеленоград', 'Пущино', 
'Белгородская область', 'Белгород', 'Липецкая область', 'Липецк', 'Ярославская область', 'Ярославль', 'Владимирская область', 'Владимир', 'Александров', 'Гусь-Хрустальный', 'Муром', 'Ивановская область', 'Иваново', 'Рязанская область', 'Рязань', 'Тамбовская область', 'Тамбов', 'Воронежская область', 'Воронеж', 'Южный федеральный округ', 'Ростовская область', 'Ростов-на-Дону', 'Шахты', 'Таганрог', 'Новочеркасск', 'Волгодонск', 'Краснодарский край', 'Краснодар', 'Анапа', 'Новороссийск', 'Сочи', 'Туапсе', 'Геленджик', 'Армавир', 'Ейск', 'Республика Адыгея', 'Майкоп', 'Карачаево-Черкесская республика', 'Черкесск', 'Кабардино-Балкарская республика', 'Нальчик', 'Северная Осетия', 'Владикавказ', 'Республика Ингушетия', 'Чеченская республика', 'Грозный', 'Республика Дагестан', 'Махачкала', 'Ставропольский край', 'Ставрополь', 'Каменск-Шахтинский', 'Пятигорск', 'Минеральные Воды', 'Ессентуки', 'Кисловодск', 'Республика Калмыкия', 'Элиста', 'Астраханская область', 'Астрахань', 'Волгоградская область', 'Волгоград', 'Поволжье', 'Саратовская область', 'Саратов', 'Жигулевск', 'Балаково', 'Пензенская область', 'Пенза', 'Республика Мордовия', 'Саранск', 'Ульяновская область', 'Ульяновск', 'Самарская область', 'Самара', 'Тольятти', 'Сызрань', 'Чувашская республика', 'Чебоксары', 'Республика Марий Эл', 'Йошкар-Ола', 'Нижегородская область', 'Нижний Новгород', 'Саров', 'Кировская область', 'Киров', 'Костромская область', 'Кострома', 'Вологодская область', 'Вологда', 'Архангельская область', 'Архангельск', 'Северодвинск', 'Ненецкий автономный округ', 'Республика Коми', 'Сыктывкар', 'Удмуртская республика', 'Ижевск', 'Пермский край', 'Пермь', 'Республика Башкортостан', 'Уфа', 'Нефтекамск', 'Салават', 'Стерлитамак', 'Оренбургская область', 'Оренбург', 'Дзержинск', 'Урал', 'Челябинская область', 'Челябинск', 'Магнитогорск', 'Снежинск', 'Курганская область', 'Курган', 'Свердловская область', 'Екатеринбург', 'Каменск-Уральский', 'Нижний Тагил', 'Новоуральск', 'Первоуральск', 'Тюменская 
область', 'Тюмень', 'Тобольск', 'Ханты-Мансийский автономный округ', 'Ханты-Мансийск', 'Сургут', 'Нижневартовск', 'Сибирь', 'Омская область', 'Омск', 'Новосибирская область', 'Новосибирск', 'Бердск', 'Томская область', 'Томск', 'Ямало-Ненецкий автономный округ', 'Салехард', 'Алтайский край', 'Барнаул', 'Бийск', 'Рубцовск', 'Республика Алтай', 'Горно-Алтайск', 'Кемеровская область', 'Кемерово', 'Междуреченск', 'Новокузнецк', 'Прокопьевск', 'Республика Хакасия', 'Абакан', 'Республика Тыва', 'Кызыл', 'Красноярский край', 'Красноярск', 'Ачинск', 'Норильск', 'Железногорск', 'Иркутская область', 'Иркутск', 'Братск', 'Республика Бурятия', 'Улан-Удэ', 'Забайкальский край', 'Чита', 'Дальневосточный федеральный округ', 'Республика Саха (Якутия)', 'Якутск', 'Амурская область', 'Благовещенск', 'Еврейская автономная область', 'Биробиджан', 'Приморский край', 'Владивосток', 'Находка', 'Уссурийск', 'Чукотский автономный округ', 'Анадырь', 'Камчатский край', 'Петропавловск-Камчатский', 'Магаданская область', 'Магадан', 'Сахалинская область', 'Южно-Сахалинск', 'Хабаровский край', 'Хабаровск', 'Комсомольск-на-Амуре' ) combo.current(1) # установите вариант по умолчанию combo.pack() lable_21 = Label(frame_5, text="Глубина парсинга:") lable_21.pack() spin = Spinbox(frame_5, from_=0, to=10, width=5, textvariable=1) spin.pack() lable_3 = Label(frame_6, text="Запросы:") lable_3.pack() text = Text(frame_6, width=38, height=12, bg="lightblue", fg='black', wrap=WORD) text.pack() button_1 = Button(frame_8, text="Собрать данные") button_1.pack(side=RIGHT, pady=5) button_1.bind("<Button-1>", output) # button_2 = Button(frame_7, text="Остановить") # button_2.pack(side=LEFT) # button_2.bind("<Button-1>", stop_pars) # button_2.config(state="disabled") barVar = DoubleVar() barVar.set(0) progress_bar = Progressbar(frame_7, orient = HORIZONTAL, length = 300, variable=barVar, mode = 'determinate') progress_bar.pack(pady=5) root.event_add('<<Paste>>', '<Control-igrave>') root.event_add("<<Copy>>", 
"<Control-ntilde>") root.mainloop()
k = openp
identifier_name
pars.py
from tkinter import * from tkinter.ttk import * from selenium import webdriver from selenium.webdriver.chrome.options import Options from bs4 import BeautifulSoup import re import datetime import openpyxl dict_regions_russia = { 'Планета Земля':-1, 'Без учета региона': 0, 'Европа': 111, 'СНГ': 166, 'Универсальное': 318, 'Азия': 183, 'Россия': 225, 'Северо-Западный федеральный округ': 17, 'Калининградская область': 10857, 'Калининград': 22, 'Мурманская область': 10897, 'Мурманск': 24, 'Республика Карелия': 10933, 'Петрозаводск': 18, 'Санкт-Петербург и Ленинградская область': 10174, 'Санкт-Петербург' :2, 'Псковская область' :10926, 'Псков' :25, 'Великие Луки' :10928, 'Новгородская область' :10904, 'Великий Новгород' :24, 'Центральный федеральный округ' :3, 'Тверская область' :10819, 'Тверь' :14, 'Смоленская область' :10795, 'Смоленск' :12, 'Брянская область' :10650, 'Брянск' :191, 'Калужская область' :10693, 'Калуга' :6, 'Обнинск' :967, 'Курская область' :10705, 'Курск' :8, 'Орловская область' :10772, 'Орёл' :10, 'Тульская область' :10832, 'Тула' :15, 'Москва и Московская область' :1, 'Москва' :213, 'Долгопрудный' :214, 'Дубна' :215, 'Зеленоград' :216, 'Пущино' :217, 'Белгородская область' :10645, 'Белгород' :4, 'Липецкая область' :10712, 'Липецк' :9, 'Ярославская область' :10841, 'Ярославль' :16, 'Владимирская область' :10658, 'Владимир' :192, 'Александров' :10656, 'Гусь-Хрустальный' :10661, 'Муром' :10668, 'Ивановская область' :10687, 'Иваново' :5, 'Рязанская область' :10776, 'Рязань' :11, 'Тамбовская область' :10802, 'Тамбов' :13, 'Воронежская область' :10672, 'Воронеж' :193, 'Южный федеральный округ' :26, 'Ростовская область' :11029, 'Ростов-на-Дону' :39, 'Шахты' :11053, 'Таганрог' :971, 'Новочеркасск' :238, 'Волгодонск' :11036, 'Краснодарский край' :10995, 'Краснодар' :35, 'Анапа' :1107, 'Новороссийск' :970, 'Сочи' :239, 'Туапсе' :1058, 'Геленджик' :10990, 'Армавир' :10987, 'Ейск' :10993, 'Республика Адыгея' :11004, 'Майкоп' :1093, 'Карачаево-Черкесская 
республика' :11020, 'Черкесск' :1104, 'Кабардино-Балкарская республика' :11013, 'Нальчик' :30, 'Северная Осетия' :11021, 'Владикавказ' :33, 'Республика Ингушетия' :11012, 'Чеченская республика' :11024, 'Грозный' :1106, 'Республика Дагестан' :11010, 'Махачкала' :28, 'Ставропольский край' :11069, 'Ставрополь' :36, 'Каменск-Шахтинский' :11043, 'Пятигорск' :11067, 'Минеральные Воды' :11063, 'Ессентуки' :11057, 'Кисловодск' :11062, 'Республика Калмыкия' :11015, 'Элиста' :1094, 'Астраханская область' :10946, 'Астрахань' :37, 'Волгоградская область' :10950, 'Волгоград' :38, 'Поволжье' :40, 'Саратовская область' :11146, 'Саратов' :194, 'Жигулевск' :11132, 'Балаково' :11143, 'Пензенская область' :11095, 'Пенза' :49, 'Республика Мордовия' :11117, 'Саранск' :42, 'Ульяновская область' :11153, 'Ульяновск' :195, 'Самарская область' :11131, 'Самара' :51, 'Тольятти' :240, 'Сызрань' :11139, 'Чувашская республика' :11156, 'Чебоксары' :45, 'Республика Марий Эл' :11077, 'Йошкар-Ола' :41, 'Нижегородская область' :11079, 'Нижний Новгород' :47, 'Саров' :11083, 'Кировская область' :11070, 'Киров' :46, 'Костромская область' :10699, 'Кострома' :7, 'Вологодская область' :10853, 'Вологда' :21, 'Архангельская область' :10842, 'Архангельск' :20, 'Северодвинск' :10849, 'Ненецкий автономный округ' :10176, 'Республика Коми' :10939, 'Сыктывкар' :19, 'Удмуртская республика' :11148, 'Ижевск' :44, 'Республика Татарстан' :11119, 'Казань' :43, 'Набережные Челны' :236, 'Нижнекамск' :11127, 'Пермский край' :11108, 'Пермь' :50, 'Республика Башкортостан' :11111, 'Уфа' :172, 'Нефтекамск' :11114, 'Салават' :11115, 'Стерлитамак' :11116, 'Оренбургская область' :11084, 'Оренбург' :48, 'Дзержинск' :972, 'Урал' :52, 'Челябинская область' :11225, 'Челябинск' :56, 'Магнитогорск' :235, 'Снежинск' :11218, 'Курганская область' :11158, 'Курган' :53, 'Свердловская область' :11162, 'Екатеринбург' :54, 'Каменск-Уральский' :11164, 'Нижний Тагил' :11168, 'Новоуральск' :11170, 'Первоуральск' :11171, 'Тюменская область' 
:11176, 'Тюмень' :55, 'Тобольск' :11175, 'Ханты-Мансийский автономный округ' :11193, 'Ханты-Мансийск' :57, 'Сургут' :973, 'Нижневартовск' :1091, 'Сибирь' :59, 'Омская область' :11318, 'Омск' :66, 'Новосибирская область' :11316, 'Новосибирск' :65, 'Бердск' :11314, 'Томская область' :11353, 'Томск' :67, 'Ямало-Ненецкий автономный округ' :11232, 'Салехард' :58, 'Алтайский край' :11235, 'Барнаул' :197, 'Бийск' :975, 'Рубцовск' :11251, 'Республика Алтай' :10231, 'Горно-Алтайск' :11319, 'Кемеровская область' :11282, 'Кемерово' :64, 'Междуреченск' :11287, 'Новокузнецк' :237, 'Прокопьевск' :11291, 'Республика Хакасия' :11340, 'Абакан' :1095, 'Республика Тыва' :10233, 'Кызыл' :11333, 'Красноярский край' :11309, 'Красноярск' :62, 'Ачинск' :11302, 'Норильск' :11311, 'Железногорск' :20086, 'Иркутская область' :11266, 'Иркутск' :63, 'Братск' :976, 'Республика Бурятия' :11330, 'Улан-Удэ' :198, 'Забайкальский край' :21949, 'Чита' :68, 'Дальневосточный федеральный округ' :73, 'Республика Саха (Якутия)' :11443, 'Якутск' :74, 'Амурская область' :11375, 'Благовещенск' :77, 'Еврейская автономная область' :10243, 'Биробиджан' :11393, 'Приморский край' :11409, 'Владивосток' :75, 'Находка' :974, 'Уссурийск' :11426, 'Чукотский автономный округ' :10251, 'Анадырь' :11458, 'Камчатский край' :11398, 'Петропавловск-Камчатский' :78, 'Магаданская область' :11403, 'Магадан' :79, 'Сахалинская область' :11450, 'Южно-Сахалинск' :80, 'Хабаровский край' :11457, 'Хабаровск' :76, 'Комсомольск-на-Амуре' :11453 } exceptions = [ '2gis.ru', 'yandex.ru', 'wikipedia', 'pulscen', 'blizko.ru', 'Avito.ru', 'avito.ru', 'edadeal.ru' ] def stop_pars(event): print('Stop Pars') def output(event): button_1.config(state="disabled") # button_2.config(state="normal") root.update() # получаю список запросов inquiries_text = text.get(1.0, END) inquiries_text = inquiries_text.split("\n") inquiries = [] for val in inquiries_text: if len(val) != 0: inquiries.append(val.strip()) # получаю регион region = 
dict_regions_russia[combo.get()] # получаю глубину парсинга deep_pars = spin.get() try: deep_pars = int(deep_pars) # отрабатывает исклчюение на то что ввели не цифру except ValueError: deep_pars = 1 # максимальная глубина парсинга 10 страниц if deep_pars > 10: deep_pars = 10 # если ввели ноль if deep_pars == 0: deep_pars = 1 progress = 0 main_simple_progress = ( 100 / len(inquiries) ) / int(deep_pars) # Запускаю selenium options = Options() # Запускаем драйвер без графической оболочки браузера options.headless = True # Убираем логирование в консоль options.add_argument('--log-level=3') # Инициализируем драйвер хром driver = webdriver.Chrome(chrome_options=options, executable_path='drivers/chromedriver.exe') for inquirie in inquiries: title_list = [] description_list = [] keywords_list = [] h1_list = [] h2_list = [] h3_list = [] for i in range(1, deep_pars + 1): # получаю страницу яндекс поиска q = 'https://yandex.ru/search/?text='+str(inquirie)+'&lr='+str(region)+'&p='+str(i) driver.get(q) soup = BeautifulSoup (driver.page_source, features="html.parser") links = [] # обрабатываю полученную страницу for link in soup.select('.serp-item .organic__url'): # делаю сравнение со списком исключений для ссылок check_link = True for exception_val in exceptions: result = re.search(exception_val, link.attrs["href"]) if result : check_link = False break # заполняю список собранными ссылками if check_link: links.append(link.attrs["href"]) one_part_progress = round( main_simple_progress / len(links), 1 ) for link in links: driver.get(link) soup_site = BeautifulSoup (driver.page_source, features="html.parser") if soup_site.title != None: title_list.append(soup_site.title.string) h3 = soup_site.find_all('h3') if h3 != None: for tag in h3: h3_list.append(tag.text) h2 = soup_site.find_all('h2') if h2 != None: for tag in h2: h2_list.append(tag.text) h1 = soup_site.find_all('h1') if h1 != None: for tag in h1: h1_list.append(tag.text) description = soup_site.find('meta', 
{'name':'description'}) if description != None: description_list.append(description.get('content')) keywords = soup_site.find('meta', {'name':'keywords'}) if keywords != None: keywords_list.append(keywords.get('content'))
title_sheet = workbook.active # filename = datetime.datetime.today().strftime("%Y-%m-%d-%H-%M-%S") # выбираем активный лист и меняем ему название title_sheet.title = "title" if title_check.get() == True: i = 1 for word in title_list: cellref = title_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if h3_check.get() == True: # добавляю новую страницу h3_sheet = workbook.create_sheet('H3', 1) i = 1 for word in h3_list: cellref = h3_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if h2_check.get() == True: # добавляю новую страницу h2_sheet = workbook.create_sheet('H2', 1) i = 1 for word in h2_list: cellref = h2_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 # добавляю новую страницу if h1_check.get() == True: h1_sheet = workbook.create_sheet('H1', 1) i = 1 for word in h1_list: cellref = h1_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if keywords_check.get() == True: # добавляю новую страницу keywords_sheet = workbook.create_sheet('Keywords', 1) i = 1 for word in keywords_list: cellref = keywords_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if desc_check.get() == True: # добавляю новую страницу description_sheet = workbook.create_sheet('Description', 1) i = 1 for word in description_list: cellref = description_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 # сохраняю данные в exel workbook.save(filename = inquirie+'.xlsx') progress = progress + one_part_progress print('Прогресс '+ str(progress)) barVar.set(round(progress)) root.update() button_1.config(state="normal") # button_2.config(state="disabled") barVar.set(100) root.update() print('Done') # --------------------------------------------------------------------------------- # рисую интерфейс root = Tk() root.title("Парсер мета данных сайтов по запросам") root.geometry('400x450') root.resizable(width=False, height=False) frame_1 = Frame() frame_2 = Frame() frame_3 = Frame() frame_4 = Frame() frame_5 = Frame() frame_6 = Frame() frame_7 = 
Frame() frame_8 = Frame() frame_1.pack() frame_2.pack() frame_3.pack() frame_4.pack() frame_5.pack() frame_6.pack() frame_7.pack() frame_8.pack() lable_1 = Label(frame_1, text="Что собрать:") lable_1.pack() h1_check = BooleanVar() h1_check.set(1) c4 = Checkbutton(frame_1, text="h1", variable=h1_check, onvalue=1, offvalue=0) c4.pack(side=LEFT) title_check = BooleanVar() title_check.set(1) c1 = Checkbutton(frame_1, text="title", variable=title_check, onvalue=1, offvalue=0) c1.pack(side=RIGHT) h2_check = BooleanVar() h2_check.set(1) c5 = Checkbutton(frame_2, text="h2", variable=h2_check, onvalue=1, offvalue=0) c5.pack(side=LEFT) desc_check = BooleanVar() desc_check.set(1) c2 = Checkbutton(frame_2, text="desc", variable=desc_check, onvalue=1, offvalue=0) c2.pack(side=RIGHT) h3_check = BooleanVar() h3_check.set(1) c6 = Checkbutton(frame_3, text="h3", variable=h3_check, onvalue=1, offvalue=0) c6.pack(side=LEFT) keywords_check = BooleanVar() keywords_check.set(1) c3 = Checkbutton(frame_3, text="keys", variable=keywords_check, onvalue=1, offvalue=0) c3.pack(side=RIGHT) lable_2 = Label(frame_4, text="Регион:") lable_2.pack() combo = Combobox(frame_4) combo['values'] = ( 'Республика Татарстан', 'Казань', 'Москва и Московская область', 'Москва', 'Набережные Челны', 'Нижнекамск', 'Санкт-Петербург и Ленинградская область', 'Санкт-Петербург', 'Планета Земля', 'Без учета региона', 'Европа', 'СНГ', 'Универсальное', 'Азия', 'Россия', 'Северо-Западный федеральный округ', 'Калининградская область', 'Калининград', 'Мурманская область', 'Мурманск', 'Республика Карелия', 'Петрозаводск', 'Псковская область', 'Псков', 'Великие Луки', 'Новгородская область', 'Великий Новгород', 'Центральный федеральный округ', 'Тверская область', 'Тверь', 'Смоленская область', 'Смоленск', 'Брянская область', 'Брянск', 'Калужская область', 'Калуга', 'Обнинск', 'Курская область', 'Курск', 'Орловская область', 'Орёл', 'Тульская область', 'Тула', 'Долгопрудный', 'Дубна', 'Зеленоград', 'Пущино', 'Белгородская 
область', 'Белгород', 'Липецкая область', 'Липецк', 'Ярославская область', 'Ярославль', 'Владимирская область', 'Владимир', 'Александров', 'Гусь-Хрустальный', 'Муром', 'Ивановская область', 'Иваново', 'Рязанская область', 'Рязань', 'Тамбовская область', 'Тамбов', 'Воронежская область', 'Воронеж', 'Южный федеральный округ', 'Ростовская область', 'Ростов-на-Дону', 'Шахты', 'Таганрог', 'Новочеркасск', 'Волгодонск', 'Краснодарский край', 'Краснодар', 'Анапа', 'Новороссийск', 'Сочи', 'Туапсе', 'Геленджик', 'Армавир', 'Ейск', 'Республика Адыгея', 'Майкоп', 'Карачаево-Черкесская республика', 'Черкесск', 'Кабардино-Балкарская республика', 'Нальчик', 'Северная Осетия', 'Владикавказ', 'Республика Ингушетия', 'Чеченская республика', 'Грозный', 'Республика Дагестан', 'Махачкала', 'Ставропольский край', 'Ставрополь', 'Каменск-Шахтинский', 'Пятигорск', 'Минеральные Воды', 'Ессентуки', 'Кисловодск', 'Республика Калмыкия', 'Элиста', 'Астраханская область', 'Астрахань', 'Волгоградская область', 'Волгоград', 'Поволжье', 'Саратовская область', 'Саратов', 'Жигулевск', 'Балаково', 'Пензенская область', 'Пенза', 'Республика Мордовия', 'Саранск', 'Ульяновская область', 'Ульяновск', 'Самарская область', 'Самара', 'Тольятти', 'Сызрань', 'Чувашская республика', 'Чебоксары', 'Республика Марий Эл', 'Йошкар-Ола', 'Нижегородская область', 'Нижний Новгород', 'Саров', 'Кировская область', 'Киров', 'Костромская область', 'Кострома', 'Вологодская область', 'Вологда', 'Архангельская область', 'Архангельск', 'Северодвинск', 'Ненецкий автономный округ', 'Республика Коми', 'Сыктывкар', 'Удмуртская республика', 'Ижевск', 'Пермский край', 'Пермь', 'Республика Башкортостан', 'Уфа', 'Нефтекамск', 'Салават', 'Стерлитамак', 'Оренбургская область', 'Оренбург', 'Дзержинск', 'Урал', 'Челябинская область', 'Челябинск', 'Магнитогорск', 'Снежинск', 'Курганская область', 'Курган', 'Свердловская область', 'Екатеринбург', 'Каменск-Уральский', 'Нижний Тагил', 'Новоуральск', 'Первоуральск', 'Тюменская область', 
'Тюмень', 'Тобольск', 'Ханты-Мансийский автономный округ', 'Ханты-Мансийск', 'Сургут', 'Нижневартовск', 'Сибирь', 'Омская область', 'Омск', 'Новосибирская область', 'Новосибирск', 'Бердск', 'Томская область', 'Томск', 'Ямало-Ненецкий автономный округ', 'Салехард', 'Алтайский край', 'Барнаул', 'Бийск', 'Рубцовск', 'Республика Алтай', 'Горно-Алтайск', 'Кемеровская область', 'Кемерово', 'Междуреченск', 'Новокузнецк', 'Прокопьевск', 'Республика Хакасия', 'Абакан', 'Республика Тыва', 'Кызыл', 'Красноярский край', 'Красноярск', 'Ачинск', 'Норильск', 'Железногорск', 'Иркутская область', 'Иркутск', 'Братск', 'Республика Бурятия', 'Улан-Удэ', 'Забайкальский край', 'Чита', 'Дальневосточный федеральный округ', 'Республика Саха (Якутия)', 'Якутск', 'Амурская область', 'Благовещенск', 'Еврейская автономная область', 'Биробиджан', 'Приморский край', 'Владивосток', 'Находка', 'Уссурийск', 'Чукотский автономный округ', 'Анадырь', 'Камчатский край', 'Петропавловск-Камчатский', 'Магаданская область', 'Магадан', 'Сахалинская область', 'Южно-Сахалинск', 'Хабаровский край', 'Хабаровск', 'Комсомольск-на-Амуре' ) combo.current(1) # установите вариант по умолчанию combo.pack() lable_21 = Label(frame_5, text="Глубина парсинга:") lable_21.pack() spin = Spinbox(frame_5, from_=0, to=10, width=5, textvariable=1) spin.pack() lable_3 = Label(frame_6, text="Запросы:") lable_3.pack() text = Text(frame_6, width=38, height=12, bg="lightblue", fg='black', wrap=WORD) text.pack() button_1 = Button(frame_8, text="Собрать данные") button_1.pack(side=RIGHT, pady=5) button_1.bind("<Button-1>", output) # button_2 = Button(frame_7, text="Остановить") # button_2.pack(side=LEFT) # button_2.bind("<Button-1>", stop_pars) # button_2.config(state="disabled") barVar = DoubleVar() barVar.set(0) progress_bar = Progressbar(frame_7, orient = HORIZONTAL, length = 300, variable=barVar, mode = 'determinate') progress_bar.pack(pady=5) root.event_add('<<Paste>>', '<Control-igrave>') root.event_add("<<Copy>>", 
"<Control-ntilde>") root.mainloop()
# создаю новую книгу workbook = openpyxl.Workbook()
random_line_split
pars.py
from tkinter import * from tkinter.ttk import * from selenium import webdriver from selenium.webdriver.chrome.options import Options from bs4 import BeautifulSoup import re import datetime import openpyxl dict_regions_russia = { 'Планета Земля':-1, 'Без учета региона': 0, 'Европа': 111, 'СНГ': 166, 'Универсальное': 318, 'Азия': 183, 'Россия': 225, 'Северо-Западный федеральный округ': 17, 'Калининградская область': 10857, 'Калининград': 22, 'Мурманская область': 10897, 'Мурманск': 24, 'Республика Карелия': 10933, 'Петрозаводск': 18, 'Санкт-Петербург и Ленинградская область': 10174, 'Санкт-Петербург' :2, 'Псковская область' :10926, 'Псков' :25, 'Великие Луки' :10928, 'Новгородская область' :10904, 'Великий Новгород' :24, 'Центральный федеральный округ' :3, 'Тверская область' :10819, 'Тверь' :14, 'Смоленская область' :10795, 'Смоленск' :12, 'Брянская область' :10650, 'Брянск' :191, 'Калужская область' :10693, 'Калуга' :6, 'Обнинск' :967, 'Курская область' :10705, 'Курск' :8, 'Орловская область' :10772, 'Орёл' :10, 'Тульская область' :10832, 'Тула' :15, 'Москва и Московская область' :1, 'Москва' :213, 'Долгопрудный' :214, 'Дубна' :215, 'Зеленоград' :216, 'Пущино' :217, 'Белгородская область' :10645, 'Белгород' :4, 'Липецкая область' :10712, 'Липецк' :9, 'Ярославская область' :10841, 'Ярославль' :16, 'Владимирская область' :10658, 'Владимир' :192, 'Александров' :10656, 'Гусь-Хрустальный' :10661, 'Муром' :10668, 'Ивановская область' :10687, 'Иваново' :5, 'Рязанская область' :10776, 'Рязань' :11, 'Тамбовская область' :10802, 'Тамбов' :13, 'Воронежская область' :10672, 'Воронеж' :193, 'Южный федеральный округ' :26, 'Ростовская область' :11029, 'Ростов-на-Дону' :39, 'Шахты' :11053, 'Таганрог' :971, 'Новочеркасск' :238, 'Волгодонск' :11036, 'Краснодарский край' :10995, 'Краснодар' :35, 'Анапа' :1107, 'Новороссийск' :970, 'Сочи' :239, 'Туапсе' :1058, 'Геленджик' :10990, 'Армавир' :10987, 'Ейск' :10993, 'Республика Адыгея' :11004, 'Майкоп' :1093, 'Карачаево-Черкесская 
республика' :11020, 'Черкесск' :1104, 'Кабардино-Балкарская республика' :11013, 'Нальчик' :30, 'Северная Осетия' :11021, 'Владикавказ' :33, 'Республика Ингушетия' :11012, 'Чеченская республика' :11024, 'Грозный' :1106, 'Республика Дагестан' :11010, 'Махачкала' :28, 'Ставропольский край' :11069, 'Ставрополь' :36, 'Каменск-Шахтинский' :11043, 'Пятигорск' :11067, 'Минеральные Воды' :11063, 'Ессентуки' :11057, 'Кисловодск' :11062, 'Республика Калмыкия' :11015, 'Элиста' :1094, 'Астраханская область' :10946, 'Астрахань' :37, 'Волгоградская область' :10950, 'Волгоград' :38, 'Поволжье' :40, 'Саратовская область' :11146, 'Саратов' :194, 'Жигулевск' :11132, 'Балаково' :11143, 'Пензенская область' :11095, 'Пенза' :49, 'Республика Мордовия' :11117, 'Саранск' :42, 'Ульяновская область' :11153, 'Ульяновск' :195, 'Самарская область' :11131, 'Самара' :51, 'Тольятти' :240, 'Сызрань' :11139, 'Чувашская республика' :11156, 'Чебоксары' :45, 'Республика Марий Эл' :11077, 'Йошкар-Ола' :41, 'Нижегородская область' :11079, 'Нижний Новгород' :47, 'Саров' :11083, 'Кировская область' :11070, 'Киров' :46, 'Костромская область' :10699, 'Кострома' :7, 'Вологодская область' :10853, 'Вологда' :21, 'Архангельская область' :10842, 'Архангельск' :20, 'Северодвинск' :10849, 'Ненецкий автономный округ' :10176, 'Республика Коми' :10939, 'Сыктывкар' :19, 'Удмуртская республика' :11148, 'Ижевск' :44, 'Республика Татарстан' :11119, 'Казань' :43, 'Набережные Челны' :236, 'Нижнекамск' :11127, 'Пермский край' :11108, 'Пермь' :50, 'Республика Башкортостан' :11111, 'Уфа' :172, 'Нефтекамск' :11114, 'Салават' :11115, 'Стерлитамак' :11116, 'Оренбургская область' :11084, 'Оренбург' :48, 'Дзержинск' :972, 'Урал' :52, 'Челябинская область' :11225, 'Челябинск' :56, 'Магнитогорск' :235, 'Снежинск' :11218, 'Курганская область' :11158, 'Курган' :53, 'Свердловская область' :11162, 'Екатеринбург' :54, 'Каменск-Уральский' :11164, 'Нижний Тагил' :11168, 'Новоуральск' :11170, 'Первоуральск' :11171, 'Тюменская область' 
:11176, 'Тюмень' :55, 'Тобольск' :11175, 'Ханты-Мансийский автономный округ' :11193, 'Ханты-Мансийск' :57, 'Сургут' :973, 'Нижневартовск' :1091, 'Сибирь' :59, 'Омская область' :11318, 'Омск' :66, 'Новосибирская область' :11316, 'Новосибирск' :65, 'Бердск' :11314, 'Томская область' :11353, 'Томск' :67, 'Ямало-Ненецкий автономный округ' :11232, 'Салехард' :58, 'Алтайский край' :11235, 'Барнаул' :197, 'Бийск' :975, 'Рубцовск' :11251, 'Республика Алтай' :10231, 'Горно-Алтайск' :11319, 'Кемеровская область' :11282, 'Кемерово' :64, 'Междуреченск' :11287, 'Новокузнецк' :237, 'Прокопьевск' :11291, 'Республика Хакасия' :11340, 'Абакан' :1095, 'Республика Тыва' :10233, 'Кызыл' :11333, 'Красноярский край' :11309, 'Красноярск' :62, 'Ачинск' :11302, 'Норильск' :11311, 'Железногорск' :20086, 'Иркутская область' :11266, 'Иркутск' :63, 'Братск' :976, 'Республика Бурятия' :11330, 'Улан-Удэ' :198, 'Забайкальский край' :21949, 'Чита' :68, 'Дальневосточный федеральный округ' :73, 'Республика Саха (Якутия)' :11443, 'Якутск' :74, 'Амурская область' :11375, 'Благовещенск' :77, 'Еврейская автономная область' :10243, 'Биробиджан' :11393, 'Приморский край' :11409, 'Владивосток' :75, 'Находка' :974, 'Уссурийск' :11426, 'Чукотский автономный округ' :10251, 'Анадырь' :11458, 'Камчатский край' :11398, 'Петропавловск-Камчатский' :78, 'Магаданская область' :11403, 'Магадан' :79, 'Сахалинская область' :11450, 'Южно-Сахалинск' :80, 'Хабаровский край' :11457, 'Хабаровск' :76, 'Комсомольск-на-Амуре' :11453 } exceptions = [ '2gis.ru', 'yandex.ru', 'wikipedia', 'pulscen', 'blizko.ru', 'Avito.ru', 'avito.ru', 'edadeal.ru' ] def stop_pars(event): print('Stop Pars') def output(event): button_1.config(state="disabled") # button_2.config(state="normal") root.update() # получаю список запросов inquiries_text = text.get(1.0, END) inquiries_text = inquiries_text.split("\n") inquiries = [] for val in inquiries_text: if len(val) != 0: inquiries.append(val.strip()) # получаю регион region = 
dict_regions_russia[combo.get()] # получаю глубину парсинга deep_pars = spin.get() try: deep_pars = int(deep_pars) # отрабатывает исклчюение на то что ввели не цифру except ValueError: deep_pars = 1 # максимальная глубина парсинга 10 страниц if deep_pars > 10: deep_pars = 10 # если ввели ноль if deep_pars == 0: deep_pars = 1 progress = 0 main_simple_progress = ( 100 / len(inquiries) ) / int(deep_pars) # Запускаю selenium options = Options() # Запускаем драйвер без графической оболочки браузера options.headless = True # Убираем логирование в консоль options.add_argument('--log-level=3') # Инициализируем драйвер хром driver = webdriver.Chrome(chrome_options=options, executable_path='drivers/chromedriver.exe') for inquirie in inquiries: title_list = [] description_list = [] keywords_list = [] h1_list = [] h2_list = [] h3_list = [] for i in range(1, deep_pars + 1): # получаю страницу яндекс поиска q = 'https://yandex.ru/search/?text='+str(inquirie)+'&lr='+str(region)+'&p='+str(i) driver.get(q) soup = BeautifulSoup (driver.page_source, features="html.parser") links = [] # обрабатываю полученную страницу for link in soup.select('.serp-item .organic__url'): # делаю сравнение со списком исключений для ссылок check_link = True for exception_val in exceptions: result = re.search(exception_val, link.attrs["href"]) if result : check_link = False break # заполняю список собранными ссылками if check_link: links.append(link.attrs["href"]) one_part_progress = round( main_simple_progress / len(links), 1 ) for link in links: driver.get(link) soup_site = BeautifulSoup (driver.page_source, features="html.parser") if soup_site.title != None: title_list.append(soup_site.title.string) h3 = soup_site.find_all('h3') if h3 != None: for tag in h3: h3_list.append(tag.text) h2 = soup_site.find_all('h2') if h2 != None: for tag in h2: h2_list.append(tag.text) h1 = soup_site.find_all('h1') if h1 != None: for tag in h1: h1_list.append(tag.text) description = soup_site.find('meta', 
{'name':'description'}) if description != None: description_list.append(description.get('content')) keywords = soup_site.find('meta', {'name':'keywords'}) if keywords != None: keywords_list.append(keywords.get('content')) # создаю новую книгу workbook = openpyxl.Workbo
et = workbook.active # filename = datetime.datetime.today().strftime("%Y-%m-%d-%H-%M-%S") # выбираем активный лист и меняем ему название title_sheet.title = "title" if title_check.get() == True: i = 1 for word in title_list: cellref = title_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if h3_check.get() == True: # добавляю новую страницу h3_sheet = workbook.create_sheet('H3', 1) i = 1 for word in h3_list: cellref = h3_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if h2_check.get() == True: # добавляю новую страницу h2_sheet = workbook.create_sheet('H2', 1) i = 1 for word in h2_list: cellref = h2_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 # добавляю новую страницу if h1_check.get() == True: h1_sheet = workbook.create_sheet('H1', 1) i = 1 for word in h1_list: cellref = h1_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if keywords_check.get() == True: # добавляю новую страницу keywords_sheet = workbook.create_sheet('Keywords', 1) i = 1 for word in keywords_list: cellref = keywords_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if desc_check.get() == True: # добавляю новую страницу description_sheet = workbook.create_sheet('Description', 1) i = 1 for word in description_list: cellref = description_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 # сохраняю данные в exel workbook.save(filename = inquirie+'.xlsx') progress = progress + one_part_progress print('Прогресс '+ str(progress)) barVar.set(round(progress)) root.update() button_1.config(state="normal") # button_2.config(state="disabled") barVar.set(100) root.update() print('Done') # --------------------------------------------------------------------------------- # рисую интерфейс root = Tk() root.title("Парсер мета данных сайтов по запросам") root.geometry('400x450') root.resizable(width=False, height=False) frame_1 = Frame() frame_2 = Frame() frame_3 = Frame() frame_4 = Frame() frame_5 = Frame() frame_6 = Frame() frame_7 = Frame() 
frame_8 = Frame() frame_1.pack() frame_2.pack() frame_3.pack() frame_4.pack() frame_5.pack() frame_6.pack() frame_7.pack() frame_8.pack() lable_1 = Label(frame_1, text="Что собрать:") lable_1.pack() h1_check = BooleanVar() h1_check.set(1) c4 = Checkbutton(frame_1, text="h1", variable=h1_check, onvalue=1, offvalue=0) c4.pack(side=LEFT) title_check = BooleanVar() title_check.set(1) c1 = Checkbutton(frame_1, text="title", variable=title_check, onvalue=1, offvalue=0) c1.pack(side=RIGHT) h2_check = BooleanVar() h2_check.set(1) c5 = Checkbutton(frame_2, text="h2", variable=h2_check, onvalue=1, offvalue=0) c5.pack(side=LEFT) desc_check = BooleanVar() desc_check.set(1) c2 = Checkbutton(frame_2, text="desc", variable=desc_check, onvalue=1, offvalue=0) c2.pack(side=RIGHT) h3_check = BooleanVar() h3_check.set(1) c6 = Checkbutton(frame_3, text="h3", variable=h3_check, onvalue=1, offvalue=0) c6.pack(side=LEFT) keywords_check = BooleanVar() keywords_check.set(1) c3 = Checkbutton(frame_3, text="keys", variable=keywords_check, onvalue=1, offvalue=0) c3.pack(side=RIGHT) lable_2 = Label(frame_4, text="Регион:") lable_2.pack() combo = Combobox(frame_4) combo['values'] = ( 'Республика Татарстан', 'Казань', 'Москва и Московская область', 'Москва', 'Набережные Челны', 'Нижнекамск', 'Санкт-Петербург и Ленинградская область', 'Санкт-Петербург', 'Планета Земля', 'Без учета региона', 'Европа', 'СНГ', 'Универсальное', 'Азия', 'Россия', 'Северо-Западный федеральный округ', 'Калининградская область', 'Калининград', 'Мурманская область', 'Мурманск', 'Республика Карелия', 'Петрозаводск', 'Псковская область', 'Псков', 'Великие Луки', 'Новгородская область', 'Великий Новгород', 'Центральный федеральный округ', 'Тверская область', 'Тверь', 'Смоленская область', 'Смоленск', 'Брянская область', 'Брянск', 'Калужская область', 'Калуга', 'Обнинск', 'Курская область', 'Курск', 'Орловская область', 'Орёл', 'Тульская область', 'Тула', 'Долгопрудный', 'Дубна', 'Зеленоград', 'Пущино', 'Белгородская область', 
'Белгород', 'Липецкая область', 'Липецк', 'Ярославская область', 'Ярославль', 'Владимирская область', 'Владимир', 'Александров', 'Гусь-Хрустальный', 'Муром', 'Ивановская область', 'Иваново', 'Рязанская область', 'Рязань', 'Тамбовская область', 'Тамбов', 'Воронежская область', 'Воронеж', 'Южный федеральный округ', 'Ростовская область', 'Ростов-на-Дону', 'Шахты', 'Таганрог', 'Новочеркасск', 'Волгодонск', 'Краснодарский край', 'Краснодар', 'Анапа', 'Новороссийск', 'Сочи', 'Туапсе', 'Геленджик', 'Армавир', 'Ейск', 'Республика Адыгея', 'Майкоп', 'Карачаево-Черкесская республика', 'Черкесск', 'Кабардино-Балкарская республика', 'Нальчик', 'Северная Осетия', 'Владикавказ', 'Республика Ингушетия', 'Чеченская республика', 'Грозный', 'Республика Дагестан', 'Махачкала', 'Ставропольский край', 'Ставрополь', 'Каменск-Шахтинский', 'Пятигорск', 'Минеральные Воды', 'Ессентуки', 'Кисловодск', 'Республика Калмыкия', 'Элиста', 'Астраханская область', 'Астрахань', 'Волгоградская область', 'Волгоград', 'Поволжье', 'Саратовская область', 'Саратов', 'Жигулевск', 'Балаково', 'Пензенская область', 'Пенза', 'Республика Мордовия', 'Саранск', 'Ульяновская область', 'Ульяновск', 'Самарская область', 'Самара', 'Тольятти', 'Сызрань', 'Чувашская республика', 'Чебоксары', 'Республика Марий Эл', 'Йошкар-Ола', 'Нижегородская область', 'Нижний Новгород', 'Саров', 'Кировская область', 'Киров', 'Костромская область', 'Кострома', 'Вологодская область', 'Вологда', 'Архангельская область', 'Архангельск', 'Северодвинск', 'Ненецкий автономный округ', 'Республика Коми', 'Сыктывкар', 'Удмуртская республика', 'Ижевск', 'Пермский край', 'Пермь', 'Республика Башкортостан', 'Уфа', 'Нефтекамск', 'Салават', 'Стерлитамак', 'Оренбургская область', 'Оренбург', 'Дзержинск', 'Урал', 'Челябинская область', 'Челябинск', 'Магнитогорск', 'Снежинск', 'Курганская область', 'Курган', 'Свердловская область', 'Екатеринбург', 'Каменск-Уральский', 'Нижний Тагил', 'Новоуральск', 'Первоуральск', 'Тюменская область', 'Тюмень', 
'Тобольск', 'Ханты-Мансийский автономный округ', 'Ханты-Мансийск', 'Сургут', 'Нижневартовск', 'Сибирь', 'Омская область', 'Омск', 'Новосибирская область', 'Новосибирск', 'Бердск', 'Томская область', 'Томск', 'Ямало-Ненецкий автономный округ', 'Салехард', 'Алтайский край', 'Барнаул', 'Бийск', 'Рубцовск', 'Республика Алтай', 'Горно-Алтайск', 'Кемеровская область', 'Кемерово', 'Междуреченск', 'Новокузнецк', 'Прокопьевск', 'Республика Хакасия', 'Абакан', 'Республика Тыва', 'Кызыл', 'Красноярский край', 'Красноярск', 'Ачинск', 'Норильск', 'Железногорск', 'Иркутская область', 'Иркутск', 'Братск', 'Республика Бурятия', 'Улан-Удэ', 'Забайкальский край', 'Чита', 'Дальневосточный федеральный округ', 'Республика Саха (Якутия)', 'Якутск', 'Амурская область', 'Благовещенск', 'Еврейская автономная область', 'Биробиджан', 'Приморский край', 'Владивосток', 'Находка', 'Уссурийск', 'Чукотский автономный округ', 'Анадырь', 'Камчатский край', 'Петропавловск-Камчатский', 'Магаданская область', 'Магадан', 'Сахалинская область', 'Южно-Сахалинск', 'Хабаровский край', 'Хабаровск', 'Комсомольск-на-Амуре' ) combo.current(1) # установите вариант по умолчанию combo.pack() lable_21 = Label(frame_5, text="Глубина парсинга:") lable_21.pack() spin = Spinbox(frame_5, from_=0, to=10, width=5, textvariable=1) spin.pack() lable_3 = Label(frame_6, text="Запросы:") lable_3.pack() text = Text(frame_6, width=38, height=12, bg="lightblue", fg='black', wrap=WORD) text.pack() button_1 = Button(frame_8, text="Собрать данные") button_1.pack(side=RIGHT, pady=5) button_1.bind("<Button-1>", output) # button_2 = Button(frame_7, text="Остановить") # button_2.pack(side=LEFT) # button_2.bind("<Button-1>", stop_pars) # button_2.config(state="disabled") barVar = DoubleVar() barVar.set(0) progress_bar = Progressbar(frame_7, orient = HORIZONTAL, length = 300, variable=barVar, mode = 'determinate') progress_bar.pack(pady=5) root.event_add('<<Paste>>', '<Control-igrave>') root.event_add("<<Copy>>", "<Control-ntilde>") 
root.mainloop()
ok() title_she
identifier_body
pars.py
from tkinter import * from tkinter.ttk import * from selenium import webdriver from selenium.webdriver.chrome.options import Options from bs4 import BeautifulSoup import re import datetime import openpyxl dict_regions_russia = { 'Планета Земля':-1, 'Без учета региона': 0, 'Европа': 111, 'СНГ': 166, 'Универсальное': 318, 'Азия': 183, 'Россия': 225, 'Северо-Западный федеральный округ': 17, 'Калининградская область': 10857, 'Калининград': 22, 'Мурманская область': 10897, 'Мурманск': 24, 'Республика Карелия': 10933, 'Петрозаводск': 18, 'Санкт-Петербург и Ленинградская область': 10174, 'Санкт-Петербург' :2, 'Псковская область' :10926, 'Псков' :25, 'Великие Луки' :10928, 'Новгородская область' :10904, 'Великий Новгород' :24, 'Центральный федеральный округ' :3, 'Тверская область' :10819, 'Тверь' :14, 'Смоленская область' :10795, 'Смоленск' :12, 'Брянская область' :10650, 'Брянск' :191, 'Калужская область' :10693, 'Калуга' :6, 'Обнинск' :967, 'Курская область' :10705, 'Курск' :8, 'Орловская область' :10772, 'Орёл' :10, 'Тульская область' :10832, 'Тула' :15, 'Москва и Московская область' :1, 'Москва' :213, 'Долгопрудный' :214, 'Дубна' :215, 'Зеленоград' :216, 'Пущино' :217, 'Белгородская область' :10645, 'Белгород' :4, 'Липецкая область' :10712, 'Липецк' :9, 'Ярославская область' :10841, 'Ярославль' :16, 'Владимирская область' :10658, 'Владимир' :192, 'Александров' :10656, 'Гусь-Хрустальный' :10661, 'Муром' :10668, 'Ивановская область' :10687, 'Иваново' :5, 'Рязанская область' :10776, 'Рязань' :11, 'Тамбовская область' :10802, 'Тамбов' :13, 'Воронежская область' :10672, 'Воронеж' :193, 'Южный федеральный округ' :26, 'Ростовская область' :11029, 'Ростов-на-Дону' :39, 'Шахты' :11053, 'Таганрог' :971, 'Новочеркасск' :238, 'Волгодонск' :11036, 'Краснодарский край' :10995, 'Краснодар' :35, 'Анапа' :1107, 'Новороссийск' :970, 'Сочи' :239, 'Туапсе' :1058, 'Геленджик' :10990, 'Армавир' :10987, 'Ейск' :10993, 'Республика Адыгея' :11004, 'Майкоп' :1093, 'Карачаево-Черкесская 
республика' :11020, 'Черкесск' :1104, 'Кабардино-Балкарская республика' :11013, 'Нальчик' :30, 'Северная Осетия' :11021, 'Владикавказ' :33, 'Республика Ингушетия' :11012, 'Чеченская республика' :11024, 'Грозный' :1106, 'Республика Дагестан' :11010, 'Махачкала' :28, 'Ставропольский край' :11069, 'Ставрополь' :36, 'Каменск-Шахтинский' :11043, 'Пятигорск' :11067, 'Минеральные Воды' :11063, 'Ессентуки' :11057, 'Кисловодск' :11062, 'Республика Калмыкия' :11015, 'Элиста' :1094, 'Астраханская область' :10946, 'Астрахань' :37, 'Волгоградская область' :10950, 'Волгоград' :38, 'Поволжье' :40, 'Саратовская область' :11146, 'Саратов' :194, 'Жигулевск' :11132, 'Балаково' :11143, 'Пензенская область' :11095, 'Пенза' :49, 'Республика Мордовия' :11117, 'Саранск' :42, 'Ульяновская область' :11153, 'Ульяновск' :195, 'Самарская область' :11131, 'Самара' :51, 'Тольятти' :240, 'Сызрань' :11139, 'Чувашская республика' :11156, 'Чебоксары' :45, 'Республика Марий Эл' :11077, 'Йошкар-Ола' :41, 'Нижегородская область' :11079, 'Нижний Новгород' :47, 'Саров' :11083, 'Кировская область' :11070, 'Киров' :46, 'Костромская область' :10699, 'Кострома' :7, 'Вологодская область' :10853, 'Вологда' :21, 'Архангельская область' :10842, 'Архангельск' :20, 'Северодвинск' :10849, 'Ненецкий автономный округ' :10176, 'Республика Коми' :10939, 'Сыктывкар' :19, 'Удмуртская республика' :11148, 'Ижевск' :44, 'Республика Татарстан' :11119, 'Казань' :43, 'Набережные Челны' :236, 'Нижнекамск' :11127, 'Пермский край' :11108, 'Пермь' :50, 'Республика Башкортостан' :11111, 'Уфа' :172, 'Нефтекамск' :11114, 'Салават' :11115, 'Стерлитамак' :11116, 'Оренбургская область' :11084, 'Оренбург' :48, 'Дзержинск' :972, 'Урал' :52, 'Челябинская область' :11225, 'Челябинск' :56, 'Магнитогорск' :235, 'Снежинск' :11218, 'Курганская область' :11158, 'Курган' :53, 'Свердловская область' :11162, 'Екатеринбург' :54, 'Каменск-Уральский' :11164, 'Нижний Тагил' :11168, 'Новоуральск' :11170, 'Первоуральск' :11171, 'Тюменская область' 
:11176, 'Тюмень' :55, 'Тобольск' :11175, 'Ханты-Мансийский автономный округ' :11193, 'Ханты-Мансийск' :57, 'Сургут' :973, 'Нижневартовск' :1091, 'Сибирь' :59, 'Омская область' :11318, 'Омск' :66, 'Новосибирская область' :11316, 'Новосибирск' :65, 'Бердск' :11314, 'Томская область' :11353, 'Томск' :67, 'Ямало-Ненецкий автономный округ' :11232, 'Салехард' :58, 'Алтайский край' :11235, 'Барнаул' :197, 'Бийск' :975, 'Рубцовск' :11251, 'Республика Алтай' :10231, 'Горно-Алтайск' :11319, 'Кемеровская область' :11282, 'Кемерово' :64, 'Междуреченск' :11287, 'Новокузнецк' :237, 'Прокопьевск' :11291, 'Республика Хакасия' :11340, 'Абакан' :1095, 'Республика Тыва' :10233, 'Кызыл' :11333, 'Красноярский край' :11309, 'Красноярск' :62, 'Ачинск' :11302, 'Норильск' :11311, 'Железногорск' :20086, 'Иркутская область' :11266, 'Иркутск' :63, 'Братск' :976, 'Республика Бурятия' :11330, 'Улан-Удэ' :198, 'Забайкальский край' :21949, 'Чита' :68, 'Дальневосточный федеральный округ' :73, 'Республика Саха (Якутия)' :11443, 'Якутск' :74, 'Амурская область' :11375, 'Благовещенск' :77, 'Еврейская автономная область' :10243, 'Биробиджан' :11393, 'Приморский край' :11409, 'Владивосток' :75, 'Находка' :974, 'Уссурийск' :11426, 'Чукотский автономный округ' :10251, 'Анадырь' :11458, 'Камчатский край' :11398, 'Петропавловск-Камчатский' :78, 'Магаданская область' :11403, 'Магадан' :79, 'Сахалинская область' :11450, 'Южно-Сахалинск' :80, 'Хабаровский край' :11457, 'Хабаровск' :76, 'Комсомольск-на-Амуре' :11453 } exceptions = [ '2gis.ru', 'yandex.ru', 'wikipedia', 'pulscen', 'blizko.ru', 'Avito.ru', 'avito.ru', 'edadeal.ru' ] def stop_pars(event): print('Stop Pars') def output(event): button_1.config(state="disabled") # button_2.config(state="normal") root.update() # получаю список запросов inquiries_text = text.get(1.0, END) inquiries_text = inquiries_text.split("\n") inquiries = [] for val in inquiries_text: if len(val) != 0: inquiries.append(val.strip()) # получаю регион region = 
dict_regions_russia[combo.get()] # получаю глубину парсинга deep_pars = spin.get() try: deep_pars = int(deep_pars) # отрабатывает исклчюение на то что ввели не цифру except ValueError: deep_pars = 1 # максимальная глубина парсинга 10 страниц if deep_pars > 10: deep_pars = 10 # если ввели ноль if deep_pars == 0: deep_pars = 1 progress = 0 main_simple_progress = ( 100 / len(inquiries) ) / int(deep_pars) # Запускаю selenium options = Options() # Запускаем драйвер без графической оболочки браузера options.headless = True # Убираем логирование в консоль options.add_argument('--log-level=3') # Инициализируем драйвер хром driver = webdriver.Chrome(chrome_options=options, executable_path='drivers/chromedriver.exe') for inquirie in inquiries: title_list = [] description_list = [] keywords_list = [] h1_list = [] h2_list = [] h3_list = [] for i in range(1, deep_pars + 1): # получаю страницу яндекс поиска q = 'https://yandex.ru/search/?text='+str(inquirie)+'&lr='+str(region)+'&p='+str(i) driver.get(q) soup = BeautifulSoup (driver.page_source, features="html.parser") links = [] # обрабатываю полученную страницу for link in soup.select('.serp-item .organic__url'): # делаю сравнение со списком исключений для ссылок check_link = True for exception_val in exceptions: result = re.search(exception_val, link.attrs["href"]) if result : check_link = False break # заполняю список собранными ссылками if check_link: links.append(link.attrs["href"]) one_part_progress = round( main_simple_progress / len(links), 1 ) for link in links: driver.get(link) soup_site = BeautifulSoup (driver.page_source, features="html.parser") if soup_site.title != None: title_list.append(soup_site.title.string) h3 = soup_site.find_all('h3') if h3 != None: for tag in h3: h3_list.append(tag.text) h2 = soup_site.find_all('h2') if h2 != None: for tag in h2: h2_list.append(tag.text) h1 = soup_site.find_all('h1') if h1 != None: for tag in h1: h1_list.append(tag.text) description = soup_site.find('meta', 
{'name':'description'}) if description != None: description_list.append(description.get('content')) keywords = soup_site.find('meta', {'name':'keywords'}) if keywords != None: keywords_list.append(keywords.get('content')) # создаю новую книгу workbook = openpyxl.Workbook() title_sheet = workbook.active # filename = datetime.datetime.today().strftime("%Y-%m-%d-%H-%M-%S") # выбираем активный лист и меняем ему название title_sheet.title = "title" if title_check.get() == True: i = 1 for word in title_list: cellref = title_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if h3_check.get() == True: # добавляю новую страницу h3_sheet = workbook.create_sheet('H3', 1) i = 1 for word in h3_list: cellref = h3_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if h2_check.get() == True: # добавляю новую страницу h2_sheet = workbook.create_sheet('H2', 1) i = 1 for word in h2_list: cellref = h2_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 # добавляю новую страницу if h1_check.get() == True: h1_sheet = workbook.create_sheet('H1', 1) i = 1 for word in h1_list: cellref = h1_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if keywords_check.get() == True: # добавляю новую страницу keywords_sheet = workbook.create_sheet('Keywords', 1) i = 1 for word in keywords_list: cellref = keywords_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 if desc_check.get() == True: # добавляю новую страницу description_sheet = workbook.create_sheet('Description', 1) i = 1 for word in description_list: cellref = description_sheet.cell(row=i, column=1) cellref.value = word i = i + 1 # сохраняю данные в exel workbook.save(filename = inquirie+'.xlsx') progress = progress + one_part_progress print('Прогресс '+ str(progress)) barVar.set(round(progress)) root.update() button_1.config(state="normal") # button_2.config(state="disabled") barVar.set(100) root.update() print('Done') # 
--------------------------------------------------------------------------------- # рисую интерфейс root = Tk() root.title("Парсер мета данных сайтов по запросам") root.geometry('400x450') root.resizable(width=False, height=False) frame_1 = Frame() frame_2 = Frame() frame_3 = Frame() frame_4 = Frame() frame_5 = Frame() frame_6 = Frame() frame_7 = Frame() frame_8 = Frame() frame_1.pack() frame_2.pack() frame_3.pack() frame_4.pack() frame_5.pack() frame_6.pack() frame_7.pack() frame_8.pack() lable_1 = Label(frame_1, text="Что собрать:") lable_1.pack() h1_check = BooleanVar() h1_check.set(1) c4 = Checkbutton(frame_1, text="h1", variable=h1_check, onvalue=1, offvalue=0) c4.pack(side=LEFT) title_check = BooleanVar() title_check.set(1) c1 = Checkbutton(frame_1, text="title", variable=title_check, onvalue=1, offvalue=0) c1.pack(side=RIGHT) h2_check = BooleanVar() h2_check.set(1) c5 = Checkbutton(frame_2, text="h2", variable=h2_check, onvalue=1, offvalue=0) c5.pack(side=LEFT) desc_check = BooleanVar() desc_check.set(1) c2 = Checkbutton(frame_2, text="desc", variable=desc_check, onvalue=1, offvalue=0) c2.pack(side=RIGHT) h3_check = BooleanVar() h3_check.set(
ble=h3_check, onvalue=1, offvalue=0) c6.pack(side=LEFT) keywords_check = BooleanVar() keywords_check.set(1) c3 = Checkbutton(frame_3, text="keys", variable=keywords_check, onvalue=1, offvalue=0) c3.pack(side=RIGHT) lable_2 = Label(frame_4, text="Регион:") lable_2.pack() combo = Combobox(frame_4) combo['values'] = ( 'Республика Татарстан', 'Казань', 'Москва и Московская область', 'Москва', 'Набережные Челны', 'Нижнекамск', 'Санкт-Петербург и Ленинградская область', 'Санкт-Петербург', 'Планета Земля', 'Без учета региона', 'Европа', 'СНГ', 'Универсальное', 'Азия', 'Россия', 'Северо-Западный федеральный округ', 'Калининградская область', 'Калининград', 'Мурманская область', 'Мурманск', 'Республика Карелия', 'Петрозаводск', 'Псковская область', 'Псков', 'Великие Луки', 'Новгородская область', 'Великий Новгород', 'Центральный федеральный округ', 'Тверская область', 'Тверь', 'Смоленская область', 'Смоленск', 'Брянская область', 'Брянск', 'Калужская область', 'Калуга', 'Обнинск', 'Курская область', 'Курск', 'Орловская область', 'Орёл', 'Тульская область', 'Тула', 'Долгопрудный', 'Дубна', 'Зеленоград', 'Пущино', 'Белгородская область', 'Белгород', 'Липецкая область', 'Липецк', 'Ярославская область', 'Ярославль', 'Владимирская область', 'Владимир', 'Александров', 'Гусь-Хрустальный', 'Муром', 'Ивановская область', 'Иваново', 'Рязанская область', 'Рязань', 'Тамбовская область', 'Тамбов', 'Воронежская область', 'Воронеж', 'Южный федеральный округ', 'Ростовская область', 'Ростов-на-Дону', 'Шахты', 'Таганрог', 'Новочеркасск', 'Волгодонск', 'Краснодарский край', 'Краснодар', 'Анапа', 'Новороссийск', 'Сочи', 'Туапсе', 'Геленджик', 'Армавир', 'Ейск', 'Республика Адыгея', 'Майкоп', 'Карачаево-Черкесская республика', 'Черкесск', 'Кабардино-Балкарская республика', 'Нальчик', 'Северная Осетия', 'Владикавказ', 'Республика Ингушетия', 'Чеченская республика', 'Грозный', 'Республика Дагестан', 'Махачкала', 'Ставропольский край', 'Ставрополь', 'Каменск-Шахтинский', 'Пятигорск', 'Минеральные 
Воды', 'Ессентуки', 'Кисловодск', 'Республика Калмыкия', 'Элиста', 'Астраханская область', 'Астрахань', 'Волгоградская область', 'Волгоград', 'Поволжье', 'Саратовская область', 'Саратов', 'Жигулевск', 'Балаково', 'Пензенская область', 'Пенза', 'Республика Мордовия', 'Саранск', 'Ульяновская область', 'Ульяновск', 'Самарская область', 'Самара', 'Тольятти', 'Сызрань', 'Чувашская республика', 'Чебоксары', 'Республика Марий Эл', 'Йошкар-Ола', 'Нижегородская область', 'Нижний Новгород', 'Саров', 'Кировская область', 'Киров', 'Костромская область', 'Кострома', 'Вологодская область', 'Вологда', 'Архангельская область', 'Архангельск', 'Северодвинск', 'Ненецкий автономный округ', 'Республика Коми', 'Сыктывкар', 'Удмуртская республика', 'Ижевск', 'Пермский край', 'Пермь', 'Республика Башкортостан', 'Уфа', 'Нефтекамск', 'Салават', 'Стерлитамак', 'Оренбургская область', 'Оренбург', 'Дзержинск', 'Урал', 'Челябинская область', 'Челябинск', 'Магнитогорск', 'Снежинск', 'Курганская область', 'Курган', 'Свердловская область', 'Екатеринбург', 'Каменск-Уральский', 'Нижний Тагил', 'Новоуральск', 'Первоуральск', 'Тюменская область', 'Тюмень', 'Тобольск', 'Ханты-Мансийский автономный округ', 'Ханты-Мансийск', 'Сургут', 'Нижневартовск', 'Сибирь', 'Омская область', 'Омск', 'Новосибирская область', 'Новосибирск', 'Бердск', 'Томская область', 'Томск', 'Ямало-Ненецкий автономный округ', 'Салехард', 'Алтайский край', 'Барнаул', 'Бийск', 'Рубцовск', 'Республика Алтай', 'Горно-Алтайск', 'Кемеровская область', 'Кемерово', 'Междуреченск', 'Новокузнецк', 'Прокопьевск', 'Республика Хакасия', 'Абакан', 'Республика Тыва', 'Кызыл', 'Красноярский край', 'Красноярск', 'Ачинск', 'Норильск', 'Железногорск', 'Иркутская область', 'Иркутск', 'Братск', 'Республика Бурятия', 'Улан-Удэ', 'Забайкальский край', 'Чита', 'Дальневосточный федеральный округ', 'Республика Саха (Якутия)', 'Якутск', 'Амурская область', 'Благовещенск', 'Еврейская автономная область', 'Биробиджан', 'Приморский край', 'Владивосток', 
'Находка', 'Уссурийск', 'Чукотский автономный округ', 'Анадырь', 'Камчатский край', 'Петропавловск-Камчатский', 'Магаданская область', 'Магадан', 'Сахалинская область', 'Южно-Сахалинск', 'Хабаровский край', 'Хабаровск', 'Комсомольск-на-Амуре' ) combo.current(1) # установите вариант по умолчанию combo.pack() lable_21 = Label(frame_5, text="Глубина парсинга:") lable_21.pack() spin = Spinbox(frame_5, from_=0, to=10, width=5, textvariable=1) spin.pack() lable_3 = Label(frame_6, text="Запросы:") lable_3.pack() text = Text(frame_6, width=38, height=12, bg="lightblue", fg='black', wrap=WORD) text.pack() button_1 = Button(frame_8, text="Собрать данные") button_1.pack(side=RIGHT, pady=5) button_1.bind("<Button-1>", output) # button_2 = Button(frame_7, text="Остановить") # button_2.pack(side=LEFT) # button_2.bind("<Button-1>", stop_pars) # button_2.config(state="disabled") barVar = DoubleVar() barVar.set(0) progress_bar = Progressbar(frame_7, orient = HORIZONTAL, length = 300, variable=barVar, mode = 'determinate') progress_bar.pack(pady=5) root.event_add('<<Paste>>', '<Control-igrave>') root.event_add("<<Copy>>", "<Control-ntilde>") root.mainloop()
1) c6 = Checkbutton(frame_3, text="h3", varia
conditional_block
openai.py
import os from contextlib import contextmanager from typing import Any, Callable, Dict, List from pydantic import Field, validator, PositiveInt from enum import Enum import openai from langchain.chat_models import AzureChatOpenAI, ChatOpenAI from langchain.llms import AzureOpenAI, OpenAI from langchain.llms.openai import BaseOpenAI from langchain.base_language import BaseLanguageModel from athena.logger import logger from .model_config import ModelConfig OPENAI_PREFIX = "openai_" AZURE_OPENAI_PREFIX = "azure_openai_" ######################################################################### # Monkey patching openai/langchain api # # ===================================================================== # # This allows us to have multiple api keys i.e. mixing # # openai and azure openai api keys so we can use not only deployed # # models but also models from the non-azure openai api. # # This is mostly for testing purposes, in production we can just deploy # # the models to azure that we want to use. 
# ######################################################################### def _wrap(old: Any, new: Any) -> Callable: def repl(*args: Any, **kwargs: Any) -> Any: new(args[0]) # args[0] is self return old(*args, **kwargs) return repl def _async_wrap(old: Any, new: Any): async def repl(*args, **kwargs): new(args[0]) # args[0] is self return await old(*args, **kwargs) return repl def _set_credentials(self): openai.api_key = self.openai_api_key api_type = "open_ai" api_base = "https://api.openai.com/v1" api_version = None if hasattr(self, "openai_api_type"): api_type = self.openai_api_type if api_type == "azure": if hasattr(self, "openai_api_base"): api_base = self.openai_api_base if hasattr(self, "openai_api_version"): api_version = self.openai_api_version openai.api_type = api_type openai.api_base = api_base openai.api_version = api_version # Monkey patching langchain # pylint: disable=protected-access ChatOpenAI._generate = _wrap(ChatOpenAI._generate, _set_credentials) # type: ignore ChatOpenAI._agenerate = _async_wrap(ChatOpenAI._agenerate, _set_credentials) # type: ignore BaseOpenAI._generate = _wrap(BaseOpenAI._generate, _set_credentials) # type: ignore BaseOpenAI._agenerate = _async_wrap(BaseOpenAI._agenerate, _set_credentials) # type: ignore # pylint: enable=protected-access ######################################################################### # Monkey patching end # ######################################################################### def _use_azure_credentials(): openai.api_type = "azure" openai.api_key = os.environ.get("LLM_AZURE_OPENAI_API_KEY") openai.api_base = os.environ.get("LLM_AZURE_OPENAI_API_BASE") # os.environ.get("LLM_AZURE_OPENAI_API_VERSION") openai.api_version = "2023-03-15-preview" def _use_openai_credentials(): openai.api_type = "open_ai" openai.api_key = os.environ.get("LLM_OPENAI_API_KEY") openai.api_base = "https://api.openai.com/v1" openai.api_version = None openai_available = os.environ.get("LLM_OPENAI_API_KEY") is not None 
azure_openai_available = os.environ.get("LLM_AZURE_OPENAI_API_KEY") is not None # This is a hack to make sure that the openai api is set correctly # Right now it is overkill, but it will be useful when the api gets fixed and we no longer # hardcode the model names (i.e. OpenAI fixes their api) @contextmanager def _openai_client(use_azure_api: bool, is_preference: bool): """Set the openai client to use the correct api type, if available Args: use_azure_api (bool): If true, use the azure api, else use the openai api is_preference (bool): If true, it can fall back to the other api if the preferred one is not available """ if use_azure_api: if azure_openai_available: _use_azure_credentials() elif is_preference and openai_available: _use_openai_credentials() elif is_preference: raise EnvironmentError( "No OpenAI api available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and " "LLM_AZURE_OPENAI_API_VERSION environment variables or LLM_OPENAI_API_KEY environment variable" ) else: raise EnvironmentError( "Azure OpenAI api not available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and " "LLM_AZURE_OPENAI_API_VERSION environment variables" ) else: if openai_available: _use_openai_credentials() elif is_preference and azure_openai_available: _use_azure_credentials() elif is_preference: raise EnvironmentError( "No OpenAI api available, please set LLM_OPENAI_API_KEY environment variable or LLM_AZURE_OPENAI_API_KEY, " "LLM_AZURE_OPENAI_API_BASE and LLM_AZURE_OPENAI_API_VERSION environment variables" ) else: raise EnvironmentError( "OpenAI api not available, please set LLM_OPENAI_API_KEY environment variable" ) # API client is setup correctly yield def _get_available_deployments(openai_models: Dict[str, List[str]], model_aliases: Dict[str, str]): available_deployments: Dict[str, Dict[str, Any]] = { "chat_completion": {}, "completion": {}, "fine_tuneing": {}, } if azure_openai_available: with _openai_client(use_azure_api=True, 
is_preference=False): deployments = openai.Deployment.list().get("data") or [] # type: ignore for deployment in deployments: model_name = deployment.model if model_name in model_aliases:
if model_name in openai_models["chat_completion"]: available_deployments["chat_completion"][deployment.id] = deployment elif model_name in openai_models["completion"]: available_deployments["completion"][deployment.id] = deployment elif model_name in openai_models["fine_tuneing"]: available_deployments["fine_tuneing"][deployment.id] = deployment return available_deployments def _get_available_models(openai_models: Dict[str, List[str]], available_deployments: Dict[str, Dict[str, Any]]): available_models: Dict[str, BaseLanguageModel] = {} if openai_available: openai_api_key = os.environ["LLM_OPENAI_API_KEY"] for model_name in openai_models["chat_completion"]: available_models[OPENAI_PREFIX + model_name] = ChatOpenAI( model=model_name, openai_api_key=openai_api_key, client="") for model_name in openai_models["completion"]: available_models[OPENAI_PREFIX + model_name] = OpenAI( model=model_name, openai_api_key=openai_api_key, client="") if azure_openai_available: azure_openai_api_key = os.environ["LLM_AZURE_OPENAI_API_KEY"] azure_openai_api_base = os.environ["LLM_AZURE_OPENAI_API_BASE"] azure_openai_api_version = os.environ["LLM_AZURE_OPENAI_API_VERSION"] for model_type, Model in [("chat_completion", AzureChatOpenAI), ("completion", AzureOpenAI)]: for deployment_name, deployment in available_deployments[model_type].items(): available_models[AZURE_OPENAI_PREFIX + deployment_name] = Model( model=deployment.model, deployment_name=deployment_name, openai_api_base=azure_openai_api_base, openai_api_version=azure_openai_api_version, openai_api_key=azure_openai_api_key, client="", ) return available_models _model_aliases = { "gpt-35-turbo": "gpt-3.5-turbo", } # Hardcoded because openai can't provide a trustworthly api to get the list of models and capabilities... 
openai_models = { "chat_completion": [ "gpt-4", "gpt-4-32k", "gpt-3.5-turbo", "gpt-3.5-turbo-16k" ], "completion": [ "text-davinci-003", "text-curie-001", "text-babbage-001", "text-ada-001", ], "fine_tuneing": [ "davinci", "curie", "babbage", "ada", ] } available_deployments = _get_available_deployments(openai_models, _model_aliases) available_models = _get_available_models(openai_models, available_deployments) logger.info("Available openai models: %s", ", ".join(available_models.keys())) OpenAIModel = Enum('OpenAIModel', {name: name for name in available_models}) # type: ignore default_openai_model = OpenAIModel[os.environ.get("LLM_DEFAULT_MODEL", "gpt-3.5-turbo")] # Long descriptions will be displayed in the playground UI and are copied from the OpenAI docs class OpenAIModelConfig(ModelConfig): """OpenAI LLM configuration.""" model_name: OpenAIModel = Field(default=default_openai_model, # type: ignore description="The name of the model to use.") max_tokens: PositiveInt = Field(1024, description="""\ The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length. \ [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\ """) temperature: float = Field(default=0.0, ge=0, le=2, description="""\ What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, \ while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both.\ """) top_p: float = Field(default=1, ge=0, le=1, description="""\ An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. \ So 0.1 means only the tokens comprising the top 10% probability mass are considered. 
We generally recommend altering this or `temperature` but not both.\ """) presence_penalty: float = Field(default=0, ge=-2, le=2, description="""\ Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, \ increasing the model's likelihood to talk about new topics. [See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\ """) frequency_penalty: float = Field(default=0, ge=-2, le=2, description="""\ Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, \ decreasing the model's likelihood to repeat the same line verbatim. [See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\ """) @validator('max_tokens') def max_tokens_must_be_positive(cls, v): """ Validate that max_tokens is a positive integer. """ if v <= 0: raise ValueError('max_tokens must be a positive integer') return v def get_model(self) -> BaseLanguageModel: """Get the model from the configuration. Returns: BaseLanguageModel: The model. """ model = available_models[self.model_name.value] kwargs = model._lc_kwargs secrets = {secret: getattr(model, secret) for secret in model.lc_secrets.keys()} kwargs.update(secrets) model_kwargs = kwargs.get("model_kwargs", {}) for attr, value in self.dict().items(): if attr == "model_name": # Skip model_name continue if hasattr(model, attr): # If the model has the attribute, add it to kwargs kwargs[attr] = value else: # Otherwise, add it to model_kwargs (necessary for chat models) model_kwargs[attr] = value kwargs["model_kwargs"] = model_kwargs # Initialize a copy of the model using the config model = model.__class__(**kwargs) return model
model_name = model_aliases[model_name]
conditional_block
openai.py
import os from contextlib import contextmanager from typing import Any, Callable, Dict, List from pydantic import Field, validator, PositiveInt from enum import Enum import openai from langchain.chat_models import AzureChatOpenAI, ChatOpenAI from langchain.llms import AzureOpenAI, OpenAI from langchain.llms.openai import BaseOpenAI from langchain.base_language import BaseLanguageModel from athena.logger import logger from .model_config import ModelConfig OPENAI_PREFIX = "openai_" AZURE_OPENAI_PREFIX = "azure_openai_" ######################################################################### # Monkey patching openai/langchain api # # ===================================================================== # # This allows us to have multiple api keys i.e. mixing # # openai and azure openai api keys so we can use not only deployed # # models but also models from the non-azure openai api. # # This is mostly for testing purposes, in production we can just deploy # # the models to azure that we want to use. 
# ######################################################################### def _wrap(old: Any, new: Any) -> Callable: def repl(*args: Any, **kwargs: Any) -> Any: new(args[0]) # args[0] is self return old(*args, **kwargs) return repl def _async_wrap(old: Any, new: Any): async def repl(*args, **kwargs): new(args[0]) # args[0] is self return await old(*args, **kwargs) return repl def _set_credentials(self): openai.api_key = self.openai_api_key api_type = "open_ai" api_base = "https://api.openai.com/v1" api_version = None if hasattr(self, "openai_api_type"): api_type = self.openai_api_type if api_type == "azure": if hasattr(self, "openai_api_base"): api_base = self.openai_api_base if hasattr(self, "openai_api_version"): api_version = self.openai_api_version openai.api_type = api_type openai.api_base = api_base openai.api_version = api_version # Monkey patching langchain # pylint: disable=protected-access ChatOpenAI._generate = _wrap(ChatOpenAI._generate, _set_credentials) # type: ignore ChatOpenAI._agenerate = _async_wrap(ChatOpenAI._agenerate, _set_credentials) # type: ignore BaseOpenAI._generate = _wrap(BaseOpenAI._generate, _set_credentials) # type: ignore BaseOpenAI._agenerate = _async_wrap(BaseOpenAI._agenerate, _set_credentials) # type: ignore # pylint: enable=protected-access ######################################################################### # Monkey patching end # ######################################################################### def _use_azure_credentials(): openai.api_type = "azure" openai.api_key = os.environ.get("LLM_AZURE_OPENAI_API_KEY") openai.api_base = os.environ.get("LLM_AZURE_OPENAI_API_BASE") # os.environ.get("LLM_AZURE_OPENAI_API_VERSION") openai.api_version = "2023-03-15-preview" def _use_openai_credentials(): openai.api_type = "open_ai" openai.api_key = os.environ.get("LLM_OPENAI_API_KEY") openai.api_base = "https://api.openai.com/v1" openai.api_version = None openai_available = os.environ.get("LLM_OPENAI_API_KEY") is not None 
azure_openai_available = os.environ.get("LLM_AZURE_OPENAI_API_KEY") is not None # This is a hack to make sure that the openai api is set correctly # Right now it is overkill, but it will be useful when the api gets fixed and we no longer # hardcode the model names (i.e. OpenAI fixes their api) @contextmanager def _openai_client(use_azure_api: bool, is_preference: bool): """Set the openai client to use the correct api type, if available Args: use_azure_api (bool): If true, use the azure api, else use the openai api is_preference (bool): If true, it can fall back to the other api if the preferred one is not available """ if use_azure_api: if azure_openai_available: _use_azure_credentials() elif is_preference and openai_available: _use_openai_credentials() elif is_preference: raise EnvironmentError( "No OpenAI api available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and " "LLM_AZURE_OPENAI_API_VERSION environment variables or LLM_OPENAI_API_KEY environment variable" ) else: raise EnvironmentError( "Azure OpenAI api not available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and " "LLM_AZURE_OPENAI_API_VERSION environment variables" ) else: if openai_available: _use_openai_credentials() elif is_preference and azure_openai_available: _use_azure_credentials() elif is_preference: raise EnvironmentError( "No OpenAI api available, please set LLM_OPENAI_API_KEY environment variable or LLM_AZURE_OPENAI_API_KEY, " "LLM_AZURE_OPENAI_API_BASE and LLM_AZURE_OPENAI_API_VERSION environment variables" ) else: raise EnvironmentError( "OpenAI api not available, please set LLM_OPENAI_API_KEY environment variable" ) # API client is setup correctly yield def _get_available_deployments(openai_models: Dict[str, List[str]], model_aliases: Dict[str, str]): available_deployments: Dict[str, Dict[str, Any]] = { "chat_completion": {}, "completion": {}, "fine_tuneing": {}, } if azure_openai_available: with _openai_client(use_azure_api=True, 
is_preference=False): deployments = openai.Deployment.list().get("data") or [] # type: ignore for deployment in deployments: model_name = deployment.model if model_name in model_aliases: model_name = model_aliases[model_name] if model_name in openai_models["chat_completion"]: available_deployments["chat_completion"][deployment.id] = deployment elif model_name in openai_models["completion"]: available_deployments["completion"][deployment.id] = deployment elif model_name in openai_models["fine_tuneing"]: available_deployments["fine_tuneing"][deployment.id] = deployment return available_deployments def _get_available_models(openai_models: Dict[str, List[str]], available_deployments: Dict[str, Dict[str, Any]]): available_models: Dict[str, BaseLanguageModel] = {} if openai_available: openai_api_key = os.environ["LLM_OPENAI_API_KEY"] for model_name in openai_models["chat_completion"]: available_models[OPENAI_PREFIX + model_name] = ChatOpenAI( model=model_name, openai_api_key=openai_api_key, client="") for model_name in openai_models["completion"]: available_models[OPENAI_PREFIX + model_name] = OpenAI( model=model_name, openai_api_key=openai_api_key, client="") if azure_openai_available: azure_openai_api_key = os.environ["LLM_AZURE_OPENAI_API_KEY"] azure_openai_api_base = os.environ["LLM_AZURE_OPENAI_API_BASE"] azure_openai_api_version = os.environ["LLM_AZURE_OPENAI_API_VERSION"] for model_type, Model in [("chat_completion", AzureChatOpenAI), ("completion", AzureOpenAI)]: for deployment_name, deployment in available_deployments[model_type].items(): available_models[AZURE_OPENAI_PREFIX + deployment_name] = Model( model=deployment.model, deployment_name=deployment_name, openai_api_base=azure_openai_api_base, openai_api_version=azure_openai_api_version, openai_api_key=azure_openai_api_key, client="", ) return available_models _model_aliases = { "gpt-35-turbo": "gpt-3.5-turbo", } # Hardcoded because openai can't provide a trustworthly api to get the list of models and 
capabilities... openai_models = { "chat_completion": [ "gpt-4", "gpt-4-32k", "gpt-3.5-turbo", "gpt-3.5-turbo-16k" ], "completion": [ "text-davinci-003", "text-curie-001", "text-babbage-001", "text-ada-001", ], "fine_tuneing": [ "davinci", "curie", "babbage", "ada", ] } available_deployments = _get_available_deployments(openai_models, _model_aliases) available_models = _get_available_models(openai_models, available_deployments) logger.info("Available openai models: %s", ", ".join(available_models.keys())) OpenAIModel = Enum('OpenAIModel', {name: name for name in available_models}) # type: ignore default_openai_model = OpenAIModel[os.environ.get("LLM_DEFAULT_MODEL", "gpt-3.5-turbo")] # Long descriptions will be displayed in the playground UI and are copied from the OpenAI docs class OpenAIModelConfig(ModelConfig): """OpenAI LLM configuration.""" model_name: OpenAIModel = Field(default=default_openai_model, # type: ignore description="The name of the model to use.") max_tokens: PositiveInt = Field(1024, description="""\ The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length. \ [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\ """) temperature: float = Field(default=0.0, ge=0, le=2, description="""\ What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, \ while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both.\ """) top_p: float = Field(default=1, ge=0, le=1, description="""\ An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. 
\ So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both.\ """) presence_penalty: float = Field(default=0, ge=-2, le=2, description="""\ Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, \ increasing the model's likelihood to talk about new topics. [See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\ """) frequency_penalty: float = Field(default=0, ge=-2, le=2, description="""\ Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, \ decreasing the model's likelihood to repeat the same line verbatim. [See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\ """) @validator('max_tokens') def max_tokens_must_be_positive(cls, v): """ Validate that max_tokens is a positive integer. """ if v <= 0: raise ValueError('max_tokens must be a positive integer') return v def get_model(self) -> BaseLanguageModel:
BaseLanguageModel: The model. """ model = available_models[self.model_name.value] kwargs = model._lc_kwargs secrets = {secret: getattr(model, secret) for secret in model.lc_secrets.keys()} kwargs.update(secrets) model_kwargs = kwargs.get("model_kwargs", {}) for attr, value in self.dict().items(): if attr == "model_name": # Skip model_name continue if hasattr(model, attr): # If the model has the attribute, add it to kwargs kwargs[attr] = value else: # Otherwise, add it to model_kwargs (necessary for chat models) model_kwargs[attr] = value kwargs["model_kwargs"] = model_kwargs # Initialize a copy of the model using the config model = model.__class__(**kwargs) return model
"""Get the model from the configuration. Returns:
random_line_split
openai.py
import os from contextlib import contextmanager from typing import Any, Callable, Dict, List from pydantic import Field, validator, PositiveInt from enum import Enum import openai from langchain.chat_models import AzureChatOpenAI, ChatOpenAI from langchain.llms import AzureOpenAI, OpenAI from langchain.llms.openai import BaseOpenAI from langchain.base_language import BaseLanguageModel from athena.logger import logger from .model_config import ModelConfig OPENAI_PREFIX = "openai_" AZURE_OPENAI_PREFIX = "azure_openai_" ######################################################################### # Monkey patching openai/langchain api # # ===================================================================== # # This allows us to have multiple api keys i.e. mixing # # openai and azure openai api keys so we can use not only deployed # # models but also models from the non-azure openai api. # # This is mostly for testing purposes, in production we can just deploy # # the models to azure that we want to use. 
# ######################################################################### def _wrap(old: Any, new: Any) -> Callable: def repl(*args: Any, **kwargs: Any) -> Any: new(args[0]) # args[0] is self return old(*args, **kwargs) return repl def _async_wrap(old: Any, new: Any): async def repl(*args, **kwargs): new(args[0]) # args[0] is self return await old(*args, **kwargs) return repl def _set_credentials(self): openai.api_key = self.openai_api_key api_type = "open_ai" api_base = "https://api.openai.com/v1" api_version = None if hasattr(self, "openai_api_type"): api_type = self.openai_api_type if api_type == "azure": if hasattr(self, "openai_api_base"): api_base = self.openai_api_base if hasattr(self, "openai_api_version"): api_version = self.openai_api_version openai.api_type = api_type openai.api_base = api_base openai.api_version = api_version # Monkey patching langchain # pylint: disable=protected-access ChatOpenAI._generate = _wrap(ChatOpenAI._generate, _set_credentials) # type: ignore ChatOpenAI._agenerate = _async_wrap(ChatOpenAI._agenerate, _set_credentials) # type: ignore BaseOpenAI._generate = _wrap(BaseOpenAI._generate, _set_credentials) # type: ignore BaseOpenAI._agenerate = _async_wrap(BaseOpenAI._agenerate, _set_credentials) # type: ignore # pylint: enable=protected-access ######################################################################### # Monkey patching end # ######################################################################### def _use_azure_credentials(): openai.api_type = "azure" openai.api_key = os.environ.get("LLM_AZURE_OPENAI_API_KEY") openai.api_base = os.environ.get("LLM_AZURE_OPENAI_API_BASE") # os.environ.get("LLM_AZURE_OPENAI_API_VERSION") openai.api_version = "2023-03-15-preview" def _use_openai_credentials(): openai.api_type = "open_ai" openai.api_key = os.environ.get("LLM_OPENAI_API_KEY") openai.api_base = "https://api.openai.com/v1" openai.api_version = None openai_available = os.environ.get("LLM_OPENAI_API_KEY") is not None 
azure_openai_available = os.environ.get("LLM_AZURE_OPENAI_API_KEY") is not None # This is a hack to make sure that the openai api is set correctly # Right now it is overkill, but it will be useful when the api gets fixed and we no longer # hardcode the model names (i.e. OpenAI fixes their api) @contextmanager def _openai_client(use_azure_api: bool, is_preference: bool): """Set the openai client to use the correct api type, if available Args: use_azure_api (bool): If true, use the azure api, else use the openai api is_preference (bool): If true, it can fall back to the other api if the preferred one is not available """ if use_azure_api: if azure_openai_available: _use_azure_credentials() elif is_preference and openai_available: _use_openai_credentials() elif is_preference: raise EnvironmentError( "No OpenAI api available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and " "LLM_AZURE_OPENAI_API_VERSION environment variables or LLM_OPENAI_API_KEY environment variable" ) else: raise EnvironmentError( "Azure OpenAI api not available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and " "LLM_AZURE_OPENAI_API_VERSION environment variables" ) else: if openai_available: _use_openai_credentials() elif is_preference and azure_openai_available: _use_azure_credentials() elif is_preference: raise EnvironmentError( "No OpenAI api available, please set LLM_OPENAI_API_KEY environment variable or LLM_AZURE_OPENAI_API_KEY, " "LLM_AZURE_OPENAI_API_BASE and LLM_AZURE_OPENAI_API_VERSION environment variables" ) else: raise EnvironmentError( "OpenAI api not available, please set LLM_OPENAI_API_KEY environment variable" ) # API client is setup correctly yield def _get_available_deployments(openai_models: Dict[str, List[str]], model_aliases: Dict[str, str]): available_deployments: Dict[str, Dict[str, Any]] = { "chat_completion": {}, "completion": {}, "fine_tuneing": {}, } if azure_openai_available: with _openai_client(use_azure_api=True, 
is_preference=False): deployments = openai.Deployment.list().get("data") or [] # type: ignore for deployment in deployments: model_name = deployment.model if model_name in model_aliases: model_name = model_aliases[model_name] if model_name in openai_models["chat_completion"]: available_deployments["chat_completion"][deployment.id] = deployment elif model_name in openai_models["completion"]: available_deployments["completion"][deployment.id] = deployment elif model_name in openai_models["fine_tuneing"]: available_deployments["fine_tuneing"][deployment.id] = deployment return available_deployments def _get_available_models(openai_models: Dict[str, List[str]], available_deployments: Dict[str, Dict[str, Any]]): available_models: Dict[str, BaseLanguageModel] = {} if openai_available: openai_api_key = os.environ["LLM_OPENAI_API_KEY"] for model_name in openai_models["chat_completion"]: available_models[OPENAI_PREFIX + model_name] = ChatOpenAI( model=model_name, openai_api_key=openai_api_key, client="") for model_name in openai_models["completion"]: available_models[OPENAI_PREFIX + model_name] = OpenAI( model=model_name, openai_api_key=openai_api_key, client="") if azure_openai_available: azure_openai_api_key = os.environ["LLM_AZURE_OPENAI_API_KEY"] azure_openai_api_base = os.environ["LLM_AZURE_OPENAI_API_BASE"] azure_openai_api_version = os.environ["LLM_AZURE_OPENAI_API_VERSION"] for model_type, Model in [("chat_completion", AzureChatOpenAI), ("completion", AzureOpenAI)]: for deployment_name, deployment in available_deployments[model_type].items(): available_models[AZURE_OPENAI_PREFIX + deployment_name] = Model( model=deployment.model, deployment_name=deployment_name, openai_api_base=azure_openai_api_base, openai_api_version=azure_openai_api_version, openai_api_key=azure_openai_api_key, client="", ) return available_models _model_aliases = { "gpt-35-turbo": "gpt-3.5-turbo", } # Hardcoded because openai can't provide a trustworthly api to get the list of models and 
capabilities... openai_models = { "chat_completion": [ "gpt-4", "gpt-4-32k", "gpt-3.5-turbo", "gpt-3.5-turbo-16k" ], "completion": [ "text-davinci-003", "text-curie-001", "text-babbage-001", "text-ada-001", ], "fine_tuneing": [ "davinci", "curie", "babbage", "ada", ] } available_deployments = _get_available_deployments(openai_models, _model_aliases) available_models = _get_available_models(openai_models, available_deployments) logger.info("Available openai models: %s", ", ".join(available_models.keys())) OpenAIModel = Enum('OpenAIModel', {name: name for name in available_models}) # type: ignore default_openai_model = OpenAIModel[os.environ.get("LLM_DEFAULT_MODEL", "gpt-3.5-turbo")] # Long descriptions will be displayed in the playground UI and are copied from the OpenAI docs class OpenAIModelConfig(ModelConfig):
"""OpenAI LLM configuration.""" model_name: OpenAIModel = Field(default=default_openai_model, # type: ignore description="The name of the model to use.") max_tokens: PositiveInt = Field(1024, description="""\ The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length. \ [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\ """) temperature: float = Field(default=0.0, ge=0, le=2, description="""\ What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, \ while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both.\ """) top_p: float = Field(default=1, ge=0, le=1, description="""\ An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. \ So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both.\ """) presence_penalty: float = Field(default=0, ge=-2, le=2, description="""\ Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, \ increasing the model's likelihood to talk about new topics. [See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\ """) frequency_penalty: float = Field(default=0, ge=-2, le=2, description="""\ Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, \ decreasing the model's likelihood to repeat the same line verbatim. 
[See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\ """) @validator('max_tokens') def max_tokens_must_be_positive(cls, v): """ Validate that max_tokens is a positive integer. """ if v <= 0: raise ValueError('max_tokens must be a positive integer') return v def get_model(self) -> BaseLanguageModel: """Get the model from the configuration. Returns: BaseLanguageModel: The model. """ model = available_models[self.model_name.value] kwargs = model._lc_kwargs secrets = {secret: getattr(model, secret) for secret in model.lc_secrets.keys()} kwargs.update(secrets) model_kwargs = kwargs.get("model_kwargs", {}) for attr, value in self.dict().items(): if attr == "model_name": # Skip model_name continue if hasattr(model, attr): # If the model has the attribute, add it to kwargs kwargs[attr] = value else: # Otherwise, add it to model_kwargs (necessary for chat models) model_kwargs[attr] = value kwargs["model_kwargs"] = model_kwargs # Initialize a copy of the model using the config model = model.__class__(**kwargs) return model
identifier_body
openai.py
import os from contextlib import contextmanager from typing import Any, Callable, Dict, List from pydantic import Field, validator, PositiveInt from enum import Enum import openai from langchain.chat_models import AzureChatOpenAI, ChatOpenAI from langchain.llms import AzureOpenAI, OpenAI from langchain.llms.openai import BaseOpenAI from langchain.base_language import BaseLanguageModel from athena.logger import logger from .model_config import ModelConfig OPENAI_PREFIX = "openai_" AZURE_OPENAI_PREFIX = "azure_openai_" ######################################################################### # Monkey patching openai/langchain api # # ===================================================================== # # This allows us to have multiple api keys i.e. mixing # # openai and azure openai api keys so we can use not only deployed # # models but also models from the non-azure openai api. # # This is mostly for testing purposes, in production we can just deploy # # the models to azure that we want to use. # ######################################################################### def _wrap(old: Any, new: Any) -> Callable: def repl(*args: Any, **kwargs: Any) -> Any: new(args[0]) # args[0] is self return old(*args, **kwargs) return repl def
(old: Any, new: Any): async def repl(*args, **kwargs): new(args[0]) # args[0] is self return await old(*args, **kwargs) return repl def _set_credentials(self): openai.api_key = self.openai_api_key api_type = "open_ai" api_base = "https://api.openai.com/v1" api_version = None if hasattr(self, "openai_api_type"): api_type = self.openai_api_type if api_type == "azure": if hasattr(self, "openai_api_base"): api_base = self.openai_api_base if hasattr(self, "openai_api_version"): api_version = self.openai_api_version openai.api_type = api_type openai.api_base = api_base openai.api_version = api_version # Monkey patching langchain # pylint: disable=protected-access ChatOpenAI._generate = _wrap(ChatOpenAI._generate, _set_credentials) # type: ignore ChatOpenAI._agenerate = _async_wrap(ChatOpenAI._agenerate, _set_credentials) # type: ignore BaseOpenAI._generate = _wrap(BaseOpenAI._generate, _set_credentials) # type: ignore BaseOpenAI._agenerate = _async_wrap(BaseOpenAI._agenerate, _set_credentials) # type: ignore # pylint: enable=protected-access ######################################################################### # Monkey patching end # ######################################################################### def _use_azure_credentials(): openai.api_type = "azure" openai.api_key = os.environ.get("LLM_AZURE_OPENAI_API_KEY") openai.api_base = os.environ.get("LLM_AZURE_OPENAI_API_BASE") # os.environ.get("LLM_AZURE_OPENAI_API_VERSION") openai.api_version = "2023-03-15-preview" def _use_openai_credentials(): openai.api_type = "open_ai" openai.api_key = os.environ.get("LLM_OPENAI_API_KEY") openai.api_base = "https://api.openai.com/v1" openai.api_version = None openai_available = os.environ.get("LLM_OPENAI_API_KEY") is not None azure_openai_available = os.environ.get("LLM_AZURE_OPENAI_API_KEY") is not None # This is a hack to make sure that the openai api is set correctly # Right now it is overkill, but it will be useful when the api gets fixed and we no longer # hardcode the 
model names (i.e. OpenAI fixes their api) @contextmanager def _openai_client(use_azure_api: bool, is_preference: bool): """Set the openai client to use the correct api type, if available Args: use_azure_api (bool): If true, use the azure api, else use the openai api is_preference (bool): If true, it can fall back to the other api if the preferred one is not available """ if use_azure_api: if azure_openai_available: _use_azure_credentials() elif is_preference and openai_available: _use_openai_credentials() elif is_preference: raise EnvironmentError( "No OpenAI api available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and " "LLM_AZURE_OPENAI_API_VERSION environment variables or LLM_OPENAI_API_KEY environment variable" ) else: raise EnvironmentError( "Azure OpenAI api not available, please set LLM_AZURE_OPENAI_API_KEY, LLM_AZURE_OPENAI_API_BASE and " "LLM_AZURE_OPENAI_API_VERSION environment variables" ) else: if openai_available: _use_openai_credentials() elif is_preference and azure_openai_available: _use_azure_credentials() elif is_preference: raise EnvironmentError( "No OpenAI api available, please set LLM_OPENAI_API_KEY environment variable or LLM_AZURE_OPENAI_API_KEY, " "LLM_AZURE_OPENAI_API_BASE and LLM_AZURE_OPENAI_API_VERSION environment variables" ) else: raise EnvironmentError( "OpenAI api not available, please set LLM_OPENAI_API_KEY environment variable" ) # API client is setup correctly yield def _get_available_deployments(openai_models: Dict[str, List[str]], model_aliases: Dict[str, str]): available_deployments: Dict[str, Dict[str, Any]] = { "chat_completion": {}, "completion": {}, "fine_tuneing": {}, } if azure_openai_available: with _openai_client(use_azure_api=True, is_preference=False): deployments = openai.Deployment.list().get("data") or [] # type: ignore for deployment in deployments: model_name = deployment.model if model_name in model_aliases: model_name = model_aliases[model_name] if model_name in 
openai_models["chat_completion"]: available_deployments["chat_completion"][deployment.id] = deployment elif model_name in openai_models["completion"]: available_deployments["completion"][deployment.id] = deployment elif model_name in openai_models["fine_tuneing"]: available_deployments["fine_tuneing"][deployment.id] = deployment return available_deployments def _get_available_models(openai_models: Dict[str, List[str]], available_deployments: Dict[str, Dict[str, Any]]): available_models: Dict[str, BaseLanguageModel] = {} if openai_available: openai_api_key = os.environ["LLM_OPENAI_API_KEY"] for model_name in openai_models["chat_completion"]: available_models[OPENAI_PREFIX + model_name] = ChatOpenAI( model=model_name, openai_api_key=openai_api_key, client="") for model_name in openai_models["completion"]: available_models[OPENAI_PREFIX + model_name] = OpenAI( model=model_name, openai_api_key=openai_api_key, client="") if azure_openai_available: azure_openai_api_key = os.environ["LLM_AZURE_OPENAI_API_KEY"] azure_openai_api_base = os.environ["LLM_AZURE_OPENAI_API_BASE"] azure_openai_api_version = os.environ["LLM_AZURE_OPENAI_API_VERSION"] for model_type, Model in [("chat_completion", AzureChatOpenAI), ("completion", AzureOpenAI)]: for deployment_name, deployment in available_deployments[model_type].items(): available_models[AZURE_OPENAI_PREFIX + deployment_name] = Model( model=deployment.model, deployment_name=deployment_name, openai_api_base=azure_openai_api_base, openai_api_version=azure_openai_api_version, openai_api_key=azure_openai_api_key, client="", ) return available_models _model_aliases = { "gpt-35-turbo": "gpt-3.5-turbo", } # Hardcoded because openai can't provide a trustworthly api to get the list of models and capabilities... 
openai_models = { "chat_completion": [ "gpt-4", "gpt-4-32k", "gpt-3.5-turbo", "gpt-3.5-turbo-16k" ], "completion": [ "text-davinci-003", "text-curie-001", "text-babbage-001", "text-ada-001", ], "fine_tuneing": [ "davinci", "curie", "babbage", "ada", ] } available_deployments = _get_available_deployments(openai_models, _model_aliases) available_models = _get_available_models(openai_models, available_deployments) logger.info("Available openai models: %s", ", ".join(available_models.keys())) OpenAIModel = Enum('OpenAIModel', {name: name for name in available_models}) # type: ignore default_openai_model = OpenAIModel[os.environ.get("LLM_DEFAULT_MODEL", "gpt-3.5-turbo")] # Long descriptions will be displayed in the playground UI and are copied from the OpenAI docs class OpenAIModelConfig(ModelConfig): """OpenAI LLM configuration.""" model_name: OpenAIModel = Field(default=default_openai_model, # type: ignore description="The name of the model to use.") max_tokens: PositiveInt = Field(1024, description="""\ The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length. \ [Example Python code](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) for counting tokens.\ """) temperature: float = Field(default=0.0, ge=0, le=2, description="""\ What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, \ while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both.\ """) top_p: float = Field(default=1, ge=0, le=1, description="""\ An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. \ So 0.1 means only the tokens comprising the top 10% probability mass are considered. 
We generally recommend altering this or `temperature` but not both.\ """) presence_penalty: float = Field(default=0, ge=-2, le=2, description="""\ Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, \ increasing the model's likelihood to talk about new topics. [See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\ """) frequency_penalty: float = Field(default=0, ge=-2, le=2, description="""\ Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, \ decreasing the model's likelihood to repeat the same line verbatim. [See more information about frequency and presence penalties.](https://platform.openai.com/docs/api-reference/parameter-details)\ """) @validator('max_tokens') def max_tokens_must_be_positive(cls, v): """ Validate that max_tokens is a positive integer. """ if v <= 0: raise ValueError('max_tokens must be a positive integer') return v def get_model(self) -> BaseLanguageModel: """Get the model from the configuration. Returns: BaseLanguageModel: The model. """ model = available_models[self.model_name.value] kwargs = model._lc_kwargs secrets = {secret: getattr(model, secret) for secret in model.lc_secrets.keys()} kwargs.update(secrets) model_kwargs = kwargs.get("model_kwargs", {}) for attr, value in self.dict().items(): if attr == "model_name": # Skip model_name continue if hasattr(model, attr): # If the model has the attribute, add it to kwargs kwargs[attr] = value else: # Otherwise, add it to model_kwargs (necessary for chat models) model_kwargs[attr] = value kwargs["model_kwargs"] = model_kwargs # Initialize a copy of the model using the config model = model.__class__(**kwargs) return model
_async_wrap
identifier_name
devicecash_trend.js
/** * Created by wanli on 2015/4/21. */ //页面加载 var OldData=null; $(document.body).ready(function(){ d3.csv("../data/cashtrend.csv", function(csv) { OldData=DataGroupManager(csv,"名称"); Refreshtrend(); }); }); //数据分组处理 function DataGroupManager(_Data,_GroupColumn){ var groupdata=[];//item:{group:"组名称",items:[],cashsum:0} var groupnames=[]; var groupindex=-1; for(var i=0;i<_Data.length;i++){ groupindex=groupnames.indexOf(_Data[i][_GroupColumn]); if(groupindex>-1){ groupdata[groupindex].items.push(_Data[i]); }else{ groupnames.push(_Data[i][_GroupColumn]); groupdata.push({group:_Data[i][_GroupColumn],items:[_Data[i]]}); } } return groupdata; } var fill,w,h,words,max,scale,complete,keyword,tags,fontSize,maxLength,fetcher,statusText=null; var layout,vis,svg,background=null; function Refreshtrend(){ fill = d3.scale.category20b(); w = $("#cloudchart").width(); h = $("#cloudchart").height(); words = [],max,scale = 1,complete = 0, keyword = "", tags, fontSize, maxLength = 30, fetcher, statusText =""; layout = d3.layout.cloud() .timeInterval(10) .size([w, h]) .fontSize(function(d) { return fontSize(+d.value); }) .text(function(d) { return d.key; }) .on("end", draw); svg = d3.select("#cloudchart").append("svg").attr("width", w).attr("height", h); background = svg.append("g"), vis = svg.append("g").attr("transform", "translate(" + [w >> 1, h >> 1] + ")"); tags=[]; for(var i=0;i<OldData.length;i++){ tags.push({key:OldData[i].group,value:Math.abs(parseInt(OldData[i].items[0]["日现金流"]))}); } generate(); DeviceChange(OldData[0].group);//默认显示第一个装置 } var fontfamily="黑体",spiralvalue="archimedean",fontsizecaltype="log",maxlength=1000; function generate() { layout .font(fontfamily) .spiral(spiralvalue); fontSize = d3.scale[fontsizecaltype]().range([10, 18]); if (tags.length){ fontSize.domain([+tags[tags.length - 1].value || 1, +tags[0].value]); } complete = 0; words = []; layout.stop().words(tags.slice(0, max = Math.min(tags.length, +maxlength))).start(); } function draw(data, bounds) { 
scale = bounds ? Math.min( w / Math.abs(bounds[1].x - w / 2), w / Math.abs(bounds[0].x - w / 2), h / Math.abs(bounds[1].y - h / 2), h / Math.abs(bounds[0].y - h / 2)) / 2 : 1; words = data; var text = vis.selectAll("text") .data(words, function(d) { return d.text.toLowerCase(); }); text.transition() .duration(1000) .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; }) .style("font-size", function(d) { return d.size + "px"; }); text.enter().append("text") .attr("text-anchor", "middle") .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; }) .style("font-size", "1px") .transition() .duration(1000) .style("font-size", function(d) { return d.size + "px"; }); text.style("font-family", function(d) { return d.font; }) .style("cursor","pointer") .style("fill", function(d) { var tempvalue=getdevicevalue(d.text,"日现金流"); if(tempvalue>0){ return "#0cc7a2"; }else{ return "#d45e58"; } //return fill(d.text.toLowerCase()); }) .text(function(d) { return d.text; }); var exitGroup = background.append("g") .attr("transform", vis.attr("transform")); var exitGroupNode = exitGroup.node(); text.exit().each(function() { exitGroupNode.appendChild(this); }); exitGroup.transition() .duration(1000) .style("opacity", 1e-6) .remove(); vis.transition() .delay(1000) .duration(750) .attr("transform", "translate(" + [w >> 1, h >> 1] + ")scale(" + scale + ")"); vis.selectAll("text").on("click",function(ev){ //ev:{"text":"1#乙二醇-环氧乙烷","font":,"rotate","size","padding","width":,"height":,"xoff":,"yoff":,"x1":,"y1":,"x0":,"y0":,"x":,"y"} DeviceChange(ev.text); }); } // function getdevicevalue(_devicename,_cashcol){ var tempvalue=0; for(var i=0;i<OldData.length;i++){ if(_devicename==OldData[i].group){ for(var j=0;j<OldData[i].items.length;j++){ tempvalue+=OldData[i].items[j][_cashcol]; } break; } } return tempvalue; } //获取装置信息 function getdeviceinfo(_devicename){ var rowdata=null; for(var i=0;i<OldData.length;i++){
name==OldData[i].group){ rowdata=OldData[i].items; break; } } return rowdata; } function DeviceChange(_deviceName){ var _deviceitems=getdeviceinfo(_deviceName); KPICompare(_deviceitems[0]); DrawLineChart(_deviceitems); } //装置环比信息KPI function KPICompare(_deviceinfo){ $("#cashkpi").html(""); var KPIItems=[ {v1:"日现金流",value1:parseInt(_deviceinfo["日现金流"]),v2:"上月日现金流",value2:parseInt(_deviceinfo["上月日现金流"])}, {v1:"累计现金流",value1:parseInt(_deviceinfo["累计现金流"]),v2:"上月累计现金流",value2:parseInt(_deviceinfo["上月累计现金流"])}, {v1:"预计现金流",value1:parseInt(_deviceinfo["预计现金流"]),v2:"上月预计现金流",value2:parseInt(_deviceinfo["上月预计现金流"])} ] $("#cashkpi").append("<div id='kpicompare_item1' class='rowpanel'></div>"); $("#cashkpi").append("<div id='kpicompare_item2' class='rowpanel'></div>"); $("#cashkpi").append("<div id='kpicompare_item3' class='rowpanel'></div>"); for(var i=1;i<=KPIItems.length;i++){ DrawCashBar(KPIItems[i-1].value1,KPIItems[i-1].value2,"kpicompare_item"+i,KPIItems[i-1].v1,KPIItems[i-1].v2); } } function DrawCashBar(_value,_firstvalue,_Panelid,_valuename,_firstname){ var differencevalue=_value-_firstvalue; var sumvalue=0; if(differencevalue>0){ sumvalue=differencevalue+_value; }else{ sumvalue=Math.abs(differencevalue)+_firstvalue; } var Panel=$("#"+_Panelid); var TextPanel=$("<div class='textpanel'></div>"); var BarPanel=$("<div class='Barpanel'></div>"); Panel.append(TextPanel); Panel.append(BarPanel); var steppx=(BarPanel.width()-200)/sumvalue; var leftbarwidth=steppx*Math.abs(_value); var leftpanel=$("<div class='panelbar'></div>"); var lefttext=$("<div>"+_valuename+":</br>"+_value+"万元</div>") BarPanel.append(leftpanel); TextPanel.append(lefttext); var backgroundcolor="#d45e58"; if(differencevalue>=0 && _value>=0){ backgroundcolor="#0cc7a2"; lefttext.css({ "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor }) }else{ if(_value>=0){ lefttext.css({ "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor }) }else{ lefttext.css({ "position":"absolute", 
"color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor, "left":(Panel.offset().left+10)+"px" }) } } leftpanel.css({ "width":leftbarwidth+"px", "height":"20px", "background-color":backgroundcolor }); var centerpanel=$("<div class='centerpanelbar'></div>") var centertext=$("<div class='centertext'>"+_firstname+":</br>"+_firstvalue+"万元</div>") BarPanel.append(centerpanel); TextPanel.append(centertext); centerpanel.css({ "left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px" }) centertext.css({ "left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px" }) var rightpanelleftvalue=0; if(differencevalue>=0 && _value>=0){ rightpanelleftvalue=steppx*Math.abs(_value)+150; }else{ rightpanelleftvalue=steppx*Math.abs(_firstvalue)+150; } var rightbarwidth=steppx*Math.abs(differencevalue); var rightpanel=$("<div class='rightpanelbar'></div>"); var righttext=$("<div>差:</br>"+differencevalue+"万元</div>"); BarPanel.append(rightpanel); TextPanel.append(righttext); backgroundcolor="#d45e58"; if(differencevalue>=0 && _value>=0){ backgroundcolor="#0cc7a2"; righttext.css({ "color":backgroundcolor, "position":"absolute", "left":rightpanelleftvalue+Panel.offset().left+"px", "border-left":"solid 1px "+backgroundcolor }) }else{ righttext.css({ "position":"absolute", "color":backgroundcolor, "position":"absolute", "border-right":"solid 1px "+backgroundcolor, "left":(rightpanelleftvalue+Panel.offset().left+rightbarwidth-righttext.width()-2)+"px" }) } rightpanel.css({ "width":rightbarwidth+"px", "height":"20px", "background-color":backgroundcolor, "left":rightpanelleftvalue+Panel.offset().left+"px" }); } function DrawLineChart(_Deviceitems){ var SeriesData=[{name:"日现金流",data:[],color:"#12a771"},{name:"上月日现金流",color:"#b54f3e",data:[]}]; if(_Deviceitems!=null && _Deviceitems.length>0){ for(var i=0;i<_Deviceitems.length;i++){ for(var j=0;j<SeriesData.length;j++){ SeriesData[j].data.push([i+1+"日",parseInt(_Deviceitems[i][SeriesData[j].name])]); } } } 
$('#monthchart').highcharts({ chart:{ backgroundColor:"" }, credits:{enabled:false}, xAxis:{ labels:{ style:{ color:"#666666", fontFamily:"微软雅黑" }, formatter:function(){ return this.value+1+"日"; } } }, yAxis:{ labels:{ style:{ color:"#666666" } }, title:{ text:"现金流(万元)", style:{ color:"#666666", fontFamily:"微软雅黑" } } }, title:{ text:"" }, tooltip:{ crosshairs:true, shared:true }, legend:{ backgroundColor:"", borderColor:"#dcdcdc", borderRadius:5, borderWidth:1, itemStyle:{ color:"#666666", fontFamily:"微软雅黑" } }, series: SeriesData }); }
if(_device
identifier_name
devicecash_trend.js
/** * Created by wanli on 2015/4/21. */ //页面加载 var OldData=null; $(document.body).ready(function(){ d3.csv("../data/cashtrend.csv", function(csv) { OldData=DataGroupManager(csv,"名称"); Refreshtrend(); }); }); //数据分组处理 function DataGroupManager(_Data,_GroupColumn){ var groupdata=[];//item:{group:"组名称",items:[],cashsum:0} var groupnames=[]; var groupindex=-1; for(var i=0;i<_Data.length;i++){ groupindex=groupnames.indexOf(_Data[i][_GroupColumn]); if(groupindex>-1){ groupdata[groupindex].items.push(_Data[i]); }else{ groupnames.push(_Data[i][_GroupColumn]); groupdata.push({group:_Data[i][_GroupColumn],items:[_Data[i]]}); } } return groupdata; } var fill,w,h,words,max,scale,complete,keyword,tags,fontSize,maxLength,fetcher,statusText=null; var layout,vis,svg,background=null; function Refreshtrend(){ fill = d3.scale.category20b(); w = $("#cloudchart").width(); h = $("#cloudchart").height(); words = [],max,scale = 1,complete = 0, keyword = "", tags, fontSize, maxLength = 30, fetcher, statusText =""; layout = d3.layout.cloud() .timeInterval(10) .size([w, h]) .fontSize(function(d) { return fontSize(+d.value); }) .text(function(d) { return d.key; }) .on("end", draw); svg = d3.select("#cloudchart").append("svg").attr("width", w).attr("height", h); background = svg.append("g"), vis = svg.append("g").attr("transform", "translate(" + [w >> 1, h >> 1] + ")"); tags=[]; for(var i=0;i<OldData.length;i++){ tags.push({key:OldData[i].group,value:Math.abs(parseInt(OldData[i].items[0]["日现金流"]))}); } generate(); DeviceChange(OldData[0].group);//默认显示第一个装置 } var fontfamily="黑体",spiralvalue="archimedean",fontsizecaltype="log",maxlength=1000; function generate() { layout .font(fontfamily) .spiral(spiralvalue); fontSize = d3.scale[fontsizecaltype]().range([10, 18]); if (tags.length){ fontSize.domain([+tags[tags.length - 1].value || 1, +tags[0].value]); } complete = 0; words = []; layout.stop().words(tags.slice(0, max = Math.min(tags.length, +maxlength))).start(); } function draw(data, bounds) { 
scale = bounds ? Math.min( w / Math.abs(bounds[1].x - w / 2), w / Math.abs(bounds[0].x - w / 2), h / Math.abs(bounds[1].y - h / 2), h / Math.abs(bounds[0].y - h / 2)) / 2 : 1; words = data; var text = vis.selectAll("text") .data(words, function(d) { return d.text.toLowerCase(); }); text.transition() .duration(1000) .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; }) .style("font-size", function(d) { return d.size + "px"; }); text.enter().append("text") .attr("text-anchor", "middle") .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; }) .style("font-size", "1px") .transition() .duration(1000) .style("font-size", function(d) { return d.size + "px"; }); text.style("font-family", function(d) { return d.font; }) .style("cursor","pointer") .style("fill", function(d) { var tempvalue=getdevicevalue(d.text,"日现金流"); if(tempvalue>0){ return "#0cc7a2"; }else{ return "#d45e58"; } //return fill(d.text.toLowerCase()); }) .text(function(d) { return d.text; }); var exitGroup = background.append("g") .attr("transform", vis.attr("transform")); var exitGroupNode = exitGroup.node(); text.exit().each(function() { exitGroupNode.appendChild(this); }); exitGroup.transition() .duration(1000) .style("opacity", 1e-6) .remove(); vis.transition() .delay(1000) .duration(750) .attr("transform", "translate(" + [w >> 1, h >> 1] + ")scale(" + scale + ")"); vis.selectAll("text").on("click",function(ev){ //ev:{"text":"1#乙二醇-环氧乙烷","font":,"rotate","size","padding","width":,"height":,"xoff":,"yoff":,"x1":,"y1":,"x0":,"y0":,"x":,"y"} DeviceChange(ev.text); }); } // function getdevicevalue(_devicename,_cashcol){ var tempvalue=0; for(var i=0;i<OldData.length;i++){ if(_devicename==OldData[i].group){ for(var j=0;j<OldData[i].items.length;j++){ tempvalue+=OldData[i].items[j][_cashcol]; } break; } } return tempvalue; } //获取装置信息 function getdeviceinfo(_devicename){ var rowdata=null; for(var i=0;i<OldData.length;i++){ 
if(_devicename==OldData[i].group){ rowdata=OldData[i].items; break; } } return rowdata; } function DeviceChange(_deviceName){ var _deviceitems=getdeviceinfo(_deviceName); KPICompare(_deviceitems[0]); DrawLineChart(_deviceitems); } //装置环比信息KPI function KPICompare(_deviceinfo){ $("#cashkpi").html(""); var KPIItems=[ {v1:"日现金流",value1:parseInt(_deviceinfo["日现金流"]),v2:"上月日现金流",value2:parseInt(_deviceinfo["上月日现金流"])}, {v1:"累计现金流",value1:parseInt(_deviceinfo["累计现金流"]),v2:"上月累计现金流",value2:parseInt(_deviceinfo["上月累计现金流"])}, {v1:"预计现金流",value1:parseInt(_deviceinfo["预计现金流"]),v2:"上月预计现金流",value2:parseInt(_deviceinfo["上月预计现金流"])} ] $("#cashkpi").append("<div id='kpicompare_item1' class='rowpanel'></div>"); $("#cashkpi").append("<div id='kpicompare_item2' class='rowpanel'></div>"); $("#cashkpi").append("<div id='kpicompare_item3' class='rowpanel'></div>"); for(var i=1;i<=KPIItems.length;i++){ DrawCashBar(KPIItems[i-1].value1,KPIItems[i-1].value2,"kpicompare_item"+i,KPIItems[i-1].v1,KPIItems[i-1].v2); } } function DrawCashBar(_value,_firstvalue,_Panelid,_valuename,_firstname){ var differencevalue=_value-_firstvalue; var sumvalue=0; if(differencevalue>0){ sumvalue=differencevalue+_value; }else{ sumvalue=Math.abs(differencevalue)+_firstvalue; } var Panel=$("#"+_Panelid); var TextPanel=$("<div class='textpanel'></div>"); var BarPanel=$("<div class='Barpanel'></div>"); Panel.append(TextPanel); Panel.append(BarPanel); var steppx=(BarPanel.width()-200)/sumvalue; var leftbarwidth=steppx*Math.abs(_value); var leftpanel=$("<div class='panelbar'></div>"); var lefttext=$("<div>"+_valuename+":</br>"+_value+"万元</div>") BarPanel.append(leftpanel); TextPanel.append(lefttext); var backgroundcolor="#d45e58"; if(differencevalue>=0 && _value>=0){ backgroundcolor="#0cc7a2"; lefttext.css({ "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor }) }else{ if(_value>=0){ lefttext.css({ "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor }) }else{ lefttext.css({ 
"position":"absolute", "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor, "left":(Panel.offset().left+10)+"px" }) } } leftpanel.css({ "width":leftbarwidth+"px", "height":"20px", "background-color":backgroundcolor }); var centerpanel=$("<div class='centerpanelbar'></div>") var centertext=$("<div class='centertext'>"+_firstname+":</br>"+_firstvalue+"万元</div>") BarPanel.append(centerpanel); TextPanel.append(centertext); centerpanel.css({ "left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px" }) centertext.css({ "left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px" }) var rightpanelleftvalue=0; if(differencevalue>=0 && _value>=0){ rightpanelleftvalue=steppx*Math.abs(_value)+150; }else{ rightpanelleftvalue=steppx*Math.abs(_firstvalue)+150; } var rightbarwidth=steppx*Math.abs(differencevalue); var rightpanel=$("<div class='rightpanelbar'></div>"); var righttext=$("<div>差:</br>"+differencevalue+"万元</div>"); BarPanel.append(rightpanel); TextPanel.append(righttext); backgroundcolor="#d45e58"; if(differencevalue>=0 && _value>=0){ backgroundcolor="#0cc7a2"; righttext.css({ "color":backgroundcolor, "position":"absolute", "left":rightpanelleftvalue+Panel.offset().left+"px", "border-left":"solid 1px "+backgroundcolor }) }else{ righttext.css({ "position":"absolute", "color":backgroundcolor, "position":"absolute", "border-right":"solid 1px "+backgroundcolor, "left":(rightpanelleftvalue+Panel.offset().left+rightbarwidth-righttext.width()-2)+"px" }) } rightpanel.css({ "width":rightbarwidth+"px", "height":"20px", "background-color":backgroundcolor, "left":rightpanelleftvalue+Panel.offset().left+"px" }); } function DrawLineChart(_Deviceitems){ var SeriesData=[{name:"日现金流",data:[],color:"#12a771"},{name:"上月日现金流",color:"#b54f3e",data:[]}]; if(_Deviceitems!=null && _Deviceitems.length>0){ for(var i=0;i<_Deviceitems.length;i++){ for(var j=0;j<SeriesData.length;j++){ SeriesData[j].data.push([i+1+"日",parseInt(_Deviceitems[i][SeriesData[j].name])]); } } } 
$('#monthchart').highcharts({ chart:{ backgroundColor:""
style:{ color:"#666666", fontFamily:"微软雅黑" }, formatter:function(){ return this.value+1+"日"; } } }, yAxis:{ labels:{ style:{ color:"#666666" } }, title:{ text:"现金流(万元)", style:{ color:"#666666", fontFamily:"微软雅黑" } } }, title:{ text:"" }, tooltip:{ crosshairs:true, shared:true }, legend:{ backgroundColor:"", borderColor:"#dcdcdc", borderRadius:5, borderWidth:1, itemStyle:{ color:"#666666", fontFamily:"微软雅黑" } }, series: SeriesData }); }
}, credits:{enabled:false}, xAxis:{ labels:{
random_line_split
devicecash_trend.js
/** * Created by wanli on 2015/4/21. */ //页面加载 var OldData=null; $(document.body).ready(function(){ d3.csv("../data/cashtrend.csv", function(csv) { OldData=DataGroupManager(csv,"名称"); Refreshtrend(); }); }); //数据分组处理 function DataGroupManager(_Data,_GroupColumn){ var groupdata=[];//item:{group:"组名称",items:[],cashsum:0} var groupnames=[]; var groupindex=-1; for(var i=0;i<_Data.length;i++){ groupindex=groupnames.indexOf(_Data[i][_GroupColumn]); if(groupindex>-1){ groupdata[groupindex].items.push(_Data[i]); }else{ groupnames.push(_Data[i][_GroupColumn]); groupdata.push({group:_Data[i][_GroupColumn],items:[_Data[i]]}); } } return groupdata; } var fill,w,h,words,max,scale,complete,keyword,tags,fontSize,maxLength,fetcher,statusText=null; var layout,vis,svg,background=null; function Refreshtrend(){ fill = d3.scale.category20b(); w = $("#cloudchart").width(); h = $("#cloudchart").height(); words = [],max,scale = 1,complete = 0, keyword = "", tags, fontSize, maxLength = 30, fetcher, statusText =""; layout = d3.layout.cloud() .timeInterval(10) .size([w, h]) .fontSize(function(d) { return fontSize(+d.value); }) .text(function(d) { return d.key; }) .on("end", draw); svg = d3.select("#cloudchart").append("svg").attr("width", w).attr("height", h); background = svg.append("g"), vis = svg.append("g").attr("transform", "translate(" + [w >> 1, h >> 1] + ")"); tags=[]; for(var i=0;i<OldData.length;i++){ tags.push({key:OldData[i].group,value:Math.abs(parseInt(OldData[i].items[0]["日现金流"]))}); } generate(); DeviceChange(OldData[0].group);//默认显示第一个装置 } var fontfamily="黑体",spiralvalue="archimedean",fontsizecaltype="log",maxlength=1000; function generate() { layout .font(fontfamily) .spiral(spiralvalue); fontSize = d3.scale[fontsizecaltype]().range([10, 18]); if (tags.length){ fontSize.domain([+tags[tags.length - 1].value || 1, +tags[0].value]); } complete = 0; words = []; layout.stop().words(tags.slice(0, max = Math.min(tags.length, +maxlength))).start(); } function draw(data, bounds) { 
scale = bounds ? Math.min( w / Math.abs(bounds[1].x - w / 2), w / Math.abs(bounds[0].x - w / 2), h / Math.abs(bounds[1].y - h / 2), h / Math.abs(bounds[0].y - h / 2)) / 2 : 1; words = data; var text = vis.selectAll("text") .data(words, function(d) { return d.text.toLowerCase(); }); text.transition() .duration(1000) .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; }) .style("font-size", function(d) { return d.size + "px"; }); text.enter().append("text") .attr("text-anchor", "middle") .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; }) .style("font-size", "1px") .transition() .duration(1000) .style("font-size", function(d) { return d.size + "px"; }); text.style("font-family", function(d) { return d.font; }) .style("cursor","pointer") .style("fill", function(d) { var tempvalue=getdevicevalue(d.text,"日现金流"); if(tempvalue>0){ return "#0cc7a2"; }else{ return "#d45e58"; } //return fill(d.text.toLowerCase()); }) .text(function(d) { return d.text; }); var exitGroup = background.append("g") .attr("transform", vis.attr("transform")); var exitGroupNode = exitGroup.node(); text.exit().each(function() { exitGroupNode.appendChild(this); }); exitGroup.transition() .duration(1000) .style("opacity", 1e-6) .remove(); vis.transition() .delay(1000) .duration(750) .attr("transform", "translate(" + [w >> 1, h >> 1] + ")scale(" + scale + ")"); vis.selectAll("text").on("click",function(ev){ //ev:{"text":"1#乙二醇-环氧乙烷","font":,"rotate","size","padding","width":,"height":,"xoff":,"yoff":,"x1":,"y1":,"x0":,"y0":,"x":,"y"} DeviceChange(ev.text); }); } // function getdevicevalue(_devicename,_cashcol){ var tempvalue=0; for(var i=0;i<OldData.length;i++){ if(_devicena
=0;i<OldData.length;i++){ if(_devicename==OldData[i].group){ rowdata=OldData[i].items; break; } } return rowdata; } function DeviceChange(_deviceName){ var _deviceitems=getdeviceinfo(_deviceName); KPICompare(_deviceitems[0]); DrawLineChart(_deviceitems); } //装置环比信息KPI function KPICompare(_deviceinfo){ $("#cashkpi").html(""); var KPIItems=[ {v1:"日现金流",value1:parseInt(_deviceinfo["日现金流"]),v2:"上月日现金流",value2:parseInt(_deviceinfo["上月日现金流"])}, {v1:"累计现金流",value1:parseInt(_deviceinfo["累计现金流"]),v2:"上月累计现金流",value2:parseInt(_deviceinfo["上月累计现金流"])}, {v1:"预计现金流",value1:parseInt(_deviceinfo["预计现金流"]),v2:"上月预计现金流",value2:parseInt(_deviceinfo["上月预计现金流"])} ] $("#cashkpi").append("<div id='kpicompare_item1' class='rowpanel'></div>"); $("#cashkpi").append("<div id='kpicompare_item2' class='rowpanel'></div>"); $("#cashkpi").append("<div id='kpicompare_item3' class='rowpanel'></div>"); for(var i=1;i<=KPIItems.length;i++){ DrawCashBar(KPIItems[i-1].value1,KPIItems[i-1].value2,"kpicompare_item"+i,KPIItems[i-1].v1,KPIItems[i-1].v2); } } function DrawCashBar(_value,_firstvalue,_Panelid,_valuename,_firstname){ var differencevalue=_value-_firstvalue; var sumvalue=0; if(differencevalue>0){ sumvalue=differencevalue+_value; }else{ sumvalue=Math.abs(differencevalue)+_firstvalue; } var Panel=$("#"+_Panelid); var TextPanel=$("<div class='textpanel'></div>"); var BarPanel=$("<div class='Barpanel'></div>"); Panel.append(TextPanel); Panel.append(BarPanel); var steppx=(BarPanel.width()-200)/sumvalue; var leftbarwidth=steppx*Math.abs(_value); var leftpanel=$("<div class='panelbar'></div>"); var lefttext=$("<div>"+_valuename+":</br>"+_value+"万元</div>") BarPanel.append(leftpanel); TextPanel.append(lefttext); var backgroundcolor="#d45e58"; if(differencevalue>=0 && _value>=0){ backgroundcolor="#0cc7a2"; lefttext.css({ "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor }) }else{ if(_value>=0){ lefttext.css({ "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor }) }else{ 
lefttext.css({ "position":"absolute", "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor, "left":(Panel.offset().left+10)+"px" }) } } leftpanel.css({ "width":leftbarwidth+"px", "height":"20px", "background-color":backgroundcolor }); var centerpanel=$("<div class='centerpanelbar'></div>") var centertext=$("<div class='centertext'>"+_firstname+":</br>"+_firstvalue+"万元</div>") BarPanel.append(centerpanel); TextPanel.append(centertext); centerpanel.css({ "left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px" }) centertext.css({ "left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px" }) var rightpanelleftvalue=0; if(differencevalue>=0 && _value>=0){ rightpanelleftvalue=steppx*Math.abs(_value)+150; }else{ rightpanelleftvalue=steppx*Math.abs(_firstvalue)+150; } var rightbarwidth=steppx*Math.abs(differencevalue); var rightpanel=$("<div class='rightpanelbar'></div>"); var righttext=$("<div>差:</br>"+differencevalue+"万元</div>"); BarPanel.append(rightpanel); TextPanel.append(righttext); backgroundcolor="#d45e58"; if(differencevalue>=0 && _value>=0){ backgroundcolor="#0cc7a2"; righttext.css({ "color":backgroundcolor, "position":"absolute", "left":rightpanelleftvalue+Panel.offset().left+"px", "border-left":"solid 1px "+backgroundcolor }) }else{ righttext.css({ "position":"absolute", "color":backgroundcolor, "position":"absolute", "border-right":"solid 1px "+backgroundcolor, "left":(rightpanelleftvalue+Panel.offset().left+rightbarwidth-righttext.width()-2)+"px" }) } rightpanel.css({ "width":rightbarwidth+"px", "height":"20px", "background-color":backgroundcolor, "left":rightpanelleftvalue+Panel.offset().left+"px" }); } function DrawLineChart(_Deviceitems){ var SeriesData=[{name:"日现金流",data:[],color:"#12a771"},{name:"上月日现金流",color:"#b54f3e",data:[]}]; if(_Deviceitems!=null && _Deviceitems.length>0){ for(var i=0;i<_Deviceitems.length;i++){ for(var j=0;j<SeriesData.length;j++){ 
SeriesData[j].data.push([i+1+"日",parseInt(_Deviceitems[i][SeriesData[j].name])]); } } } $('#monthchart').highcharts({ chart:{ backgroundColor:"" }, credits:{enabled:false}, xAxis:{ labels:{ style:{ color:"#666666", fontFamily:"微软雅黑" }, formatter:function(){ return this.value+1+"日"; } } }, yAxis:{ labels:{ style:{ color:"#666666" } }, title:{ text:"现金流(万元)", style:{ color:"#666666", fontFamily:"微软雅黑" } } }, title:{ text:"" }, tooltip:{ crosshairs:true, shared:true }, legend:{ backgroundColor:"", borderColor:"#dcdcdc", borderRadius:5, borderWidth:1, itemStyle:{ color:"#666666", fontFamily:"微软雅黑" } }, series: SeriesData }); }
me==OldData[i].group){ for(var j=0;j<OldData[i].items.length;j++){ tempvalue+=OldData[i].items[j][_cashcol]; } break; } } return tempvalue; } //获取装置信息 function getdeviceinfo(_devicename){ var rowdata=null; for(var i
identifier_body
devicecash_trend.js
/** * Created by wanli on 2015/4/21. */ //页面加载 var OldData=null; $(document.body).ready(function(){ d3.csv("../data/cashtrend.csv", function(csv) { OldData=DataGroupManager(csv,"名称"); Refreshtrend(); }); }); //数据分组处理 function DataGroupManager(_Data,_GroupColumn){ var groupdata=[];//item:{group:"组名称",items:[],cashsum:0} var groupnames=[]; var groupindex=-1; for(var i=0;i<_Data.length;i++){ groupindex=groupnames.indexOf(_Data[i][_GroupColumn]); if(groupindex>-1){ groupdata[groupindex].items.push(_Data[i]); }else{ groupnames.push(_Data[i][_GroupColumn]); groupdata.push({group:_Data[i][_GroupColumn],items:[_Data[i]]}); } } return groupdata; } var fill,w,h,words,max,scale,complete,keyword,tags,fontSize,maxLength,fetcher,statusText=null; var layout,vis,svg,background=null; function Refreshtrend(){ fill = d3.scale.category20b(); w = $("#cloudchart").width(); h = $("#cloudchart").height(); words = [],max,scale = 1,complete = 0, keyword = "", tags, fontSize, maxLength = 30, fetcher, statusText =""; layout = d3.layout.cloud() .timeInterval(10) .size([w, h]) .fontSize(function(d) { return fontSize(+d.value); }) .text(function(d) { return d.key; }) .on("end", draw); svg = d3.select("#cloudchart").append("svg").attr("width", w).attr("height", h); background = svg.append("g"), vis = svg.append("g").attr("transform", "translate(" + [w >> 1, h >> 1] + ")"); tags=[]; for(var i=0;i<OldData.length;i++){ tags.push({key:OldData[i].group,value:Math.abs(parseInt(OldData[i].items[0]["日现金流"]))}); } generate(); DeviceChange(OldData[0].group);//默认显示第一个装置 } var fontfamily="黑体",spiralvalue="archimedean",fontsizecaltype="log",maxlength=1000; function generate() { layout .font(fontfamily) .spiral(spiralvalue); fontSize = d3.scale[fontsizecaltype]().range([10, 18]); if (tags.length){ fontSize.domain([+tags[tags.length - 1].value || 1, +tags[0].value]); } complete = 0; words = []; layout.stop().words(tags.slice(0, max = Math.min(tags.length, +maxlength))).start(); } function draw(data, bounds) { 
scale = bounds ? Math.min( w / Math.abs(bounds[1].x - w / 2), w / Math.abs(bounds[0].x - w / 2), h / Math.abs(bounds[1].y - h / 2), h / Math.abs(bounds[0].y - h / 2)) / 2 : 1; words = data; var text = vis.selectAll("text") .data(words, function(d) { return d.text.toLowerCase(); }); text.transition() .duration(1000) .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; }) .style("font-size", function(d) { return d.size + "px"; }); text.enter().append("text") .attr("text-anchor", "middle") .attr("transform", function(d) { return "translate(" + [d.x, d.y] + ")rotate(" + d.rotate + ")"; }) .style("font-size", "1px") .transition() .duration(1000) .style("font-size", function(d) { return d.size + "px"; }); text.style("font-family", function(d) { return d.font; }) .style("cursor","pointer") .style("fill", function(d) { var tempvalue=getdevicevalue(d.text,"日现金流"); if(tempvalue>0){ return "#0cc7a2"; }else{ return "#d45e58"; } //return fill(d.text.toLowerCase()); }) .text(function(d) { return d.text; }); var exitGroup = background.append("g") .attr("transform", vis.attr("transform")); var exitGroupNode = exitGroup.node(); text.exit().each(function() { exitGroupNode.appendChild(this); }); exitGroup.transition() .duration(1000) .style("opacity", 1e-6) .remove(); vis.transition() .delay(1000) .duration(750) .attr("transform", "translate(" + [w >> 1, h >> 1] + ")scale(" + scale + ")"); vis.selectAll("text").on("click",function(ev){ //ev:{"text":"1#乙二醇-环氧乙烷","font":,"rotate","size","padding","width":,"height":,"xoff":,"yoff":,"x1":,"y1":,"x0":,"y0":,"x":,"y"} DeviceChange(ev.text); }); } // function getdevicevalue(_devicename,_cashcol){ var tempvalue=0; for(var i=0;i<OldData.length;i++){ if(_devicename==OldData[i].group){ for(var j=0;j<OldData[i].items.length;j++){ tempvalu
ar rowdata=null; for(var i=0;i<OldData.length;i++){ if(_devicename==OldData[i].group){ rowdata=OldData[i].items; break; } } return rowdata; } function DeviceChange(_deviceName){ var _deviceitems=getdeviceinfo(_deviceName); KPICompare(_deviceitems[0]); DrawLineChart(_deviceitems); } //装置环比信息KPI function KPICompare(_deviceinfo){ $("#cashkpi").html(""); var KPIItems=[ {v1:"日现金流",value1:parseInt(_deviceinfo["日现金流"]),v2:"上月日现金流",value2:parseInt(_deviceinfo["上月日现金流"])}, {v1:"累计现金流",value1:parseInt(_deviceinfo["累计现金流"]),v2:"上月累计现金流",value2:parseInt(_deviceinfo["上月累计现金流"])}, {v1:"预计现金流",value1:parseInt(_deviceinfo["预计现金流"]),v2:"上月预计现金流",value2:parseInt(_deviceinfo["上月预计现金流"])} ] $("#cashkpi").append("<div id='kpicompare_item1' class='rowpanel'></div>"); $("#cashkpi").append("<div id='kpicompare_item2' class='rowpanel'></div>"); $("#cashkpi").append("<div id='kpicompare_item3' class='rowpanel'></div>"); for(var i=1;i<=KPIItems.length;i++){ DrawCashBar(KPIItems[i-1].value1,KPIItems[i-1].value2,"kpicompare_item"+i,KPIItems[i-1].v1,KPIItems[i-1].v2); } } function DrawCashBar(_value,_firstvalue,_Panelid,_valuename,_firstname){ var differencevalue=_value-_firstvalue; var sumvalue=0; if(differencevalue>0){ sumvalue=differencevalue+_value; }else{ sumvalue=Math.abs(differencevalue)+_firstvalue; } var Panel=$("#"+_Panelid); var TextPanel=$("<div class='textpanel'></div>"); var BarPanel=$("<div class='Barpanel'></div>"); Panel.append(TextPanel); Panel.append(BarPanel); var steppx=(BarPanel.width()-200)/sumvalue; var leftbarwidth=steppx*Math.abs(_value); var leftpanel=$("<div class='panelbar'></div>"); var lefttext=$("<div>"+_valuename+":</br>"+_value+"万元</div>") BarPanel.append(leftpanel); TextPanel.append(lefttext); var backgroundcolor="#d45e58"; if(differencevalue>=0 && _value>=0){ backgroundcolor="#0cc7a2"; lefttext.css({ "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor }) }else{ if(_value>=0){ lefttext.css({ "color":backgroundcolor, "border-left":"solid 1px 
"+backgroundcolor }) }else{ lefttext.css({ "position":"absolute", "color":backgroundcolor, "border-left":"solid 1px "+backgroundcolor, "left":(Panel.offset().left+10)+"px" }) } } leftpanel.css({ "width":leftbarwidth+"px", "height":"20px", "background-color":backgroundcolor }); var centerpanel=$("<div class='centerpanelbar'></div>") var centertext=$("<div class='centertext'>"+_firstname+":</br>"+_firstvalue+"万元</div>") BarPanel.append(centerpanel); TextPanel.append(centertext); centerpanel.css({ "left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px" }) centertext.css({ "left":(Math.abs(_firstvalue)*steppx)+Panel.offset().left+"px" }) var rightpanelleftvalue=0; if(differencevalue>=0 && _value>=0){ rightpanelleftvalue=steppx*Math.abs(_value)+150; }else{ rightpanelleftvalue=steppx*Math.abs(_firstvalue)+150; } var rightbarwidth=steppx*Math.abs(differencevalue); var rightpanel=$("<div class='rightpanelbar'></div>"); var righttext=$("<div>差:</br>"+differencevalue+"万元</div>"); BarPanel.append(rightpanel); TextPanel.append(righttext); backgroundcolor="#d45e58"; if(differencevalue>=0 && _value>=0){ backgroundcolor="#0cc7a2"; righttext.css({ "color":backgroundcolor, "position":"absolute", "left":rightpanelleftvalue+Panel.offset().left+"px", "border-left":"solid 1px "+backgroundcolor }) }else{ righttext.css({ "position":"absolute", "color":backgroundcolor, "position":"absolute", "border-right":"solid 1px "+backgroundcolor, "left":(rightpanelleftvalue+Panel.offset().left+rightbarwidth-righttext.width()-2)+"px" }) } rightpanel.css({ "width":rightbarwidth+"px", "height":"20px", "background-color":backgroundcolor, "left":rightpanelleftvalue+Panel.offset().left+"px" }); } function DrawLineChart(_Deviceitems){ var SeriesData=[{name:"日现金流",data:[],color:"#12a771"},{name:"上月日现金流",color:"#b54f3e",data:[]}]; if(_Deviceitems!=null && _Deviceitems.length>0){ for(var i=0;i<_Deviceitems.length;i++){ for(var j=0;j<SeriesData.length;j++){ 
SeriesData[j].data.push([i+1+"日",parseInt(_Deviceitems[i][SeriesData[j].name])]); } } } $('#monthchart').highcharts({ chart:{ backgroundColor:"" }, credits:{enabled:false}, xAxis:{ labels:{ style:{ color:"#666666", fontFamily:"微软雅黑" }, formatter:function(){ return this.value+1+"日"; } } }, yAxis:{ labels:{ style:{ color:"#666666" } }, title:{ text:"现金流(万元)", style:{ color:"#666666", fontFamily:"微软雅黑" } } }, title:{ text:"" }, tooltip:{ crosshairs:true, shared:true }, legend:{ backgroundColor:"", borderColor:"#dcdcdc", borderRadius:5, borderWidth:1, itemStyle:{ color:"#666666", fontFamily:"微软雅黑" } }, series: SeriesData }); }
e+=OldData[i].items[j][_cashcol]; } break; } } return tempvalue; } //获取装置信息 function getdeviceinfo(_devicename){ v
conditional_block
train_models.go
// Workflow written in SciPipe. // For more information about SciPipe, see: http://scipipe.org package main import ( "flag" "fmt" "runtime" "strconv" str "strings" sp "github.com/scipipe/scipipe" spc "github.com/scipipe/scipipe/components" ) var ( maxTasks = flag.Int("maxtasks", 4, "Max number of local cores to use") threads = flag.Int("threads", 1, "Number of threads that Go is allowed to start") geneSet = flag.String("geneset", "smallest1", "Gene set to use (one of smallest1, smallest3, smallest4, bowes44)") runSlurm = flag.Bool("slurm", false, "Start computationally heavy jobs via SLURM") debug = flag.Bool("debug", false, "Increase logging level to include DEBUG messages") cpSignPath = "../../bin/cpsign-0.6.3.jar" geneSets = map[string][]string{ "bowes44": []string{ // Not available in dataset: "CHRNA1". // Not available in dataset: "KCNE1" // Instead we use MinK1 as they both share the same alias // "MinK", and also confirmed by Wes to be the same. "ADORA2A", "ADRA1A", "ADRA2A", "ADRB1", "ADRB2", "CNR1", "CNR2", "CCKAR", "DRD1", "DRD2", "EDNRA", "HRH1", "HRH2", "OPRD1", "OPRK1", "OPRM1", "CHRM1", "CHRM2", "CHRM3", "HTR1A", "HTR1B", "HTR2A", "HTR2B", "AVPR1A", "CHRNA4", "CACNA1C", "GABRA1", "KCNH2", "KCNQ1", "MINK1", "GRIN1", "HTR3A", "SCN5A", "ACHE", "PTGS1", "PTGS2", "MAOA", "PDE3A", "PDE4D", "LCK", "SLC6A3", "SLC6A2", "SLC6A4", "AR", "NR3C1", }, "bowes44min100percls": []string{ "PDE3A", "SCN5A", "CCKAR", "ADRB1", "PTGS1", "CHRM3", "CHRM2", "EDNRA", "MAOA", "LCK", "PTGS2", "SLC6A2", "ACHE", "CNR2", "CNR1", "ADORA2A", "OPRD1", "NR3C1", "AR", "SLC6A4", "OPRM1", "HTR1A", "SLC6A3", "OPRK1", "AVPR1A", "ADRB2", "DRD2", "KCNH2", "DRD1", "HTR2A", "CHRM1", }, "smallest1": []string{ "PDE3A", }, "smallest3": []string{ "PDE3A", "SCN5A", "CCKAR", }, "smallest4": []string{ "PDE3A", "SCN5A", "CCKAR", "ADRB1", }, } costVals = []string{ "1", "10", "100", } gammaVals = []string{ "0.1", "0.01", "0.001", } replicates = []string{ "r1", "r2", "r3", } ) func main()
// -------------------------------------------------------------------------------- // JSON types // -------------------------------------------------------------------------------- // JSON output of cpSign crossvalidate // { // "classConfidence": 0.855, // "observedFuzziness": { // "A": 0.253, // "N": 0.207, // "overall": 0.231 // }, // "validity": 0.917, // "efficiency": 0.333, // "classCredibility": 0.631 // } // -------------------------------------------------------------------------------- type cpSignCrossValOutput struct { ClassConfidence float64 `json:"classConfidence"` ObservedFuzziness cpSignObservedFuzziness `json:"observedFuzziness"` Validity float64 `json:"validity"` Efficiency float64 `json:"efficiency"` ClassCredibility float64 `json:"classCredibility"` } type cpSignObservedFuzziness struct { Active float64 `json:"A"` Nonactive float64 `json:"N"` Overall float64 `json:"overall"` }
{ // -------------------------------- // Parse flags and stuff // -------------------------------- flag.Parse() if *debug { sp.InitLogDebug() } else { sp.InitLogAudit() } if len(geneSets[*geneSet]) == 0 { names := []string{} for n, _ := range geneSets { names = append(names, n) } sp.Error.Fatalf("Incorrect gene set %s specified! Only allowed values are: %s\n", *geneSet, str.Join(names, ", ")) } runtime.GOMAXPROCS(*threads) // -------------------------------- // Show startup messages // -------------------------------- sp.Info.Printf("Using max %d OS threads to schedule max %d tasks\n", *threads, *maxTasks) sp.Info.Printf("Starting workflow for %s geneset\n", *geneSet) // -------------------------------- // Initialize processes and add to runner // -------------------------------- wf := sp.NewWorkflow("train_models", *maxTasks) dbFileName := "pubchem.chembl.dataset4publication_inchi_smiles.tsv.xz" dlExcapeDB := wf.NewProc("dlDB", fmt.Sprintf("wget https://zenodo.org/record/173258/files/%s -O {o:excapexz}", dbFileName)) dlExcapeDB.SetPathStatic("excapexz", "../../raw/"+dbFileName) unPackDB := wf.NewProc("unPackDB", "xzcat {i:xzfile} > {o:unxzed}") unPackDB.SetPathReplace("xzfile", "unxzed", ".xz", "") unPackDB.In("xzfile").Connect(dlExcapeDB.Out("excapexz")) //unPackDB.Prepend = "salloc -A snic2017-7-89 -n 2 -t 8:00:00 -J unpack_excapedb" finalModelsSummary := NewFinalModelSummarizer(wf, "finalmodels_summary_creator", "res/final_models_summary.tsv", '\t') // -------------------------------- // Set up gene-specific workflow branches // -------------------------------- for _, gene := range geneSets[*geneSet] { geneLC := str.ToLower(gene) uniq_gene := geneLC // -------------------------------------------------------------------------------- // Extract target data step // -------------------------------------------------------------------------------- extractTargetData := wf.NewProc("extract_target_data_"+uniq_gene, `awk -F"\t" '$9 == "{p:gene}" { print $12"\t"$4 }' 
{i:raw_data} > {o:target_data}`) extractTargetData.ParamPort("gene").ConnectStr(gene) extractTargetData.SetPathStatic("target_data", fmt.Sprintf("dat/%s/%s.tsv", geneLC, geneLC)) extractTargetData.In("raw_data").Connect(unPackDB.Out("unxzed")) if *runSlurm { extractTargetData.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1:00:00 -J scipipe_cnt_comp_" + geneLC // SLURM string } countTargetDataRows := wf.NewProc("cnt_targetdata_rows_"+uniq_gene, `awk '$2 == "A" { a += 1 } $2 == "N" { n += 1 } END { print a "\t" n }' {i:targetdata} > {o:count} # {p:gene}`) countTargetDataRows.SetPathExtend("targetdata", "count", ".count") countTargetDataRows.In("targetdata").Connect(extractTargetData.Out("target_data")) countTargetDataRows.ParamPort("gene").ConnectStr(gene) // -------------------------------------------------------------------------------- // Pre-compute step // -------------------------------------------------------------------------------- cpSignPrecomp := wf.NewProc("cpsign_precomp_"+uniq_gene, `java -jar `+cpSignPath+` precompute \ --license ../../bin/cpsign.lic \ --cptype 1 \ --trainfile {i:traindata} \ --labels A, N \ --model-out {o:precomp} \ --model-name "`+gene+` target profile"`) cpSignPrecomp.In("traindata").Connect(extractTargetData.Out("target_data")) cpSignPrecomp.SetPathExtend("traindata", "precomp", ".precomp") if *runSlurm { cpSignPrecomp.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J precmp_" + geneLC // SLURM string } for _, replicate := range replicates { uniq_repl := uniq_gene + "_" + replicate // -------------------------------------------------------------------------------- // Optimize cost/gamma-step // -------------------------------------------------------------------------------- includeGamma := false // For liblinear summarize := NewSummarizeCostGammaPerf(wf, "summarize_cost_gamma_perf_"+uniq_repl, "dat/"+geneLC+"/"+replicate+"/"+geneLC+"_cost_gamma_perf_stats.tsv", includeGamma) for _, cost := range costVals { uniq_cost 
:= uniq_repl + "_" + cost // If Liblinear evalCost := wf.NewProc("crossval_"+uniq_cost, `java -jar `+cpSignPath+` crossvalidate \ --license ../../bin/cpsign.lic \ --cptype 1 \ --trainfile {i:traindata} \ --impl liblinear \ --labels A, N \ --nr-models {p:nrmdl} \ --cost {p:cost} \ --cv-folds {p:cvfolds} \ --output-format json \ --confidence {p:confidence} | grep -P "^{" > {o:stats} # {p:gene} {p:replicate}`) evalCost.SetPathCustom("stats", func(t *sp.SciTask) string { c, err := strconv.ParseInt(t.Param("cost"), 10, 0) geneLC := str.ToLower(t.Param("gene")) sp.CheckErr(err) return str.Replace(t.InPath("traindata"), geneLC+".tsv", t.Param("replicate")+"/"+geneLC+".tsv", 1) + fmt.Sprintf(".liblin_c%03d", c) + "_crossval_stats.json" }) evalCost.In("traindata").Connect(extractTargetData.Out("target_data")) evalCost.ParamPort("nrmdl").ConnectStr("10") evalCost.ParamPort("cvfolds").ConnectStr("10") evalCost.ParamPort("confidence").ConnectStr("0.9") evalCost.ParamPort("gene").ConnectStr(gene) evalCost.ParamPort("replicate").ConnectStr(replicate) evalCost.ParamPort("cost").ConnectStr(cost) if *runSlurm { evalCost.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J evalcg_" + uniq_cost // SLURM string } extractCostGammaStats := spc.NewMapToKeys(wf, "extract_cgstats_"+uniq_cost, func(ip *sp.InformationPacket) map[string]string { crossValOut := &cpSignCrossValOutput{} ip.UnMarshalJson(crossValOut) newKeys := map[string]string{} newKeys["validity"] = fmt.Sprintf("%.3f", crossValOut.Validity) newKeys["efficiency"] = fmt.Sprintf("%.3f", crossValOut.Efficiency) newKeys["class_confidence"] = fmt.Sprintf("%.3f", crossValOut.ClassConfidence) newKeys["class_credibility"] = fmt.Sprintf("%.3f", crossValOut.ClassCredibility) newKeys["obsfuzz_active"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Active) newKeys["obsfuzz_nonactive"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Nonactive) newKeys["obsfuzz_overall"] = fmt.Sprintf("%.3f", 
crossValOut.ObservedFuzziness.Overall) return newKeys }) extractCostGammaStats.In.Connect(evalCost.Out("stats")) summarize.In.Connect(extractCostGammaStats.Out) } // end for cost // TODO: Let select best operate directly on the stream of IPs, not // via the summarize component, so that we can retain the keys in // the IP! selectBest := NewBestCostGamma(wf, "select_best_cost_gamma_"+uniq_repl, '\t', false, includeGamma) selectBest.InCSVFile.Connect(summarize.OutStats) // -------------------------------------------------------------------------------- // Train step // -------------------------------------------------------------------------------- cpSignTrain := wf.NewProc("cpsign_train_"+uniq_repl, `java -jar `+cpSignPath+` train \ --license ../../bin/cpsign.lic \ --cptype 1 \ --modelfile {i:model} \ --labels A, N \ --impl liblinear \ --nr-models {p:nrmdl} \ --cost {p:cost} \ --model-out {o:model} \ --model-name "{p:gene} target profile" # {p:replicate} Validity: {p:validity} Efficiency: {p:efficiency} Class-Equalized Observed Fuzziness: {p:obsfuzz_classavg} Observed Fuzziness (Overall): {p:obsfuzz_overall} Observed Fuzziness (Active class): {p:obsfuzz_active} Observed Fuzziness (Non-active class): {p:obsfuzz_nonactive} Class Confidence: {p:class_confidence} Class Credibility: {p:class_credibility}`) cpSignTrain.In("model").Connect(cpSignPrecomp.Out("precomp")) cpSignTrain.ParamPort("nrmdl").ConnectStr("10") cpSignTrain.ParamPort("gene").ConnectStr(gene) cpSignTrain.ParamPort("replicate").ConnectStr(replicate) cpSignTrain.ParamPort("validity").Connect(selectBest.OutBestValidity) cpSignTrain.ParamPort("efficiency").Connect(selectBest.OutBestEfficiency) cpSignTrain.ParamPort("obsfuzz_classavg").Connect(selectBest.OutBestObsFuzzClassAvg) cpSignTrain.ParamPort("obsfuzz_overall").Connect(selectBest.OutBestObsFuzzOverall) cpSignTrain.ParamPort("obsfuzz_active").Connect(selectBest.OutBestObsFuzzActive) 
cpSignTrain.ParamPort("obsfuzz_nonactive").Connect(selectBest.OutBestObsFuzzNonactive) cpSignTrain.ParamPort("class_confidence").Connect(selectBest.OutBestClassConfidence) cpSignTrain.ParamPort("class_credibility").Connect(selectBest.OutBestClassCredibility) cpSignTrain.ParamPort("cost").Connect(selectBest.OutBestCost) cpSignTrain.SetPathCustom("model", func(t *sp.SciTask) string { return fmt.Sprintf("dat/final_models/%s/%s_c%s_nrmdl%s_%s.mdl", str.ToLower(t.Param("gene")), "liblin", t.Param("cost"), t.Param("nrmdl"), t.Param("replicate")) }) if *runSlurm { cpSignTrain.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J train_" + uniq_repl // SLURM string } finalModelsSummary.InModel.Connect(cpSignTrain.Out("model")) finalModelsSummary.InTargetDataCount.Connect(countTargetDataRows.Out("count")) } // end for replicate } // end for gene sortSummaryOnDataSize := wf.NewProc("sort_summary", "head -n 1 {i:summary} > {o:sorted} && tail -n +2 {i:summary} | sort -nk 16 >> {o:sorted}") sortSummaryOnDataSize.SetPathReplace("summary", "sorted", ".tsv", ".sorted.tsv") sortSummaryOnDataSize.In("summary").Connect(finalModelsSummary.OutSummary) plotSummary := wf.NewProc("plot_summary", "Rscript bin/plot_summary.r -i {i:summary} -o {o:plot} -f png") plotSummary.SetPathExtend("summary", "plot", ".plot.png") plotSummary.In("summary").Connect(sortSummaryOnDataSize.Out("sorted")) wf.ConnectLast(plotSummary.Out("plot")) // -------------------------------- // Run the pipeline! // -------------------------------- wf.Run() }
identifier_body
train_models.go
// Workflow written in SciPipe. // For more information about SciPipe, see: http://scipipe.org package main import ( "flag" "fmt" "runtime" "strconv" str "strings" sp "github.com/scipipe/scipipe" spc "github.com/scipipe/scipipe/components" ) var ( maxTasks = flag.Int("maxtasks", 4, "Max number of local cores to use") threads = flag.Int("threads", 1, "Number of threads that Go is allowed to start") geneSet = flag.String("geneset", "smallest1", "Gene set to use (one of smallest1, smallest3, smallest4, bowes44)") runSlurm = flag.Bool("slurm", false, "Start computationally heavy jobs via SLURM") debug = flag.Bool("debug", false, "Increase logging level to include DEBUG messages") cpSignPath = "../../bin/cpsign-0.6.3.jar" geneSets = map[string][]string{ "bowes44": []string{ // Not available in dataset: "CHRNA1". // Not available in dataset: "KCNE1" // Instead we use MinK1 as they both share the same alias // "MinK", and also confirmed by Wes to be the same. "ADORA2A", "ADRA1A", "ADRA2A", "ADRB1", "ADRB2", "CNR1", "CNR2", "CCKAR", "DRD1", "DRD2", "EDNRA", "HRH1", "HRH2", "OPRD1", "OPRK1", "OPRM1", "CHRM1", "CHRM2", "CHRM3", "HTR1A", "HTR1B", "HTR2A", "HTR2B", "AVPR1A", "CHRNA4", "CACNA1C", "GABRA1", "KCNH2", "KCNQ1", "MINK1", "GRIN1", "HTR3A", "SCN5A", "ACHE", "PTGS1", "PTGS2", "MAOA", "PDE3A", "PDE4D", "LCK", "SLC6A3", "SLC6A2", "SLC6A4", "AR", "NR3C1", }, "bowes44min100percls": []string{ "PDE3A", "SCN5A", "CCKAR", "ADRB1", "PTGS1", "CHRM3", "CHRM2", "EDNRA", "MAOA", "LCK", "PTGS2", "SLC6A2", "ACHE", "CNR2", "CNR1", "ADORA2A", "OPRD1", "NR3C1", "AR", "SLC6A4", "OPRM1", "HTR1A", "SLC6A3", "OPRK1", "AVPR1A", "ADRB2", "DRD2", "KCNH2", "DRD1", "HTR2A", "CHRM1", }, "smallest1": []string{ "PDE3A", }, "smallest3": []string{ "PDE3A", "SCN5A", "CCKAR", }, "smallest4": []string{ "PDE3A", "SCN5A", "CCKAR", "ADRB1", }, } costVals = []string{ "1", "10", "100", }
"0.001", } replicates = []string{ "r1", "r2", "r3", } ) func main() { // -------------------------------- // Parse flags and stuff // -------------------------------- flag.Parse() if *debug { sp.InitLogDebug() } else { sp.InitLogAudit() } if len(geneSets[*geneSet]) == 0 { names := []string{} for n, _ := range geneSets { names = append(names, n) } sp.Error.Fatalf("Incorrect gene set %s specified! Only allowed values are: %s\n", *geneSet, str.Join(names, ", ")) } runtime.GOMAXPROCS(*threads) // -------------------------------- // Show startup messages // -------------------------------- sp.Info.Printf("Using max %d OS threads to schedule max %d tasks\n", *threads, *maxTasks) sp.Info.Printf("Starting workflow for %s geneset\n", *geneSet) // -------------------------------- // Initialize processes and add to runner // -------------------------------- wf := sp.NewWorkflow("train_models", *maxTasks) dbFileName := "pubchem.chembl.dataset4publication_inchi_smiles.tsv.xz" dlExcapeDB := wf.NewProc("dlDB", fmt.Sprintf("wget https://zenodo.org/record/173258/files/%s -O {o:excapexz}", dbFileName)) dlExcapeDB.SetPathStatic("excapexz", "../../raw/"+dbFileName) unPackDB := wf.NewProc("unPackDB", "xzcat {i:xzfile} > {o:unxzed}") unPackDB.SetPathReplace("xzfile", "unxzed", ".xz", "") unPackDB.In("xzfile").Connect(dlExcapeDB.Out("excapexz")) //unPackDB.Prepend = "salloc -A snic2017-7-89 -n 2 -t 8:00:00 -J unpack_excapedb" finalModelsSummary := NewFinalModelSummarizer(wf, "finalmodels_summary_creator", "res/final_models_summary.tsv", '\t') // -------------------------------- // Set up gene-specific workflow branches // -------------------------------- for _, gene := range geneSets[*geneSet] { geneLC := str.ToLower(gene) uniq_gene := geneLC // -------------------------------------------------------------------------------- // Extract target data step // -------------------------------------------------------------------------------- extractTargetData := 
wf.NewProc("extract_target_data_"+uniq_gene, `awk -F"\t" '$9 == "{p:gene}" { print $12"\t"$4 }' {i:raw_data} > {o:target_data}`) extractTargetData.ParamPort("gene").ConnectStr(gene) extractTargetData.SetPathStatic("target_data", fmt.Sprintf("dat/%s/%s.tsv", geneLC, geneLC)) extractTargetData.In("raw_data").Connect(unPackDB.Out("unxzed")) if *runSlurm { extractTargetData.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1:00:00 -J scipipe_cnt_comp_" + geneLC // SLURM string } countTargetDataRows := wf.NewProc("cnt_targetdata_rows_"+uniq_gene, `awk '$2 == "A" { a += 1 } $2 == "N" { n += 1 } END { print a "\t" n }' {i:targetdata} > {o:count} # {p:gene}`) countTargetDataRows.SetPathExtend("targetdata", "count", ".count") countTargetDataRows.In("targetdata").Connect(extractTargetData.Out("target_data")) countTargetDataRows.ParamPort("gene").ConnectStr(gene) // -------------------------------------------------------------------------------- // Pre-compute step // -------------------------------------------------------------------------------- cpSignPrecomp := wf.NewProc("cpsign_precomp_"+uniq_gene, `java -jar `+cpSignPath+` precompute \ --license ../../bin/cpsign.lic \ --cptype 1 \ --trainfile {i:traindata} \ --labels A, N \ --model-out {o:precomp} \ --model-name "`+gene+` target profile"`) cpSignPrecomp.In("traindata").Connect(extractTargetData.Out("target_data")) cpSignPrecomp.SetPathExtend("traindata", "precomp", ".precomp") if *runSlurm { cpSignPrecomp.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J precmp_" + geneLC // SLURM string } for _, replicate := range replicates { uniq_repl := uniq_gene + "_" + replicate // -------------------------------------------------------------------------------- // Optimize cost/gamma-step // -------------------------------------------------------------------------------- includeGamma := false // For liblinear summarize := NewSummarizeCostGammaPerf(wf, "summarize_cost_gamma_perf_"+uniq_repl, 
"dat/"+geneLC+"/"+replicate+"/"+geneLC+"_cost_gamma_perf_stats.tsv", includeGamma) for _, cost := range costVals { uniq_cost := uniq_repl + "_" + cost // If Liblinear evalCost := wf.NewProc("crossval_"+uniq_cost, `java -jar `+cpSignPath+` crossvalidate \ --license ../../bin/cpsign.lic \ --cptype 1 \ --trainfile {i:traindata} \ --impl liblinear \ --labels A, N \ --nr-models {p:nrmdl} \ --cost {p:cost} \ --cv-folds {p:cvfolds} \ --output-format json \ --confidence {p:confidence} | grep -P "^{" > {o:stats} # {p:gene} {p:replicate}`) evalCost.SetPathCustom("stats", func(t *sp.SciTask) string { c, err := strconv.ParseInt(t.Param("cost"), 10, 0) geneLC := str.ToLower(t.Param("gene")) sp.CheckErr(err) return str.Replace(t.InPath("traindata"), geneLC+".tsv", t.Param("replicate")+"/"+geneLC+".tsv", 1) + fmt.Sprintf(".liblin_c%03d", c) + "_crossval_stats.json" }) evalCost.In("traindata").Connect(extractTargetData.Out("target_data")) evalCost.ParamPort("nrmdl").ConnectStr("10") evalCost.ParamPort("cvfolds").ConnectStr("10") evalCost.ParamPort("confidence").ConnectStr("0.9") evalCost.ParamPort("gene").ConnectStr(gene) evalCost.ParamPort("replicate").ConnectStr(replicate) evalCost.ParamPort("cost").ConnectStr(cost) if *runSlurm { evalCost.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J evalcg_" + uniq_cost // SLURM string } extractCostGammaStats := spc.NewMapToKeys(wf, "extract_cgstats_"+uniq_cost, func(ip *sp.InformationPacket) map[string]string { crossValOut := &cpSignCrossValOutput{} ip.UnMarshalJson(crossValOut) newKeys := map[string]string{} newKeys["validity"] = fmt.Sprintf("%.3f", crossValOut.Validity) newKeys["efficiency"] = fmt.Sprintf("%.3f", crossValOut.Efficiency) newKeys["class_confidence"] = fmt.Sprintf("%.3f", crossValOut.ClassConfidence) newKeys["class_credibility"] = fmt.Sprintf("%.3f", crossValOut.ClassCredibility) newKeys["obsfuzz_active"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Active) newKeys["obsfuzz_nonactive"] = 
fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Nonactive) newKeys["obsfuzz_overall"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Overall) return newKeys }) extractCostGammaStats.In.Connect(evalCost.Out("stats")) summarize.In.Connect(extractCostGammaStats.Out) } // end for cost // TODO: Let select best operate directly on the stream of IPs, not // via the summarize component, so that we can retain the keys in // the IP! selectBest := NewBestCostGamma(wf, "select_best_cost_gamma_"+uniq_repl, '\t', false, includeGamma) selectBest.InCSVFile.Connect(summarize.OutStats) // -------------------------------------------------------------------------------- // Train step // -------------------------------------------------------------------------------- cpSignTrain := wf.NewProc("cpsign_train_"+uniq_repl, `java -jar `+cpSignPath+` train \ --license ../../bin/cpsign.lic \ --cptype 1 \ --modelfile {i:model} \ --labels A, N \ --impl liblinear \ --nr-models {p:nrmdl} \ --cost {p:cost} \ --model-out {o:model} \ --model-name "{p:gene} target profile" # {p:replicate} Validity: {p:validity} Efficiency: {p:efficiency} Class-Equalized Observed Fuzziness: {p:obsfuzz_classavg} Observed Fuzziness (Overall): {p:obsfuzz_overall} Observed Fuzziness (Active class): {p:obsfuzz_active} Observed Fuzziness (Non-active class): {p:obsfuzz_nonactive} Class Confidence: {p:class_confidence} Class Credibility: {p:class_credibility}`) cpSignTrain.In("model").Connect(cpSignPrecomp.Out("precomp")) cpSignTrain.ParamPort("nrmdl").ConnectStr("10") cpSignTrain.ParamPort("gene").ConnectStr(gene) cpSignTrain.ParamPort("replicate").ConnectStr(replicate) cpSignTrain.ParamPort("validity").Connect(selectBest.OutBestValidity) cpSignTrain.ParamPort("efficiency").Connect(selectBest.OutBestEfficiency) cpSignTrain.ParamPort("obsfuzz_classavg").Connect(selectBest.OutBestObsFuzzClassAvg) cpSignTrain.ParamPort("obsfuzz_overall").Connect(selectBest.OutBestObsFuzzOverall) 
cpSignTrain.ParamPort("obsfuzz_active").Connect(selectBest.OutBestObsFuzzActive) cpSignTrain.ParamPort("obsfuzz_nonactive").Connect(selectBest.OutBestObsFuzzNonactive) cpSignTrain.ParamPort("class_confidence").Connect(selectBest.OutBestClassConfidence) cpSignTrain.ParamPort("class_credibility").Connect(selectBest.OutBestClassCredibility) cpSignTrain.ParamPort("cost").Connect(selectBest.OutBestCost) cpSignTrain.SetPathCustom("model", func(t *sp.SciTask) string { return fmt.Sprintf("dat/final_models/%s/%s_c%s_nrmdl%s_%s.mdl", str.ToLower(t.Param("gene")), "liblin", t.Param("cost"), t.Param("nrmdl"), t.Param("replicate")) }) if *runSlurm { cpSignTrain.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J train_" + uniq_repl // SLURM string } finalModelsSummary.InModel.Connect(cpSignTrain.Out("model")) finalModelsSummary.InTargetDataCount.Connect(countTargetDataRows.Out("count")) } // end for replicate } // end for gene sortSummaryOnDataSize := wf.NewProc("sort_summary", "head -n 1 {i:summary} > {o:sorted} && tail -n +2 {i:summary} | sort -nk 16 >> {o:sorted}") sortSummaryOnDataSize.SetPathReplace("summary", "sorted", ".tsv", ".sorted.tsv") sortSummaryOnDataSize.In("summary").Connect(finalModelsSummary.OutSummary) plotSummary := wf.NewProc("plot_summary", "Rscript bin/plot_summary.r -i {i:summary} -o {o:plot} -f png") plotSummary.SetPathExtend("summary", "plot", ".plot.png") plotSummary.In("summary").Connect(sortSummaryOnDataSize.Out("sorted")) wf.ConnectLast(plotSummary.Out("plot")) // -------------------------------- // Run the pipeline! 
// -------------------------------- wf.Run() } // -------------------------------------------------------------------------------- // JSON types // -------------------------------------------------------------------------------- // JSON output of cpSign crossvalidate // { // "classConfidence": 0.855, // "observedFuzziness": { // "A": 0.253, // "N": 0.207, // "overall": 0.231 // }, // "validity": 0.917, // "efficiency": 0.333, // "classCredibility": 0.631 // } // -------------------------------------------------------------------------------- type cpSignCrossValOutput struct { ClassConfidence float64 `json:"classConfidence"` ObservedFuzziness cpSignObservedFuzziness `json:"observedFuzziness"` Validity float64 `json:"validity"` Efficiency float64 `json:"efficiency"` ClassCredibility float64 `json:"classCredibility"` } type cpSignObservedFuzziness struct { Active float64 `json:"A"` Nonactive float64 `json:"N"` Overall float64 `json:"overall"` }
gammaVals = []string{ "0.1", "0.01",
random_line_split
train_models.go
// Workflow written in SciPipe. // For more information about SciPipe, see: http://scipipe.org package main import ( "flag" "fmt" "runtime" "strconv" str "strings" sp "github.com/scipipe/scipipe" spc "github.com/scipipe/scipipe/components" ) var ( maxTasks = flag.Int("maxtasks", 4, "Max number of local cores to use") threads = flag.Int("threads", 1, "Number of threads that Go is allowed to start") geneSet = flag.String("geneset", "smallest1", "Gene set to use (one of smallest1, smallest3, smallest4, bowes44)") runSlurm = flag.Bool("slurm", false, "Start computationally heavy jobs via SLURM") debug = flag.Bool("debug", false, "Increase logging level to include DEBUG messages") cpSignPath = "../../bin/cpsign-0.6.3.jar" geneSets = map[string][]string{ "bowes44": []string{ // Not available in dataset: "CHRNA1". // Not available in dataset: "KCNE1" // Instead we use MinK1 as they both share the same alias // "MinK", and also confirmed by Wes to be the same. "ADORA2A", "ADRA1A", "ADRA2A", "ADRB1", "ADRB2", "CNR1", "CNR2", "CCKAR", "DRD1", "DRD2", "EDNRA", "HRH1", "HRH2", "OPRD1", "OPRK1", "OPRM1", "CHRM1", "CHRM2", "CHRM3", "HTR1A", "HTR1B", "HTR2A", "HTR2B", "AVPR1A", "CHRNA4", "CACNA1C", "GABRA1", "KCNH2", "KCNQ1", "MINK1", "GRIN1", "HTR3A", "SCN5A", "ACHE", "PTGS1", "PTGS2", "MAOA", "PDE3A", "PDE4D", "LCK", "SLC6A3", "SLC6A2", "SLC6A4", "AR", "NR3C1", }, "bowes44min100percls": []string{ "PDE3A", "SCN5A", "CCKAR", "ADRB1", "PTGS1", "CHRM3", "CHRM2", "EDNRA", "MAOA", "LCK", "PTGS2", "SLC6A2", "ACHE", "CNR2", "CNR1", "ADORA2A", "OPRD1", "NR3C1", "AR", "SLC6A4", "OPRM1", "HTR1A", "SLC6A3", "OPRK1", "AVPR1A", "ADRB2", "DRD2", "KCNH2", "DRD1", "HTR2A", "CHRM1", }, "smallest1": []string{ "PDE3A", }, "smallest3": []string{ "PDE3A", "SCN5A", "CCKAR", }, "smallest4": []string{ "PDE3A", "SCN5A", "CCKAR", "ADRB1", }, } costVals = []string{ "1", "10", "100", } gammaVals = []string{ "0.1", "0.01", "0.001", } replicates = []string{ "r1", "r2", "r3", } ) func
() { // -------------------------------- // Parse flags and stuff // -------------------------------- flag.Parse() if *debug { sp.InitLogDebug() } else { sp.InitLogAudit() } if len(geneSets[*geneSet]) == 0 { names := []string{} for n, _ := range geneSets { names = append(names, n) } sp.Error.Fatalf("Incorrect gene set %s specified! Only allowed values are: %s\n", *geneSet, str.Join(names, ", ")) } runtime.GOMAXPROCS(*threads) // -------------------------------- // Show startup messages // -------------------------------- sp.Info.Printf("Using max %d OS threads to schedule max %d tasks\n", *threads, *maxTasks) sp.Info.Printf("Starting workflow for %s geneset\n", *geneSet) // -------------------------------- // Initialize processes and add to runner // -------------------------------- wf := sp.NewWorkflow("train_models", *maxTasks) dbFileName := "pubchem.chembl.dataset4publication_inchi_smiles.tsv.xz" dlExcapeDB := wf.NewProc("dlDB", fmt.Sprintf("wget https://zenodo.org/record/173258/files/%s -O {o:excapexz}", dbFileName)) dlExcapeDB.SetPathStatic("excapexz", "../../raw/"+dbFileName) unPackDB := wf.NewProc("unPackDB", "xzcat {i:xzfile} > {o:unxzed}") unPackDB.SetPathReplace("xzfile", "unxzed", ".xz", "") unPackDB.In("xzfile").Connect(dlExcapeDB.Out("excapexz")) //unPackDB.Prepend = "salloc -A snic2017-7-89 -n 2 -t 8:00:00 -J unpack_excapedb" finalModelsSummary := NewFinalModelSummarizer(wf, "finalmodels_summary_creator", "res/final_models_summary.tsv", '\t') // -------------------------------- // Set up gene-specific workflow branches // -------------------------------- for _, gene := range geneSets[*geneSet] { geneLC := str.ToLower(gene) uniq_gene := geneLC // -------------------------------------------------------------------------------- // Extract target data step // -------------------------------------------------------------------------------- extractTargetData := wf.NewProc("extract_target_data_"+uniq_gene, `awk -F"\t" '$9 == "{p:gene}" { print $12"\t"$4 }' 
{i:raw_data} > {o:target_data}`) extractTargetData.ParamPort("gene").ConnectStr(gene) extractTargetData.SetPathStatic("target_data", fmt.Sprintf("dat/%s/%s.tsv", geneLC, geneLC)) extractTargetData.In("raw_data").Connect(unPackDB.Out("unxzed")) if *runSlurm { extractTargetData.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1:00:00 -J scipipe_cnt_comp_" + geneLC // SLURM string } countTargetDataRows := wf.NewProc("cnt_targetdata_rows_"+uniq_gene, `awk '$2 == "A" { a += 1 } $2 == "N" { n += 1 } END { print a "\t" n }' {i:targetdata} > {o:count} # {p:gene}`) countTargetDataRows.SetPathExtend("targetdata", "count", ".count") countTargetDataRows.In("targetdata").Connect(extractTargetData.Out("target_data")) countTargetDataRows.ParamPort("gene").ConnectStr(gene) // -------------------------------------------------------------------------------- // Pre-compute step // -------------------------------------------------------------------------------- cpSignPrecomp := wf.NewProc("cpsign_precomp_"+uniq_gene, `java -jar `+cpSignPath+` precompute \ --license ../../bin/cpsign.lic \ --cptype 1 \ --trainfile {i:traindata} \ --labels A, N \ --model-out {o:precomp} \ --model-name "`+gene+` target profile"`) cpSignPrecomp.In("traindata").Connect(extractTargetData.Out("target_data")) cpSignPrecomp.SetPathExtend("traindata", "precomp", ".precomp") if *runSlurm { cpSignPrecomp.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J precmp_" + geneLC // SLURM string } for _, replicate := range replicates { uniq_repl := uniq_gene + "_" + replicate // -------------------------------------------------------------------------------- // Optimize cost/gamma-step // -------------------------------------------------------------------------------- includeGamma := false // For liblinear summarize := NewSummarizeCostGammaPerf(wf, "summarize_cost_gamma_perf_"+uniq_repl, "dat/"+geneLC+"/"+replicate+"/"+geneLC+"_cost_gamma_perf_stats.tsv", includeGamma) for _, cost := range costVals { uniq_cost 
:= uniq_repl + "_" + cost // If Liblinear evalCost := wf.NewProc("crossval_"+uniq_cost, `java -jar `+cpSignPath+` crossvalidate \ --license ../../bin/cpsign.lic \ --cptype 1 \ --trainfile {i:traindata} \ --impl liblinear \ --labels A, N \ --nr-models {p:nrmdl} \ --cost {p:cost} \ --cv-folds {p:cvfolds} \ --output-format json \ --confidence {p:confidence} | grep -P "^{" > {o:stats} # {p:gene} {p:replicate}`) evalCost.SetPathCustom("stats", func(t *sp.SciTask) string { c, err := strconv.ParseInt(t.Param("cost"), 10, 0) geneLC := str.ToLower(t.Param("gene")) sp.CheckErr(err) return str.Replace(t.InPath("traindata"), geneLC+".tsv", t.Param("replicate")+"/"+geneLC+".tsv", 1) + fmt.Sprintf(".liblin_c%03d", c) + "_crossval_stats.json" }) evalCost.In("traindata").Connect(extractTargetData.Out("target_data")) evalCost.ParamPort("nrmdl").ConnectStr("10") evalCost.ParamPort("cvfolds").ConnectStr("10") evalCost.ParamPort("confidence").ConnectStr("0.9") evalCost.ParamPort("gene").ConnectStr(gene) evalCost.ParamPort("replicate").ConnectStr(replicate) evalCost.ParamPort("cost").ConnectStr(cost) if *runSlurm { evalCost.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J evalcg_" + uniq_cost // SLURM string } extractCostGammaStats := spc.NewMapToKeys(wf, "extract_cgstats_"+uniq_cost, func(ip *sp.InformationPacket) map[string]string { crossValOut := &cpSignCrossValOutput{} ip.UnMarshalJson(crossValOut) newKeys := map[string]string{} newKeys["validity"] = fmt.Sprintf("%.3f", crossValOut.Validity) newKeys["efficiency"] = fmt.Sprintf("%.3f", crossValOut.Efficiency) newKeys["class_confidence"] = fmt.Sprintf("%.3f", crossValOut.ClassConfidence) newKeys["class_credibility"] = fmt.Sprintf("%.3f", crossValOut.ClassCredibility) newKeys["obsfuzz_active"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Active) newKeys["obsfuzz_nonactive"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Nonactive) newKeys["obsfuzz_overall"] = fmt.Sprintf("%.3f", 
crossValOut.ObservedFuzziness.Overall) return newKeys }) extractCostGammaStats.In.Connect(evalCost.Out("stats")) summarize.In.Connect(extractCostGammaStats.Out) } // end for cost // TODO: Let select best operate directly on the stream of IPs, not // via the summarize component, so that we can retain the keys in // the IP! selectBest := NewBestCostGamma(wf, "select_best_cost_gamma_"+uniq_repl, '\t', false, includeGamma) selectBest.InCSVFile.Connect(summarize.OutStats) // -------------------------------------------------------------------------------- // Train step // -------------------------------------------------------------------------------- cpSignTrain := wf.NewProc("cpsign_train_"+uniq_repl, `java -jar `+cpSignPath+` train \ --license ../../bin/cpsign.lic \ --cptype 1 \ --modelfile {i:model} \ --labels A, N \ --impl liblinear \ --nr-models {p:nrmdl} \ --cost {p:cost} \ --model-out {o:model} \ --model-name "{p:gene} target profile" # {p:replicate} Validity: {p:validity} Efficiency: {p:efficiency} Class-Equalized Observed Fuzziness: {p:obsfuzz_classavg} Observed Fuzziness (Overall): {p:obsfuzz_overall} Observed Fuzziness (Active class): {p:obsfuzz_active} Observed Fuzziness (Non-active class): {p:obsfuzz_nonactive} Class Confidence: {p:class_confidence} Class Credibility: {p:class_credibility}`) cpSignTrain.In("model").Connect(cpSignPrecomp.Out("precomp")) cpSignTrain.ParamPort("nrmdl").ConnectStr("10") cpSignTrain.ParamPort("gene").ConnectStr(gene) cpSignTrain.ParamPort("replicate").ConnectStr(replicate) cpSignTrain.ParamPort("validity").Connect(selectBest.OutBestValidity) cpSignTrain.ParamPort("efficiency").Connect(selectBest.OutBestEfficiency) cpSignTrain.ParamPort("obsfuzz_classavg").Connect(selectBest.OutBestObsFuzzClassAvg) cpSignTrain.ParamPort("obsfuzz_overall").Connect(selectBest.OutBestObsFuzzOverall) cpSignTrain.ParamPort("obsfuzz_active").Connect(selectBest.OutBestObsFuzzActive) 
cpSignTrain.ParamPort("obsfuzz_nonactive").Connect(selectBest.OutBestObsFuzzNonactive) cpSignTrain.ParamPort("class_confidence").Connect(selectBest.OutBestClassConfidence) cpSignTrain.ParamPort("class_credibility").Connect(selectBest.OutBestClassCredibility) cpSignTrain.ParamPort("cost").Connect(selectBest.OutBestCost) cpSignTrain.SetPathCustom("model", func(t *sp.SciTask) string { return fmt.Sprintf("dat/final_models/%s/%s_c%s_nrmdl%s_%s.mdl", str.ToLower(t.Param("gene")), "liblin", t.Param("cost"), t.Param("nrmdl"), t.Param("replicate")) }) if *runSlurm { cpSignTrain.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J train_" + uniq_repl // SLURM string } finalModelsSummary.InModel.Connect(cpSignTrain.Out("model")) finalModelsSummary.InTargetDataCount.Connect(countTargetDataRows.Out("count")) } // end for replicate } // end for gene sortSummaryOnDataSize := wf.NewProc("sort_summary", "head -n 1 {i:summary} > {o:sorted} && tail -n +2 {i:summary} | sort -nk 16 >> {o:sorted}") sortSummaryOnDataSize.SetPathReplace("summary", "sorted", ".tsv", ".sorted.tsv") sortSummaryOnDataSize.In("summary").Connect(finalModelsSummary.OutSummary) plotSummary := wf.NewProc("plot_summary", "Rscript bin/plot_summary.r -i {i:summary} -o {o:plot} -f png") plotSummary.SetPathExtend("summary", "plot", ".plot.png") plotSummary.In("summary").Connect(sortSummaryOnDataSize.Out("sorted")) wf.ConnectLast(plotSummary.Out("plot")) // -------------------------------- // Run the pipeline! 
// -------------------------------- wf.Run() } // -------------------------------------------------------------------------------- // JSON types // -------------------------------------------------------------------------------- // JSON output of cpSign crossvalidate // { // "classConfidence": 0.855, // "observedFuzziness": { // "A": 0.253, // "N": 0.207, // "overall": 0.231 // }, // "validity": 0.917, // "efficiency": 0.333, // "classCredibility": 0.631 // } // -------------------------------------------------------------------------------- type cpSignCrossValOutput struct { ClassConfidence float64 `json:"classConfidence"` ObservedFuzziness cpSignObservedFuzziness `json:"observedFuzziness"` Validity float64 `json:"validity"` Efficiency float64 `json:"efficiency"` ClassCredibility float64 `json:"classCredibility"` } type cpSignObservedFuzziness struct { Active float64 `json:"A"` Nonactive float64 `json:"N"` Overall float64 `json:"overall"` }
main
identifier_name
train_models.go
// Workflow written in SciPipe. // For more information about SciPipe, see: http://scipipe.org package main import ( "flag" "fmt" "runtime" "strconv" str "strings" sp "github.com/scipipe/scipipe" spc "github.com/scipipe/scipipe/components" ) var ( maxTasks = flag.Int("maxtasks", 4, "Max number of local cores to use") threads = flag.Int("threads", 1, "Number of threads that Go is allowed to start") geneSet = flag.String("geneset", "smallest1", "Gene set to use (one of smallest1, smallest3, smallest4, bowes44)") runSlurm = flag.Bool("slurm", false, "Start computationally heavy jobs via SLURM") debug = flag.Bool("debug", false, "Increase logging level to include DEBUG messages") cpSignPath = "../../bin/cpsign-0.6.3.jar" geneSets = map[string][]string{ "bowes44": []string{ // Not available in dataset: "CHRNA1". // Not available in dataset: "KCNE1" // Instead we use MinK1 as they both share the same alias // "MinK", and also confirmed by Wes to be the same. "ADORA2A", "ADRA1A", "ADRA2A", "ADRB1", "ADRB2", "CNR1", "CNR2", "CCKAR", "DRD1", "DRD2", "EDNRA", "HRH1", "HRH2", "OPRD1", "OPRK1", "OPRM1", "CHRM1", "CHRM2", "CHRM3", "HTR1A", "HTR1B", "HTR2A", "HTR2B", "AVPR1A", "CHRNA4", "CACNA1C", "GABRA1", "KCNH2", "KCNQ1", "MINK1", "GRIN1", "HTR3A", "SCN5A", "ACHE", "PTGS1", "PTGS2", "MAOA", "PDE3A", "PDE4D", "LCK", "SLC6A3", "SLC6A2", "SLC6A4", "AR", "NR3C1", }, "bowes44min100percls": []string{ "PDE3A", "SCN5A", "CCKAR", "ADRB1", "PTGS1", "CHRM3", "CHRM2", "EDNRA", "MAOA", "LCK", "PTGS2", "SLC6A2", "ACHE", "CNR2", "CNR1", "ADORA2A", "OPRD1", "NR3C1", "AR", "SLC6A4", "OPRM1", "HTR1A", "SLC6A3", "OPRK1", "AVPR1A", "ADRB2", "DRD2", "KCNH2", "DRD1", "HTR2A", "CHRM1", }, "smallest1": []string{ "PDE3A", }, "smallest3": []string{ "PDE3A", "SCN5A", "CCKAR", }, "smallest4": []string{ "PDE3A", "SCN5A", "CCKAR", "ADRB1", }, } costVals = []string{ "1", "10", "100", } gammaVals = []string{ "0.1", "0.01", "0.001", } replicates = []string{ "r1", "r2", "r3", } ) func main() { // 
-------------------------------- // Parse flags and stuff // -------------------------------- flag.Parse() if *debug { sp.InitLogDebug() } else { sp.InitLogAudit() } if len(geneSets[*geneSet]) == 0 { names := []string{} for n, _ := range geneSets
sp.Error.Fatalf("Incorrect gene set %s specified! Only allowed values are: %s\n", *geneSet, str.Join(names, ", ")) } runtime.GOMAXPROCS(*threads) // -------------------------------- // Show startup messages // -------------------------------- sp.Info.Printf("Using max %d OS threads to schedule max %d tasks\n", *threads, *maxTasks) sp.Info.Printf("Starting workflow for %s geneset\n", *geneSet) // -------------------------------- // Initialize processes and add to runner // -------------------------------- wf := sp.NewWorkflow("train_models", *maxTasks) dbFileName := "pubchem.chembl.dataset4publication_inchi_smiles.tsv.xz" dlExcapeDB := wf.NewProc("dlDB", fmt.Sprintf("wget https://zenodo.org/record/173258/files/%s -O {o:excapexz}", dbFileName)) dlExcapeDB.SetPathStatic("excapexz", "../../raw/"+dbFileName) unPackDB := wf.NewProc("unPackDB", "xzcat {i:xzfile} > {o:unxzed}") unPackDB.SetPathReplace("xzfile", "unxzed", ".xz", "") unPackDB.In("xzfile").Connect(dlExcapeDB.Out("excapexz")) //unPackDB.Prepend = "salloc -A snic2017-7-89 -n 2 -t 8:00:00 -J unpack_excapedb" finalModelsSummary := NewFinalModelSummarizer(wf, "finalmodels_summary_creator", "res/final_models_summary.tsv", '\t') // -------------------------------- // Set up gene-specific workflow branches // -------------------------------- for _, gene := range geneSets[*geneSet] { geneLC := str.ToLower(gene) uniq_gene := geneLC // -------------------------------------------------------------------------------- // Extract target data step // -------------------------------------------------------------------------------- extractTargetData := wf.NewProc("extract_target_data_"+uniq_gene, `awk -F"\t" '$9 == "{p:gene}" { print $12"\t"$4 }' {i:raw_data} > {o:target_data}`) extractTargetData.ParamPort("gene").ConnectStr(gene) extractTargetData.SetPathStatic("target_data", fmt.Sprintf("dat/%s/%s.tsv", geneLC, geneLC)) extractTargetData.In("raw_data").Connect(unPackDB.Out("unxzed")) if *runSlurm { extractTargetData.Prepend 
= "salloc -A snic2017-7-89 -n 4 -c 4 -t 1:00:00 -J scipipe_cnt_comp_" + geneLC // SLURM string } countTargetDataRows := wf.NewProc("cnt_targetdata_rows_"+uniq_gene, `awk '$2 == "A" { a += 1 } $2 == "N" { n += 1 } END { print a "\t" n }' {i:targetdata} > {o:count} # {p:gene}`) countTargetDataRows.SetPathExtend("targetdata", "count", ".count") countTargetDataRows.In("targetdata").Connect(extractTargetData.Out("target_data")) countTargetDataRows.ParamPort("gene").ConnectStr(gene) // -------------------------------------------------------------------------------- // Pre-compute step // -------------------------------------------------------------------------------- cpSignPrecomp := wf.NewProc("cpsign_precomp_"+uniq_gene, `java -jar `+cpSignPath+` precompute \ --license ../../bin/cpsign.lic \ --cptype 1 \ --trainfile {i:traindata} \ --labels A, N \ --model-out {o:precomp} \ --model-name "`+gene+` target profile"`) cpSignPrecomp.In("traindata").Connect(extractTargetData.Out("target_data")) cpSignPrecomp.SetPathExtend("traindata", "precomp", ".precomp") if *runSlurm { cpSignPrecomp.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J precmp_" + geneLC // SLURM string } for _, replicate := range replicates { uniq_repl := uniq_gene + "_" + replicate // -------------------------------------------------------------------------------- // Optimize cost/gamma-step // -------------------------------------------------------------------------------- includeGamma := false // For liblinear summarize := NewSummarizeCostGammaPerf(wf, "summarize_cost_gamma_perf_"+uniq_repl, "dat/"+geneLC+"/"+replicate+"/"+geneLC+"_cost_gamma_perf_stats.tsv", includeGamma) for _, cost := range costVals { uniq_cost := uniq_repl + "_" + cost // If Liblinear evalCost := wf.NewProc("crossval_"+uniq_cost, `java -jar `+cpSignPath+` crossvalidate \ --license ../../bin/cpsign.lic \ --cptype 1 \ --trainfile {i:traindata} \ --impl liblinear \ --labels A, N \ --nr-models {p:nrmdl} \ --cost {p:cost} \ 
--cv-folds {p:cvfolds} \ --output-format json \ --confidence {p:confidence} | grep -P "^{" > {o:stats} # {p:gene} {p:replicate}`) evalCost.SetPathCustom("stats", func(t *sp.SciTask) string { c, err := strconv.ParseInt(t.Param("cost"), 10, 0) geneLC := str.ToLower(t.Param("gene")) sp.CheckErr(err) return str.Replace(t.InPath("traindata"), geneLC+".tsv", t.Param("replicate")+"/"+geneLC+".tsv", 1) + fmt.Sprintf(".liblin_c%03d", c) + "_crossval_stats.json" }) evalCost.In("traindata").Connect(extractTargetData.Out("target_data")) evalCost.ParamPort("nrmdl").ConnectStr("10") evalCost.ParamPort("cvfolds").ConnectStr("10") evalCost.ParamPort("confidence").ConnectStr("0.9") evalCost.ParamPort("gene").ConnectStr(gene) evalCost.ParamPort("replicate").ConnectStr(replicate) evalCost.ParamPort("cost").ConnectStr(cost) if *runSlurm { evalCost.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J evalcg_" + uniq_cost // SLURM string } extractCostGammaStats := spc.NewMapToKeys(wf, "extract_cgstats_"+uniq_cost, func(ip *sp.InformationPacket) map[string]string { crossValOut := &cpSignCrossValOutput{} ip.UnMarshalJson(crossValOut) newKeys := map[string]string{} newKeys["validity"] = fmt.Sprintf("%.3f", crossValOut.Validity) newKeys["efficiency"] = fmt.Sprintf("%.3f", crossValOut.Efficiency) newKeys["class_confidence"] = fmt.Sprintf("%.3f", crossValOut.ClassConfidence) newKeys["class_credibility"] = fmt.Sprintf("%.3f", crossValOut.ClassCredibility) newKeys["obsfuzz_active"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Active) newKeys["obsfuzz_nonactive"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Nonactive) newKeys["obsfuzz_overall"] = fmt.Sprintf("%.3f", crossValOut.ObservedFuzziness.Overall) return newKeys }) extractCostGammaStats.In.Connect(evalCost.Out("stats")) summarize.In.Connect(extractCostGammaStats.Out) } // end for cost // TODO: Let select best operate directly on the stream of IPs, not // via the summarize component, so that we can retain the keys in 
// the IP! selectBest := NewBestCostGamma(wf, "select_best_cost_gamma_"+uniq_repl, '\t', false, includeGamma) selectBest.InCSVFile.Connect(summarize.OutStats) // -------------------------------------------------------------------------------- // Train step // -------------------------------------------------------------------------------- cpSignTrain := wf.NewProc("cpsign_train_"+uniq_repl, `java -jar `+cpSignPath+` train \ --license ../../bin/cpsign.lic \ --cptype 1 \ --modelfile {i:model} \ --labels A, N \ --impl liblinear \ --nr-models {p:nrmdl} \ --cost {p:cost} \ --model-out {o:model} \ --model-name "{p:gene} target profile" # {p:replicate} Validity: {p:validity} Efficiency: {p:efficiency} Class-Equalized Observed Fuzziness: {p:obsfuzz_classavg} Observed Fuzziness (Overall): {p:obsfuzz_overall} Observed Fuzziness (Active class): {p:obsfuzz_active} Observed Fuzziness (Non-active class): {p:obsfuzz_nonactive} Class Confidence: {p:class_confidence} Class Credibility: {p:class_credibility}`) cpSignTrain.In("model").Connect(cpSignPrecomp.Out("precomp")) cpSignTrain.ParamPort("nrmdl").ConnectStr("10") cpSignTrain.ParamPort("gene").ConnectStr(gene) cpSignTrain.ParamPort("replicate").ConnectStr(replicate) cpSignTrain.ParamPort("validity").Connect(selectBest.OutBestValidity) cpSignTrain.ParamPort("efficiency").Connect(selectBest.OutBestEfficiency) cpSignTrain.ParamPort("obsfuzz_classavg").Connect(selectBest.OutBestObsFuzzClassAvg) cpSignTrain.ParamPort("obsfuzz_overall").Connect(selectBest.OutBestObsFuzzOverall) cpSignTrain.ParamPort("obsfuzz_active").Connect(selectBest.OutBestObsFuzzActive) cpSignTrain.ParamPort("obsfuzz_nonactive").Connect(selectBest.OutBestObsFuzzNonactive) cpSignTrain.ParamPort("class_confidence").Connect(selectBest.OutBestClassConfidence) cpSignTrain.ParamPort("class_credibility").Connect(selectBest.OutBestClassCredibility) cpSignTrain.ParamPort("cost").Connect(selectBest.OutBestCost) cpSignTrain.SetPathCustom("model", func(t *sp.SciTask) string { 
return fmt.Sprintf("dat/final_models/%s/%s_c%s_nrmdl%s_%s.mdl", str.ToLower(t.Param("gene")), "liblin", t.Param("cost"), t.Param("nrmdl"), t.Param("replicate")) }) if *runSlurm { cpSignTrain.Prepend = "salloc -A snic2017-7-89 -n 4 -c 4 -t 1-00:00:00 -J train_" + uniq_repl // SLURM string } finalModelsSummary.InModel.Connect(cpSignTrain.Out("model")) finalModelsSummary.InTargetDataCount.Connect(countTargetDataRows.Out("count")) } // end for replicate } // end for gene sortSummaryOnDataSize := wf.NewProc("sort_summary", "head -n 1 {i:summary} > {o:sorted} && tail -n +2 {i:summary} | sort -nk 16 >> {o:sorted}") sortSummaryOnDataSize.SetPathReplace("summary", "sorted", ".tsv", ".sorted.tsv") sortSummaryOnDataSize.In("summary").Connect(finalModelsSummary.OutSummary) plotSummary := wf.NewProc("plot_summary", "Rscript bin/plot_summary.r -i {i:summary} -o {o:plot} -f png") plotSummary.SetPathExtend("summary", "plot", ".plot.png") plotSummary.In("summary").Connect(sortSummaryOnDataSize.Out("sorted")) wf.ConnectLast(plotSummary.Out("plot")) // -------------------------------- // Run the pipeline! 
// -------------------------------- wf.Run() } // -------------------------------------------------------------------------------- // JSON types // -------------------------------------------------------------------------------- // JSON output of cpSign crossvalidate // { // "classConfidence": 0.855, // "observedFuzziness": { // "A": 0.253, // "N": 0.207, // "overall": 0.231 // }, // "validity": 0.917, // "efficiency": 0.333, // "classCredibility": 0.631 // } // -------------------------------------------------------------------------------- type cpSignCrossValOutput struct { ClassConfidence float64 `json:"classConfidence"` ObservedFuzziness cpSignObservedFuzziness `json:"observedFuzziness"` Validity float64 `json:"validity"` Efficiency float64 `json:"efficiency"` ClassCredibility float64 `json:"classCredibility"` } type cpSignObservedFuzziness struct { Active float64 `json:"A"` Nonactive float64 `json:"N"` Overall float64 `json:"overall"` }
{ names = append(names, n) }
conditional_block
beacon_chain_builder.rs
use crate::{BeaconChain, BeaconChainTypes}; use eth2_hashing::hash; use lighthouse_bootstrap::Bootstrapper; use merkle_proof::MerkleTree; use rayon::prelude::*; use slog::Logger; use ssz::{Decode, Encode}; use state_processing::initialize_beacon_state_from_eth1; use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; use std::sync::Arc; use std::time::SystemTime; use tree_hash::{SignedRoot, TreeHash}; use types::{ BeaconBlock, BeaconState, ChainSpec, Deposit, DepositData, Domain, EthSpec, Fork, Hash256, Keypair, PublicKey, Signature, }; enum BuildStrategy<T: BeaconChainTypes> { FromGenesis { genesis_state: Box<BeaconState<T::EthSpec>>, genesis_block: Box<BeaconBlock<T::EthSpec>>, }, LoadFromStore, } pub struct BeaconChainBuilder<T: BeaconChainTypes> { build_strategy: BuildStrategy<T>, spec: ChainSpec, log: Logger, } impl<T: BeaconChainTypes> BeaconChainBuilder<T> { pub fn recent_genesis( keypairs: &[Keypair], minutes: u64, spec: ChainSpec, log: Logger, ) -> Result<Self, String> { Self::quick_start(recent_genesis_time(minutes), keypairs, spec, log) } pub fn quick_start( genesis_time: u64, keypairs: &[Keypair], spec: ChainSpec, log: Logger, ) -> Result<Self, String> { let genesis_state = interop_genesis_state(keypairs, genesis_time, &spec)?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn yaml_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let file = File::open(file.clone()) .map_err(|e| format!("Unable to open YAML genesis state file {:?}: {:?}", file, e))?; let genesis_state = serde_yaml::from_reader(file) .map_err(|e| format!("Unable to parse YAML genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn ssz_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let mut file = File::open(file.clone()) .map_err(|e| format!("Unable to open SSZ genesis state file {:?}: {:?}", file, e))?; let mut bytes = vec![]; file.read_to_end(&mut bytes) 
.map_err(|e| format!("Failed to read SSZ file: {:?}", e))?; let genesis_state = BeaconState::from_ssz_bytes(&bytes) .map_err(|e| format!("Unable to parse SSZ genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn json_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let file = File::open(file.clone()) .map_err(|e| format!("Unable to open JSON genesis state file {:?}: {:?}", file, e))?; let genesis_state = serde_json::from_reader(file) .map_err(|e| format!("Unable to parse JSON genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn http_bootstrap(server: &str, spec: ChainSpec, log: Logger) -> Result<Self, String> { let bootstrapper = Bootstrapper::connect(server.to_string(), &log) .map_err(|e| format!("Failed to initialize bootstrap client: {}", e))?; let (genesis_state, genesis_block) = bootstrapper .genesis() .map_err(|e| format!("Failed to bootstrap genesis state: {}", e))?; Ok(Self { build_strategy: BuildStrategy::FromGenesis { genesis_block: Box::new(genesis_block), genesis_state: Box::new(genesis_state), }, spec, log, }) } fn from_genesis_state( genesis_state: BeaconState<T::EthSpec>, spec: ChainSpec, log: Logger, ) -> Self { Self { build_strategy: BuildStrategy::FromGenesis { genesis_block: Box::new(genesis_block(&genesis_state, &spec)), genesis_state: Box::new(genesis_state), }, spec, log, } } pub fn from_store(spec: ChainSpec, log: Logger) -> Self { Self { build_strategy: BuildStrategy::LoadFromStore, spec, log, } } pub fn build( self, store: Arc<T::Store>, eth1_backend: T::Eth1Chain, event_handler: T::EventHandler, ) -> Result<BeaconChain<T>, String> { Ok(match self.build_strategy { BuildStrategy::LoadFromStore => { BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log) .map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))? 
.ok_or_else(|| "Unable to find exising BeaconChain in database.".to_string())? } BuildStrategy::FromGenesis { genesis_block, genesis_state, } => BeaconChain::from_genesis( store, eth1_backend, event_handler, genesis_state.as_ref().clone(), genesis_block.as_ref().clone(), self.spec, self.log, ) .map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e))?, }) } } fn genesis_block<T: EthSpec>(genesis_state: &BeaconState<T>, spec: &ChainSpec) -> BeaconBlock<T> { let mut genesis_block = BeaconBlock::empty(&spec); genesis_block.state_root = genesis_state.canonical_root(); genesis_block } /// Builds a genesis state as defined by the Eth2 interop procedure (see below). /// /// Reference: /// https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start fn interop_genesis_state<T: EthSpec>( keypairs: &[Keypair], genesis_time: u64, spec: &ChainSpec, ) -> Result<BeaconState<T>, String> { let eth1_block_hash = Hash256::from_slice(&[0x42; 32]); let eth1_timestamp = 2_u64.pow(40); let amount = spec.max_effective_balance; let withdrawal_credentials = |pubkey: &PublicKey| { let mut credentials = hash(&pubkey.as_ssz_bytes()); credentials[0] = spec.bls_withdrawal_prefix_byte; Hash256::from_slice(&credentials) }; let datas = keypairs .into_par_iter() .map(|keypair| { let mut data = DepositData { withdrawal_credentials: withdrawal_credentials(&keypair.pk), pubkey: keypair.pk.clone().into(), amount, signature: Signature::empty_signature().into(), }; let domain = spec.get_domain( spec.genesis_slot.epoch(T::slots_per_epoch()), Domain::Deposit, &Fork::default(), ); data.signature = Signature::new(&data.signed_root()[..], domain, &keypair.sk).into(); data }) .collect::<Vec<_>>(); let deposit_root_leaves = datas .par_iter() .map(|data| Hash256::from_slice(&data.tree_hash_root())) .collect::<Vec<_>>(); let mut proofs = vec![]; let depth = spec.deposit_contract_tree_depth as usize; let mut tree = MerkleTree::create(&[], depth); for (i, 
deposit_leaf) in deposit_root_leaves.iter().enumerate() { if let Err(_) = tree.push_leaf(*deposit_leaf, depth)
let (_, mut proof) = tree.generate_proof(i, depth); proof.push(Hash256::from_slice(&int_to_bytes32(i + 1))); assert_eq!( proof.len(), depth + 1, "Deposit proof should be correct len" ); proofs.push(proof); } let deposits = datas .into_par_iter() .zip(proofs.into_par_iter()) .map(|(data, proof)| (data, proof.into())) .map(|(data, proof)| Deposit { proof, data }) .collect::<Vec<_>>(); let mut state = initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits, spec) .map_err(|e| format!("Unable to initialize genesis state: {:?}", e))?; state.genesis_time = genesis_time; // Invalid all the caches after all the manual state surgery. state.drop_all_caches(); Ok(state) } /// Returns `int` as little-endian bytes with a length of 32. fn int_to_bytes32(int: usize) -> Vec<u8> { let mut vec = int.to_le_bytes().to_vec(); vec.resize(32, 0); vec } /// Returns the system time, mod 30 minutes. /// /// Used for easily creating testnets. fn recent_genesis_time(minutes: u64) -> u64 { let now = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .unwrap() .as_secs(); let secs_after_last_period = now.checked_rem(minutes * 60).unwrap_or(0); now - secs_after_last_period } #[cfg(test)] mod test { use super::*; use types::{test_utils::generate_deterministic_keypairs, EthSpec, MinimalEthSpec}; type TestEthSpec = MinimalEthSpec; #[test] fn interop_state() { let validator_count = 16; let genesis_time = 42; let spec = &TestEthSpec::default_spec(); let keypairs = generate_deterministic_keypairs(validator_count); let state = interop_genesis_state::<TestEthSpec>(&keypairs, genesis_time, spec) .expect("should build state"); assert_eq!( state.eth1_data.block_hash, Hash256::from_slice(&[0x42; 32]), "eth1 block hash should be co-ordinated junk" ); assert_eq!( state.genesis_time, genesis_time, "genesis time should be as specified" ); for b in &state.balances { assert_eq!( *b, spec.max_effective_balance, "validator balances should be max effective balance" ); } for v in 
&state.validators { let creds = v.withdrawal_credentials.as_bytes(); assert_eq!( creds[0], spec.bls_withdrawal_prefix_byte, "first byte of withdrawal creds should be bls prefix" ); assert_eq!( &creds[1..], &hash(&v.pubkey.as_ssz_bytes())[1..], "rest of withdrawal creds should be pubkey hash" ) } assert_eq!( state.balances.len(), validator_count, "validator balances len should be correct" ); assert_eq!( state.validators.len(), validator_count, "validator count should be correct" ); } }
{ return Err(String::from("Failed to push leaf")); }
conditional_block
beacon_chain_builder.rs
use crate::{BeaconChain, BeaconChainTypes}; use eth2_hashing::hash; use lighthouse_bootstrap::Bootstrapper; use merkle_proof::MerkleTree; use rayon::prelude::*; use slog::Logger; use ssz::{Decode, Encode}; use state_processing::initialize_beacon_state_from_eth1; use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; use std::sync::Arc; use std::time::SystemTime; use tree_hash::{SignedRoot, TreeHash}; use types::{ BeaconBlock, BeaconState, ChainSpec, Deposit, DepositData, Domain, EthSpec, Fork, Hash256, Keypair, PublicKey, Signature, }; enum BuildStrategy<T: BeaconChainTypes> { FromGenesis { genesis_state: Box<BeaconState<T::EthSpec>>, genesis_block: Box<BeaconBlock<T::EthSpec>>, }, LoadFromStore, } pub struct BeaconChainBuilder<T: BeaconChainTypes> { build_strategy: BuildStrategy<T>, spec: ChainSpec, log: Logger, } impl<T: BeaconChainTypes> BeaconChainBuilder<T> { pub fn recent_genesis( keypairs: &[Keypair], minutes: u64, spec: ChainSpec, log: Logger, ) -> Result<Self, String> { Self::quick_start(recent_genesis_time(minutes), keypairs, spec, log) } pub fn quick_start( genesis_time: u64, keypairs: &[Keypair], spec: ChainSpec, log: Logger, ) -> Result<Self, String> { let genesis_state = interop_genesis_state(keypairs, genesis_time, &spec)?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn yaml_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let file = File::open(file.clone()) .map_err(|e| format!("Unable to open YAML genesis state file {:?}: {:?}", file, e))?; let genesis_state = serde_yaml::from_reader(file) .map_err(|e| format!("Unable to parse YAML genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn ssz_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let mut file = File::open(file.clone()) .map_err(|e| format!("Unable to open SSZ genesis state file {:?}: {:?}", file, e))?; let mut bytes = vec![]; file.read_to_end(&mut bytes) 
.map_err(|e| format!("Failed to read SSZ file: {:?}", e))?; let genesis_state = BeaconState::from_ssz_bytes(&bytes) .map_err(|e| format!("Unable to parse SSZ genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn json_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let file = File::open(file.clone()) .map_err(|e| format!("Unable to open JSON genesis state file {:?}: {:?}", file, e))?; let genesis_state = serde_json::from_reader(file) .map_err(|e| format!("Unable to parse JSON genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn http_bootstrap(server: &str, spec: ChainSpec, log: Logger) -> Result<Self, String> { let bootstrapper = Bootstrapper::connect(server.to_string(), &log) .map_err(|e| format!("Failed to initialize bootstrap client: {}", e))?; let (genesis_state, genesis_block) = bootstrapper .genesis() .map_err(|e| format!("Failed to bootstrap genesis state: {}", e))?; Ok(Self { build_strategy: BuildStrategy::FromGenesis { genesis_block: Box::new(genesis_block), genesis_state: Box::new(genesis_state), }, spec, log, }) } fn from_genesis_state( genesis_state: BeaconState<T::EthSpec>, spec: ChainSpec, log: Logger, ) -> Self { Self { build_strategy: BuildStrategy::FromGenesis { genesis_block: Box::new(genesis_block(&genesis_state, &spec)), genesis_state: Box::new(genesis_state), }, spec, log, } } pub fn from_store(spec: ChainSpec, log: Logger) -> Self { Self { build_strategy: BuildStrategy::LoadFromStore, spec, log, } } pub fn build( self, store: Arc<T::Store>, eth1_backend: T::Eth1Chain, event_handler: T::EventHandler, ) -> Result<BeaconChain<T>, String> { Ok(match self.build_strategy { BuildStrategy::LoadFromStore => { BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log) .map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))? 
.ok_or_else(|| "Unable to find exising BeaconChain in database.".to_string())? } BuildStrategy::FromGenesis { genesis_block, genesis_state, } => BeaconChain::from_genesis( store, eth1_backend, event_handler, genesis_state.as_ref().clone(), genesis_block.as_ref().clone(), self.spec, self.log, ) .map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e))?, }) } } fn genesis_block<T: EthSpec>(genesis_state: &BeaconState<T>, spec: &ChainSpec) -> BeaconBlock<T> { let mut genesis_block = BeaconBlock::empty(&spec); genesis_block.state_root = genesis_state.canonical_root(); genesis_block } /// Builds a genesis state as defined by the Eth2 interop procedure (see below). /// /// Reference: /// https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start fn interop_genesis_state<T: EthSpec>( keypairs: &[Keypair], genesis_time: u64, spec: &ChainSpec, ) -> Result<BeaconState<T>, String> { let eth1_block_hash = Hash256::from_slice(&[0x42; 32]); let eth1_timestamp = 2_u64.pow(40); let amount = spec.max_effective_balance; let withdrawal_credentials = |pubkey: &PublicKey| { let mut credentials = hash(&pubkey.as_ssz_bytes()); credentials[0] = spec.bls_withdrawal_prefix_byte; Hash256::from_slice(&credentials) }; let datas = keypairs .into_par_iter() .map(|keypair| { let mut data = DepositData { withdrawal_credentials: withdrawal_credentials(&keypair.pk), pubkey: keypair.pk.clone().into(), amount, signature: Signature::empty_signature().into(), }; let domain = spec.get_domain( spec.genesis_slot.epoch(T::slots_per_epoch()), Domain::Deposit, &Fork::default(), ); data.signature = Signature::new(&data.signed_root()[..], domain, &keypair.sk).into(); data }) .collect::<Vec<_>>(); let deposit_root_leaves = datas .par_iter() .map(|data| Hash256::from_slice(&data.tree_hash_root())) .collect::<Vec<_>>(); let mut proofs = vec![]; let depth = spec.deposit_contract_tree_depth as usize; let mut tree = MerkleTree::create(&[], depth); for (i, 
deposit_leaf) in deposit_root_leaves.iter().enumerate() { if let Err(_) = tree.push_leaf(*deposit_leaf, depth) { return Err(String::from("Failed to push leaf")); } let (_, mut proof) = tree.generate_proof(i, depth); proof.push(Hash256::from_slice(&int_to_bytes32(i + 1))); assert_eq!( proof.len(), depth + 1, "Deposit proof should be correct len" ); proofs.push(proof); } let deposits = datas .into_par_iter() .zip(proofs.into_par_iter()) .map(|(data, proof)| (data, proof.into())) .map(|(data, proof)| Deposit { proof, data }) .collect::<Vec<_>>(); let mut state = initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits, spec) .map_err(|e| format!("Unable to initialize genesis state: {:?}", e))?; state.genesis_time = genesis_time; // Invalid all the caches after all the manual state surgery. state.drop_all_caches(); Ok(state) } /// Returns `int` as little-endian bytes with a length of 32. fn int_to_bytes32(int: usize) -> Vec<u8> { let mut vec = int.to_le_bytes().to_vec(); vec.resize(32, 0); vec } /// Returns the system time, mod 30 minutes. /// /// Used for easily creating testnets. 
fn recent_genesis_time(minutes: u64) -> u64 { let now = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .unwrap() .as_secs(); let secs_after_last_period = now.checked_rem(minutes * 60).unwrap_or(0); now - secs_after_last_period } #[cfg(test)] mod test { use super::*; use types::{test_utils::generate_deterministic_keypairs, EthSpec, MinimalEthSpec}; type TestEthSpec = MinimalEthSpec; #[test] fn interop_state() { let validator_count = 16; let genesis_time = 42; let spec = &TestEthSpec::default_spec(); let keypairs = generate_deterministic_keypairs(validator_count); let state = interop_genesis_state::<TestEthSpec>(&keypairs, genesis_time, spec) .expect("should build state"); assert_eq!( state.eth1_data.block_hash, Hash256::from_slice(&[0x42; 32]), "eth1 block hash should be co-ordinated junk" ); assert_eq!( state.genesis_time, genesis_time, "genesis time should be as specified" ); for b in &state.balances { assert_eq!( *b, spec.max_effective_balance,
let creds = v.withdrawal_credentials.as_bytes(); assert_eq!( creds[0], spec.bls_withdrawal_prefix_byte, "first byte of withdrawal creds should be bls prefix" ); assert_eq!( &creds[1..], &hash(&v.pubkey.as_ssz_bytes())[1..], "rest of withdrawal creds should be pubkey hash" ) } assert_eq!( state.balances.len(), validator_count, "validator balances len should be correct" ); assert_eq!( state.validators.len(), validator_count, "validator count should be correct" ); } }
"validator balances should be max effective balance" ); } for v in &state.validators {
random_line_split
beacon_chain_builder.rs
use crate::{BeaconChain, BeaconChainTypes}; use eth2_hashing::hash; use lighthouse_bootstrap::Bootstrapper; use merkle_proof::MerkleTree; use rayon::prelude::*; use slog::Logger; use ssz::{Decode, Encode}; use state_processing::initialize_beacon_state_from_eth1; use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; use std::sync::Arc; use std::time::SystemTime; use tree_hash::{SignedRoot, TreeHash}; use types::{ BeaconBlock, BeaconState, ChainSpec, Deposit, DepositData, Domain, EthSpec, Fork, Hash256, Keypair, PublicKey, Signature, }; enum BuildStrategy<T: BeaconChainTypes> { FromGenesis { genesis_state: Box<BeaconState<T::EthSpec>>, genesis_block: Box<BeaconBlock<T::EthSpec>>, }, LoadFromStore, } pub struct BeaconChainBuilder<T: BeaconChainTypes> { build_strategy: BuildStrategy<T>, spec: ChainSpec, log: Logger, } impl<T: BeaconChainTypes> BeaconChainBuilder<T> { pub fn recent_genesis( keypairs: &[Keypair], minutes: u64, spec: ChainSpec, log: Logger, ) -> Result<Self, String> { Self::quick_start(recent_genesis_time(minutes), keypairs, spec, log) } pub fn quick_start( genesis_time: u64, keypairs: &[Keypair], spec: ChainSpec, log: Logger, ) -> Result<Self, String> { let genesis_state = interop_genesis_state(keypairs, genesis_time, &spec)?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn yaml_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let file = File::open(file.clone()) .map_err(|e| format!("Unable to open YAML genesis state file {:?}: {:?}", file, e))?; let genesis_state = serde_yaml::from_reader(file) .map_err(|e| format!("Unable to parse YAML genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn
(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let mut file = File::open(file.clone()) .map_err(|e| format!("Unable to open SSZ genesis state file {:?}: {:?}", file, e))?; let mut bytes = vec![]; file.read_to_end(&mut bytes) .map_err(|e| format!("Failed to read SSZ file: {:?}", e))?; let genesis_state = BeaconState::from_ssz_bytes(&bytes) .map_err(|e| format!("Unable to parse SSZ genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn json_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let file = File::open(file.clone()) .map_err(|e| format!("Unable to open JSON genesis state file {:?}: {:?}", file, e))?; let genesis_state = serde_json::from_reader(file) .map_err(|e| format!("Unable to parse JSON genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn http_bootstrap(server: &str, spec: ChainSpec, log: Logger) -> Result<Self, String> { let bootstrapper = Bootstrapper::connect(server.to_string(), &log) .map_err(|e| format!("Failed to initialize bootstrap client: {}", e))?; let (genesis_state, genesis_block) = bootstrapper .genesis() .map_err(|e| format!("Failed to bootstrap genesis state: {}", e))?; Ok(Self { build_strategy: BuildStrategy::FromGenesis { genesis_block: Box::new(genesis_block), genesis_state: Box::new(genesis_state), }, spec, log, }) } fn from_genesis_state( genesis_state: BeaconState<T::EthSpec>, spec: ChainSpec, log: Logger, ) -> Self { Self { build_strategy: BuildStrategy::FromGenesis { genesis_block: Box::new(genesis_block(&genesis_state, &spec)), genesis_state: Box::new(genesis_state), }, spec, log, } } pub fn from_store(spec: ChainSpec, log: Logger) -> Self { Self { build_strategy: BuildStrategy::LoadFromStore, spec, log, } } pub fn build( self, store: Arc<T::Store>, eth1_backend: T::Eth1Chain, event_handler: T::EventHandler, ) -> Result<BeaconChain<T>, String> { Ok(match self.build_strategy { 
BuildStrategy::LoadFromStore => { BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log) .map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))? .ok_or_else(|| "Unable to find exising BeaconChain in database.".to_string())? } BuildStrategy::FromGenesis { genesis_block, genesis_state, } => BeaconChain::from_genesis( store, eth1_backend, event_handler, genesis_state.as_ref().clone(), genesis_block.as_ref().clone(), self.spec, self.log, ) .map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e))?, }) } } fn genesis_block<T: EthSpec>(genesis_state: &BeaconState<T>, spec: &ChainSpec) -> BeaconBlock<T> { let mut genesis_block = BeaconBlock::empty(&spec); genesis_block.state_root = genesis_state.canonical_root(); genesis_block } /// Builds a genesis state as defined by the Eth2 interop procedure (see below). /// /// Reference: /// https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start fn interop_genesis_state<T: EthSpec>( keypairs: &[Keypair], genesis_time: u64, spec: &ChainSpec, ) -> Result<BeaconState<T>, String> { let eth1_block_hash = Hash256::from_slice(&[0x42; 32]); let eth1_timestamp = 2_u64.pow(40); let amount = spec.max_effective_balance; let withdrawal_credentials = |pubkey: &PublicKey| { let mut credentials = hash(&pubkey.as_ssz_bytes()); credentials[0] = spec.bls_withdrawal_prefix_byte; Hash256::from_slice(&credentials) }; let datas = keypairs .into_par_iter() .map(|keypair| { let mut data = DepositData { withdrawal_credentials: withdrawal_credentials(&keypair.pk), pubkey: keypair.pk.clone().into(), amount, signature: Signature::empty_signature().into(), }; let domain = spec.get_domain( spec.genesis_slot.epoch(T::slots_per_epoch()), Domain::Deposit, &Fork::default(), ); data.signature = Signature::new(&data.signed_root()[..], domain, &keypair.sk).into(); data }) .collect::<Vec<_>>(); let deposit_root_leaves = datas .par_iter() .map(|data| 
Hash256::from_slice(&data.tree_hash_root())) .collect::<Vec<_>>(); let mut proofs = vec![]; let depth = spec.deposit_contract_tree_depth as usize; let mut tree = MerkleTree::create(&[], depth); for (i, deposit_leaf) in deposit_root_leaves.iter().enumerate() { if let Err(_) = tree.push_leaf(*deposit_leaf, depth) { return Err(String::from("Failed to push leaf")); } let (_, mut proof) = tree.generate_proof(i, depth); proof.push(Hash256::from_slice(&int_to_bytes32(i + 1))); assert_eq!( proof.len(), depth + 1, "Deposit proof should be correct len" ); proofs.push(proof); } let deposits = datas .into_par_iter() .zip(proofs.into_par_iter()) .map(|(data, proof)| (data, proof.into())) .map(|(data, proof)| Deposit { proof, data }) .collect::<Vec<_>>(); let mut state = initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits, spec) .map_err(|e| format!("Unable to initialize genesis state: {:?}", e))?; state.genesis_time = genesis_time; // Invalid all the caches after all the manual state surgery. state.drop_all_caches(); Ok(state) } /// Returns `int` as little-endian bytes with a length of 32. fn int_to_bytes32(int: usize) -> Vec<u8> { let mut vec = int.to_le_bytes().to_vec(); vec.resize(32, 0); vec } /// Returns the system time, mod 30 minutes. /// /// Used for easily creating testnets. 
fn recent_genesis_time(minutes: u64) -> u64 { let now = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .unwrap() .as_secs(); let secs_after_last_period = now.checked_rem(minutes * 60).unwrap_or(0); now - secs_after_last_period } #[cfg(test)] mod test { use super::*; use types::{test_utils::generate_deterministic_keypairs, EthSpec, MinimalEthSpec}; type TestEthSpec = MinimalEthSpec; #[test] fn interop_state() { let validator_count = 16; let genesis_time = 42; let spec = &TestEthSpec::default_spec(); let keypairs = generate_deterministic_keypairs(validator_count); let state = interop_genesis_state::<TestEthSpec>(&keypairs, genesis_time, spec) .expect("should build state"); assert_eq!( state.eth1_data.block_hash, Hash256::from_slice(&[0x42; 32]), "eth1 block hash should be co-ordinated junk" ); assert_eq!( state.genesis_time, genesis_time, "genesis time should be as specified" ); for b in &state.balances { assert_eq!( *b, spec.max_effective_balance, "validator balances should be max effective balance" ); } for v in &state.validators { let creds = v.withdrawal_credentials.as_bytes(); assert_eq!( creds[0], spec.bls_withdrawal_prefix_byte, "first byte of withdrawal creds should be bls prefix" ); assert_eq!( &creds[1..], &hash(&v.pubkey.as_ssz_bytes())[1..], "rest of withdrawal creds should be pubkey hash" ) } assert_eq!( state.balances.len(), validator_count, "validator balances len should be correct" ); assert_eq!( state.validators.len(), validator_count, "validator count should be correct" ); } }
ssz_state
identifier_name
beacon_chain_builder.rs
use crate::{BeaconChain, BeaconChainTypes}; use eth2_hashing::hash; use lighthouse_bootstrap::Bootstrapper; use merkle_proof::MerkleTree; use rayon::prelude::*; use slog::Logger; use ssz::{Decode, Encode}; use state_processing::initialize_beacon_state_from_eth1; use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; use std::sync::Arc; use std::time::SystemTime; use tree_hash::{SignedRoot, TreeHash}; use types::{ BeaconBlock, BeaconState, ChainSpec, Deposit, DepositData, Domain, EthSpec, Fork, Hash256, Keypair, PublicKey, Signature, }; enum BuildStrategy<T: BeaconChainTypes> { FromGenesis { genesis_state: Box<BeaconState<T::EthSpec>>, genesis_block: Box<BeaconBlock<T::EthSpec>>, }, LoadFromStore, } pub struct BeaconChainBuilder<T: BeaconChainTypes> { build_strategy: BuildStrategy<T>, spec: ChainSpec, log: Logger, } impl<T: BeaconChainTypes> BeaconChainBuilder<T> { pub fn recent_genesis( keypairs: &[Keypair], minutes: u64, spec: ChainSpec, log: Logger, ) -> Result<Self, String> { Self::quick_start(recent_genesis_time(minutes), keypairs, spec, log) } pub fn quick_start( genesis_time: u64, keypairs: &[Keypair], spec: ChainSpec, log: Logger, ) -> Result<Self, String> { let genesis_state = interop_genesis_state(keypairs, genesis_time, &spec)?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn yaml_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let file = File::open(file.clone()) .map_err(|e| format!("Unable to open YAML genesis state file {:?}: {:?}", file, e))?; let genesis_state = serde_yaml::from_reader(file) .map_err(|e| format!("Unable to parse YAML genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn ssz_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String> { let mut file = File::open(file.clone()) .map_err(|e| format!("Unable to open SSZ genesis state file {:?}: {:?}", file, e))?; let mut bytes = vec![]; file.read_to_end(&mut bytes) 
.map_err(|e| format!("Failed to read SSZ file: {:?}", e))?; let genesis_state = BeaconState::from_ssz_bytes(&bytes) .map_err(|e| format!("Unable to parse SSZ genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) } pub fn json_state(file: &PathBuf, spec: ChainSpec, log: Logger) -> Result<Self, String>
pub fn http_bootstrap(server: &str, spec: ChainSpec, log: Logger) -> Result<Self, String> { let bootstrapper = Bootstrapper::connect(server.to_string(), &log) .map_err(|e| format!("Failed to initialize bootstrap client: {}", e))?; let (genesis_state, genesis_block) = bootstrapper .genesis() .map_err(|e| format!("Failed to bootstrap genesis state: {}", e))?; Ok(Self { build_strategy: BuildStrategy::FromGenesis { genesis_block: Box::new(genesis_block), genesis_state: Box::new(genesis_state), }, spec, log, }) } fn from_genesis_state( genesis_state: BeaconState<T::EthSpec>, spec: ChainSpec, log: Logger, ) -> Self { Self { build_strategy: BuildStrategy::FromGenesis { genesis_block: Box::new(genesis_block(&genesis_state, &spec)), genesis_state: Box::new(genesis_state), }, spec, log, } } pub fn from_store(spec: ChainSpec, log: Logger) -> Self { Self { build_strategy: BuildStrategy::LoadFromStore, spec, log, } } pub fn build( self, store: Arc<T::Store>, eth1_backend: T::Eth1Chain, event_handler: T::EventHandler, ) -> Result<BeaconChain<T>, String> { Ok(match self.build_strategy { BuildStrategy::LoadFromStore => { BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log) .map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))? .ok_or_else(|| "Unable to find exising BeaconChain in database.".to_string())? } BuildStrategy::FromGenesis { genesis_block, genesis_state, } => BeaconChain::from_genesis( store, eth1_backend, event_handler, genesis_state.as_ref().clone(), genesis_block.as_ref().clone(), self.spec, self.log, ) .map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e))?, }) } } fn genesis_block<T: EthSpec>(genesis_state: &BeaconState<T>, spec: &ChainSpec) -> BeaconBlock<T> { let mut genesis_block = BeaconBlock::empty(&spec); genesis_block.state_root = genesis_state.canonical_root(); genesis_block } /// Builds a genesis state as defined by the Eth2 interop procedure (see below). 
/// /// Reference: /// https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start fn interop_genesis_state<T: EthSpec>( keypairs: &[Keypair], genesis_time: u64, spec: &ChainSpec, ) -> Result<BeaconState<T>, String> { let eth1_block_hash = Hash256::from_slice(&[0x42; 32]); let eth1_timestamp = 2_u64.pow(40); let amount = spec.max_effective_balance; let withdrawal_credentials = |pubkey: &PublicKey| { let mut credentials = hash(&pubkey.as_ssz_bytes()); credentials[0] = spec.bls_withdrawal_prefix_byte; Hash256::from_slice(&credentials) }; let datas = keypairs .into_par_iter() .map(|keypair| { let mut data = DepositData { withdrawal_credentials: withdrawal_credentials(&keypair.pk), pubkey: keypair.pk.clone().into(), amount, signature: Signature::empty_signature().into(), }; let domain = spec.get_domain( spec.genesis_slot.epoch(T::slots_per_epoch()), Domain::Deposit, &Fork::default(), ); data.signature = Signature::new(&data.signed_root()[..], domain, &keypair.sk).into(); data }) .collect::<Vec<_>>(); let deposit_root_leaves = datas .par_iter() .map(|data| Hash256::from_slice(&data.tree_hash_root())) .collect::<Vec<_>>(); let mut proofs = vec![]; let depth = spec.deposit_contract_tree_depth as usize; let mut tree = MerkleTree::create(&[], depth); for (i, deposit_leaf) in deposit_root_leaves.iter().enumerate() { if let Err(_) = tree.push_leaf(*deposit_leaf, depth) { return Err(String::from("Failed to push leaf")); } let (_, mut proof) = tree.generate_proof(i, depth); proof.push(Hash256::from_slice(&int_to_bytes32(i + 1))); assert_eq!( proof.len(), depth + 1, "Deposit proof should be correct len" ); proofs.push(proof); } let deposits = datas .into_par_iter() .zip(proofs.into_par_iter()) .map(|(data, proof)| (data, proof.into())) .map(|(data, proof)| Deposit { proof, data }) .collect::<Vec<_>>(); let mut state = initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits, spec) .map_err(|e| format!("Unable to 
initialize genesis state: {:?}", e))?; state.genesis_time = genesis_time; // Invalid all the caches after all the manual state surgery. state.drop_all_caches(); Ok(state) } /// Returns `int` as little-endian bytes with a length of 32. fn int_to_bytes32(int: usize) -> Vec<u8> { let mut vec = int.to_le_bytes().to_vec(); vec.resize(32, 0); vec } /// Returns the system time, mod 30 minutes. /// /// Used for easily creating testnets. fn recent_genesis_time(minutes: u64) -> u64 { let now = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .unwrap() .as_secs(); let secs_after_last_period = now.checked_rem(minutes * 60).unwrap_or(0); now - secs_after_last_period } #[cfg(test)] mod test { use super::*; use types::{test_utils::generate_deterministic_keypairs, EthSpec, MinimalEthSpec}; type TestEthSpec = MinimalEthSpec; #[test] fn interop_state() { let validator_count = 16; let genesis_time = 42; let spec = &TestEthSpec::default_spec(); let keypairs = generate_deterministic_keypairs(validator_count); let state = interop_genesis_state::<TestEthSpec>(&keypairs, genesis_time, spec) .expect("should build state"); assert_eq!( state.eth1_data.block_hash, Hash256::from_slice(&[0x42; 32]), "eth1 block hash should be co-ordinated junk" ); assert_eq!( state.genesis_time, genesis_time, "genesis time should be as specified" ); for b in &state.balances { assert_eq!( *b, spec.max_effective_balance, "validator balances should be max effective balance" ); } for v in &state.validators { let creds = v.withdrawal_credentials.as_bytes(); assert_eq!( creds[0], spec.bls_withdrawal_prefix_byte, "first byte of withdrawal creds should be bls prefix" ); assert_eq!( &creds[1..], &hash(&v.pubkey.as_ssz_bytes())[1..], "rest of withdrawal creds should be pubkey hash" ) } assert_eq!( state.balances.len(), validator_count, "validator balances len should be correct" ); assert_eq!( state.validators.len(), validator_count, "validator count should be correct" ); } }
{ let file = File::open(file.clone()) .map_err(|e| format!("Unable to open JSON genesis state file {:?}: {:?}", file, e))?; let genesis_state = serde_json::from_reader(file) .map_err(|e| format!("Unable to parse JSON genesis state file: {:?}", e))?; Ok(Self::from_genesis_state(genesis_state, spec, log)) }
identifier_body
networking.rs
use std::io::{Read, Write, Result, BufRead, BufReader, BufWriter}; use std::fs::File; use std::net::{TcpListener, TcpStream}; use std::mem::size_of; use std::sync::Arc; use std::sync::mpsc::{Sender, Receiver, channel}; use std::thread; use std::thread::sleep_ms; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use communication::communicator::{Binary, Process}; use drain::DrainExt; // TODO : Much of this only relates to BinaryWriter/BinaryReader based communication, not networking. // TODO : Could be moved somewhere less networking-specific. #[derive(Copy, Clone)] pub struct
{ pub graph: u64, // graph identifier pub channel: u64, // index of channel pub source: u64, // index of worker sending message pub target: u64, // index of worker receiving message pub length: u64, // number of bytes in message } impl MessageHeader { // returns a header when there is enough supporting data fn try_read(bytes: &mut &[u8]) -> Option<MessageHeader> { if bytes.len() > size_of::<MessageHeader>() { // capture original in case we need to rewind let original = *bytes; // unclear what order struct initializers run in, so ... let graph = bytes.read_u64::<LittleEndian>().unwrap(); let channel = bytes.read_u64::<LittleEndian>().unwrap(); let source = bytes.read_u64::<LittleEndian>().unwrap(); let target = bytes.read_u64::<LittleEndian>().unwrap(); let length = bytes.read_u64::<LittleEndian>().unwrap(); if bytes.len() >= length as usize { Some(MessageHeader { graph: graph, channel: channel, source: source, target: target, length: length, }) } else { // rewind the reader *bytes = original; None } } else { None } } fn write_to<W: Write>(&self, writer: &mut W) -> Result<()> { try!(writer.write_u64::<LittleEndian>(self.graph)); try!(writer.write_u64::<LittleEndian>(self.channel)); try!(writer.write_u64::<LittleEndian>(self.source)); try!(writer.write_u64::<LittleEndian>(self.target)); try!(writer.write_u64::<LittleEndian>(self.length)); Ok(()) } } // // structure in charge of receiving data from a Reader, for example the network // struct BinaryReceiver<R: Read> { // reader: R, // the generic reader // buffer: Vec<u8>, // current working buffer // double: Vec<u8>, // second working buffer // staging: Vec<u8>, // 1 << 20 of buffer to read into // targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>, // } // // impl<R: Read> BinaryReceiver<R> { // fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> { // BinaryReceiver { // reader: reader, // buffer: Vec::new(), // double: Vec::new(), // staging: 
vec![0u8; 1 << 20], // targets: Switchboard::new(channels), // } // } // // fn recv_loop(&mut self) { // loop { // // // attempt to read some more bytes into our buffer // // TODO : We read in to self.staging because extending a Vec<u8> is hard without // // TODO : using set_len, which is unsafe. // // TODO : Could consider optimizing for the self.buffer.len() == 0 case, swapping // // TODO : self.staging with self.buffer, rather than using write_all. // let read = self.reader.read(&mut self.staging[..]).unwrap_or(0); // self.buffer.write_all(&self.staging[..read]).unwrap(); // <-- shouldn't fail // // { // // get a view of available bytes // let mut slice = &self.buffer[..]; // // while let Some(header) = MessageHeader::try_read(&mut slice) { // // let h_len = header.length as usize; // length in bytes // let target = self.targets.ensure(header.target, header.graph, header.channel); // let mut buffer = target.1.try_recv().unwrap_or(Vec::new()); // // buffer.clear(); // buffer.write_all(&slice[..h_len]).unwrap(); // slice = &slice[h_len..]; // // target.0.send(buffer).unwrap(); // } // // // TODO: way inefficient... =/ Fix! 
:D // // if slice.len() < self.buffer.len() { // self.double.clear(); // self.double.write_all(slice).unwrap(); // // } // } // // // if self.double.len() > 0 { // mem::swap(&mut self.buffer, &mut self.double); // // self.double.clear(); // // } // } // } // } // structure in charge of receiving data from a Reader, for example the network struct BinaryReceiver<R: Read> { reader: R, // the generic reader buffer: Vec<u8>, // current working buffer length: usize, targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>, } impl<R: Read> BinaryReceiver<R> { fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> { BinaryReceiver { reader: reader, buffer: vec![0u8; 1 << 20], length: 0, targets: Switchboard::new(channels), } } fn recv_loop(&mut self) { loop { // if we've mostly filled our buffer and still can't read a whole message from it, // we'll need more space / to read more at once. let's double the buffer! if self.length >= self.buffer.len() / 2 { self.buffer.extend(::std::iter::repeat(0u8).take(self.length)); } // attempt to read some more bytes into our buffer let read = self.reader.read(&mut self.buffer[self.length..]).unwrap_or(0); self.length += read; let remaining = { let mut slice = &self.buffer[..self.length]; while let Some(header) = MessageHeader::try_read(&mut slice) { let h_len = header.length as usize; // length in bytes let target = &mut self.targets.ensure(header.target, header.graph, header.channel).0; target.send(slice[..h_len].to_vec()).unwrap(); slice = &slice[h_len..]; } slice.len() }; // we consumed bytes, must shift to beginning. 
// this should optimize to copy_overlapping; // would just do that if it weren't unsafe =/ if remaining < self.length { for index in 0..remaining { self.buffer[index] = self.buffer[index + self.length - remaining]; } self.length = remaining; } } } } // structure in charge of sending data to a Writer, for example the network struct BinarySender<W: Write> { id: u64, // destination process writer: W, sources: Receiver<(MessageHeader, Vec<u8>)>, returns: Switchboard<Sender<Vec<u8>>>, } impl<W: Write> BinarySender<W> { fn new(id: u64, writer: W, sources: Receiver<(MessageHeader, Vec<u8>)>, channels: Receiver<((u64, u64, u64), Sender<Vec<u8>>)>) -> BinarySender<W> { BinarySender { id: id, writer: writer, sources: sources, returns: Switchboard::new(channels), } } fn send_loop(&mut self) { let mut stash = Vec::new(); // block until data to recv while let Ok((header, buffer)) = self.sources.recv() { stash.push((header, buffer)); // collect any additional outstanding data to send while let Ok((header, buffer)) = self.sources.try_recv() { stash.push((header, buffer)); } // println!("send loop to process {}:\tstarting", self.id); for (mut header, mut buffer) in stash.drain_temp() { header.length = buffer.len() as u64; // <-- is this really our job? 
O.o header.write_to(&mut self.writer).unwrap(); self.writer.write_all(&buffer[..]).unwrap(); buffer.clear(); // self.returns.ensure(header.source, header.graph, header.channel).send(buffer).unwrap(); } self.writer.flush().unwrap(); // <-- because writer is buffered } } } struct Switchboard<T:Send> { source: Receiver<((u64, u64, u64), T)>, buffer: Vec<Vec<Vec<Option<T>>>>, } impl<T:Send> Switchboard<T> { pub fn new(source: Receiver<((u64, u64, u64), T)>) -> Switchboard<T> { Switchboard { source: source, buffer: Vec::new(), } } pub fn ensure(&mut self, a: u64, b: u64, c: u64) -> &mut T { let a = a as usize; let b = b as usize; let c = c as usize; while self.buffer.len() <= a { self.buffer.push(Vec::new()); } while self.buffer[a].len() <= b { self.buffer[a].push(Vec::new()); } while self.buffer[a][b].len() <= c { self.buffer[a][b].push(None); } while let None = self.buffer[a][b][c] { let ((x, y, z), s) = self.source.recv().unwrap(); let x = x as usize; let y = y as usize; let z = z as usize; while self.buffer.len() <= x { self.buffer.push(Vec::new()); } while self.buffer[x].len() <= y { self.buffer[x].push(Vec::new()); } while self.buffer[x][y].len() <= z { self.buffer[x][y].push(None); } self.buffer[x][y][z] = Some(s); } // we've just ensured that this is not None self.buffer[a][b][c].as_mut().unwrap() } } pub fn initialize_networking_from_file(filename: &str, my_index: u64, workers: u64) -> Result<Vec<Binary>> { let reader = BufReader::new(try!(File::open(filename))); let mut addresses = Vec::new(); for line in reader.lines() { addresses.push(try!(line)); } initialize_networking(addresses, my_index, workers) } pub fn initialize_networking(addresses: Vec<String>, my_index: u64, workers: u64) -> Result<Vec<Binary>> { let processes = addresses.len() as u64; let hosts1 = Arc::new(addresses); let hosts2 = hosts1.clone(); let start_task = thread::spawn(move || start_connections(hosts1, my_index)); let await_task = thread::spawn(move || await_connections(hosts2, 
my_index)); let mut results = try!(start_task.join().unwrap()); results.push(None); let mut to_extend = try!(await_task.join().unwrap()); results.extend(to_extend.drain_temp()); println!("worker {}:\tinitialization complete", my_index); let mut writers = Vec::new(); // handles to the BinarySenders (to present new channels) let mut readers = Vec::new(); // handles to the BinaryReceivers (to present new channels) let mut senders = Vec::new(); // destinations for serialized data (to send serialized data) // for each process, if a stream exists (i.e. not local) ... for index in (0..results.len()) { if let Some(stream) = results[index].take() { let (writer_channels_s, writer_channels_r) = channel(); let (reader_channels_s, reader_channels_r) = channel(); let (sender_channels_s, sender_channels_r) = channel(); writers.push(writer_channels_s); // readers.push(reader_channels_s); // senders.push(sender_channels_s); // let mut sender = BinarySender::new(index as u64, BufWriter::with_capacity(1 << 20, stream.try_clone().unwrap()), sender_channels_r, writer_channels_r); let mut recver = BinaryReceiver::new(stream.try_clone().unwrap(), reader_channels_r); // start senders and receivers associated with this stream thread::Builder::new().name(format!("send thread {}", index)) .spawn(move || sender.send_loop()) .unwrap(); thread::Builder::new().name(format!("recv thread {}", index)) .spawn(move || recver.recv_loop()) .unwrap(); } } let proc_comms = Process::new_vector(workers); let mut results = Vec::new(); for (index, proc_comm) in proc_comms.into_iter().enumerate() { results.push(Binary { inner: proc_comm, index: my_index * workers + index as u64, peers: workers * processes, graph: 0, // TODO : Fix this allocated: 0, writers: writers.clone(), readers: readers.clone(), senders: senders.clone(), }); } return Ok(results); } // result contains connections [0, my_index - 1]. 
fn start_connections(addresses: Arc<Vec<String>>, my_index: u64) -> Result<Vec<Option<TcpStream>>> { let mut results: Vec<_> = (0..my_index).map(|_| None).collect(); for index in (0..my_index) { let mut connected = false; while !connected { match TcpStream::connect(&addresses[index as usize][..]) { Ok(mut stream) => { try!(stream.write_u64::<LittleEndian>(my_index)); results[index as usize] = Some(stream); println!("worker {}:\tconnection to worker {}", my_index, index); connected = true; }, Err(error) => { println!("worker {}:\terror connecting to worker {}: {}; retrying", my_index, index, error); sleep_ms(1000); }, } } } return Ok(results); } // result contains connections [my_index + 1, addresses.len() - 1]. fn await_connections(addresses: Arc<Vec<String>>, my_index: u64) -> Result<Vec<Option<TcpStream>>> { let mut results: Vec<_> = (0..(addresses.len() - my_index as usize - 1)).map(|_| None).collect(); let listener = try!(TcpListener::bind(&addresses[my_index as usize][..])); for _ in (my_index as usize + 1 .. addresses.len()) { let mut stream = try!(listener.accept()).0; let identifier = try!(stream.read_u64::<LittleEndian>()) as usize; results[identifier - my_index as usize - 1] = Some(stream); println!("worker {}:\tconnection from worker {}", my_index, identifier); } return Ok(results); }
MessageHeader
identifier_name
networking.rs
use std::io::{Read, Write, Result, BufRead, BufReader, BufWriter}; use std::fs::File; use std::net::{TcpListener, TcpStream}; use std::mem::size_of; use std::sync::Arc; use std::sync::mpsc::{Sender, Receiver, channel}; use std::thread; use std::thread::sleep_ms; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use communication::communicator::{Binary, Process}; use drain::DrainExt; // TODO : Much of this only relates to BinaryWriter/BinaryReader based communication, not networking. // TODO : Could be moved somewhere less networking-specific. #[derive(Copy, Clone)] pub struct MessageHeader { pub graph: u64, // graph identifier pub channel: u64, // index of channel pub source: u64, // index of worker sending message pub target: u64, // index of worker receiving message pub length: u64, // number of bytes in message } impl MessageHeader { // returns a header when there is enough supporting data fn try_read(bytes: &mut &[u8]) -> Option<MessageHeader> { if bytes.len() > size_of::<MessageHeader>() { // capture original in case we need to rewind let original = *bytes; // unclear what order struct initializers run in, so ... 
let graph = bytes.read_u64::<LittleEndian>().unwrap(); let channel = bytes.read_u64::<LittleEndian>().unwrap(); let source = bytes.read_u64::<LittleEndian>().unwrap(); let target = bytes.read_u64::<LittleEndian>().unwrap(); let length = bytes.read_u64::<LittleEndian>().unwrap(); if bytes.len() >= length as usize { Some(MessageHeader { graph: graph, channel: channel, source: source, target: target, length: length, }) } else { // rewind the reader *bytes = original; None } } else { None } } fn write_to<W: Write>(&self, writer: &mut W) -> Result<()> { try!(writer.write_u64::<LittleEndian>(self.graph)); try!(writer.write_u64::<LittleEndian>(self.channel)); try!(writer.write_u64::<LittleEndian>(self.source)); try!(writer.write_u64::<LittleEndian>(self.target)); try!(writer.write_u64::<LittleEndian>(self.length)); Ok(()) } } // // structure in charge of receiving data from a Reader, for example the network // struct BinaryReceiver<R: Read> { // reader: R, // the generic reader // buffer: Vec<u8>, // current working buffer // double: Vec<u8>, // second working buffer // staging: Vec<u8>, // 1 << 20 of buffer to read into // targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>, // } // // impl<R: Read> BinaryReceiver<R> { // fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> { // BinaryReceiver { // reader: reader, // buffer: Vec::new(), // double: Vec::new(), // staging: vec![0u8; 1 << 20], // targets: Switchboard::new(channels), // } // } // // fn recv_loop(&mut self) { // loop { // // // attempt to read some more bytes into our buffer // // TODO : We read in to self.staging because extending a Vec<u8> is hard without // // TODO : using set_len, which is unsafe. // // TODO : Could consider optimizing for the self.buffer.len() == 0 case, swapping // // TODO : self.staging with self.buffer, rather than using write_all. 
// let read = self.reader.read(&mut self.staging[..]).unwrap_or(0); // self.buffer.write_all(&self.staging[..read]).unwrap(); // <-- shouldn't fail // // { // // get a view of available bytes // let mut slice = &self.buffer[..]; // // while let Some(header) = MessageHeader::try_read(&mut slice) { // // let h_len = header.length as usize; // length in bytes // let target = self.targets.ensure(header.target, header.graph, header.channel); // let mut buffer = target.1.try_recv().unwrap_or(Vec::new()); // // buffer.clear(); // buffer.write_all(&slice[..h_len]).unwrap(); // slice = &slice[h_len..]; // // target.0.send(buffer).unwrap(); // } // // // TODO: way inefficient... =/ Fix! :D // // if slice.len() < self.buffer.len() { // self.double.clear(); // self.double.write_all(slice).unwrap(); // // } // } // // // if self.double.len() > 0 { // mem::swap(&mut self.buffer, &mut self.double); // // self.double.clear(); // // } // } // } // } // structure in charge of receiving data from a Reader, for example the network struct BinaryReceiver<R: Read> { reader: R, // the generic reader buffer: Vec<u8>, // current working buffer length: usize, targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>, } impl<R: Read> BinaryReceiver<R> { fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> { BinaryReceiver { reader: reader, buffer: vec![0u8; 1 << 20], length: 0, targets: Switchboard::new(channels), } } fn recv_loop(&mut self) { loop { // if we've mostly filled our buffer and still can't read a whole message from it,
// we'll need more space / to read more at once. let's double the buffer! if self.length >= self.buffer.len() / 2 { self.buffer.extend(::std::iter::repeat(0u8).take(self.length)); } // attempt to read some more bytes into our buffer let read = self.reader.read(&mut self.buffer[self.length..]).unwrap_or(0); self.length += read; let remaining = { let mut slice = &self.buffer[..self.length]; while let Some(header) = MessageHeader::try_read(&mut slice) { let h_len = header.length as usize; // length in bytes let target = &mut self.targets.ensure(header.target, header.graph, header.channel).0; target.send(slice[..h_len].to_vec()).unwrap(); slice = &slice[h_len..]; } slice.len() }; // we consumed bytes, must shift to beginning. // this should optimize to copy_overlapping; // would just do that if it weren't unsafe =/ if remaining < self.length { for index in 0..remaining { self.buffer[index] = self.buffer[index + self.length - remaining]; } self.length = remaining; } } } } // structure in charge of sending data to a Writer, for example the network struct BinarySender<W: Write> { id: u64, // destination process writer: W, sources: Receiver<(MessageHeader, Vec<u8>)>, returns: Switchboard<Sender<Vec<u8>>>, } impl<W: Write> BinarySender<W> { fn new(id: u64, writer: W, sources: Receiver<(MessageHeader, Vec<u8>)>, channels: Receiver<((u64, u64, u64), Sender<Vec<u8>>)>) -> BinarySender<W> { BinarySender { id: id, writer: writer, sources: sources, returns: Switchboard::new(channels), } } fn send_loop(&mut self) { let mut stash = Vec::new(); // block until data to recv while let Ok((header, buffer)) = self.sources.recv() { stash.push((header, buffer)); // collect any additional outstanding data to send while let Ok((header, buffer)) = self.sources.try_recv() { stash.push((header, buffer)); } // println!("send loop to process {}:\tstarting", self.id); for (mut header, mut buffer) in stash.drain_temp() { header.length = buffer.len() as u64; // <-- is this really our job? 
O.o header.write_to(&mut self.writer).unwrap(); self.writer.write_all(&buffer[..]).unwrap(); buffer.clear(); // self.returns.ensure(header.source, header.graph, header.channel).send(buffer).unwrap(); } self.writer.flush().unwrap(); // <-- because writer is buffered } } } struct Switchboard<T:Send> { source: Receiver<((u64, u64, u64), T)>, buffer: Vec<Vec<Vec<Option<T>>>>, } impl<T:Send> Switchboard<T> { pub fn new(source: Receiver<((u64, u64, u64), T)>) -> Switchboard<T> { Switchboard { source: source, buffer: Vec::new(), } } pub fn ensure(&mut self, a: u64, b: u64, c: u64) -> &mut T { let a = a as usize; let b = b as usize; let c = c as usize; while self.buffer.len() <= a { self.buffer.push(Vec::new()); } while self.buffer[a].len() <= b { self.buffer[a].push(Vec::new()); } while self.buffer[a][b].len() <= c { self.buffer[a][b].push(None); } while let None = self.buffer[a][b][c] { let ((x, y, z), s) = self.source.recv().unwrap(); let x = x as usize; let y = y as usize; let z = z as usize; while self.buffer.len() <= x { self.buffer.push(Vec::new()); } while self.buffer[x].len() <= y { self.buffer[x].push(Vec::new()); } while self.buffer[x][y].len() <= z { self.buffer[x][y].push(None); } self.buffer[x][y][z] = Some(s); } // we've just ensured that this is not None self.buffer[a][b][c].as_mut().unwrap() } } pub fn initialize_networking_from_file(filename: &str, my_index: u64, workers: u64) -> Result<Vec<Binary>> { let reader = BufReader::new(try!(File::open(filename))); let mut addresses = Vec::new(); for line in reader.lines() { addresses.push(try!(line)); } initialize_networking(addresses, my_index, workers) } pub fn initialize_networking(addresses: Vec<String>, my_index: u64, workers: u64) -> Result<Vec<Binary>> { let processes = addresses.len() as u64; let hosts1 = Arc::new(addresses); let hosts2 = hosts1.clone(); let start_task = thread::spawn(move || start_connections(hosts1, my_index)); let await_task = thread::spawn(move || await_connections(hosts2, 
my_index)); let mut results = try!(start_task.join().unwrap()); results.push(None); let mut to_extend = try!(await_task.join().unwrap()); results.extend(to_extend.drain_temp()); println!("worker {}:\tinitialization complete", my_index); let mut writers = Vec::new(); // handles to the BinarySenders (to present new channels) let mut readers = Vec::new(); // handles to the BinaryReceivers (to present new channels) let mut senders = Vec::new(); // destinations for serialized data (to send serialized data) // for each process, if a stream exists (i.e. not local) ... for index in (0..results.len()) { if let Some(stream) = results[index].take() { let (writer_channels_s, writer_channels_r) = channel(); let (reader_channels_s, reader_channels_r) = channel(); let (sender_channels_s, sender_channels_r) = channel(); writers.push(writer_channels_s); // readers.push(reader_channels_s); // senders.push(sender_channels_s); // let mut sender = BinarySender::new(index as u64, BufWriter::with_capacity(1 << 20, stream.try_clone().unwrap()), sender_channels_r, writer_channels_r); let mut recver = BinaryReceiver::new(stream.try_clone().unwrap(), reader_channels_r); // start senders and receivers associated with this stream thread::Builder::new().name(format!("send thread {}", index)) .spawn(move || sender.send_loop()) .unwrap(); thread::Builder::new().name(format!("recv thread {}", index)) .spawn(move || recver.recv_loop()) .unwrap(); } } let proc_comms = Process::new_vector(workers); let mut results = Vec::new(); for (index, proc_comm) in proc_comms.into_iter().enumerate() { results.push(Binary { inner: proc_comm, index: my_index * workers + index as u64, peers: workers * processes, graph: 0, // TODO : Fix this allocated: 0, writers: writers.clone(), readers: readers.clone(), senders: senders.clone(), }); } return Ok(results); } // result contains connections [0, my_index - 1]. 
fn start_connections(addresses: Arc<Vec<String>>, my_index: u64) -> Result<Vec<Option<TcpStream>>> { let mut results: Vec<_> = (0..my_index).map(|_| None).collect(); for index in (0..my_index) { let mut connected = false; while !connected { match TcpStream::connect(&addresses[index as usize][..]) { Ok(mut stream) => { try!(stream.write_u64::<LittleEndian>(my_index)); results[index as usize] = Some(stream); println!("worker {}:\tconnection to worker {}", my_index, index); connected = true; }, Err(error) => { println!("worker {}:\terror connecting to worker {}: {}; retrying", my_index, index, error); sleep_ms(1000); }, } } } return Ok(results); } // result contains connections [my_index + 1, addresses.len() - 1]. fn await_connections(addresses: Arc<Vec<String>>, my_index: u64) -> Result<Vec<Option<TcpStream>>> { let mut results: Vec<_> = (0..(addresses.len() - my_index as usize - 1)).map(|_| None).collect(); let listener = try!(TcpListener::bind(&addresses[my_index as usize][..])); for _ in (my_index as usize + 1 .. addresses.len()) { let mut stream = try!(listener.accept()).0; let identifier = try!(stream.read_u64::<LittleEndian>()) as usize; results[identifier - my_index as usize - 1] = Some(stream); println!("worker {}:\tconnection from worker {}", my_index, identifier); } return Ok(results); }
random_line_split
networking.rs
use std::io::{Read, Write, Result, BufRead, BufReader, BufWriter}; use std::fs::File; use std::net::{TcpListener, TcpStream}; use std::mem::size_of; use std::sync::Arc; use std::sync::mpsc::{Sender, Receiver, channel}; use std::thread; use std::thread::sleep_ms; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use communication::communicator::{Binary, Process}; use drain::DrainExt; // TODO : Much of this only relates to BinaryWriter/BinaryReader based communication, not networking. // TODO : Could be moved somewhere less networking-specific. #[derive(Copy, Clone)] pub struct MessageHeader { pub graph: u64, // graph identifier pub channel: u64, // index of channel pub source: u64, // index of worker sending message pub target: u64, // index of worker receiving message pub length: u64, // number of bytes in message } impl MessageHeader { // returns a header when there is enough supporting data fn try_read(bytes: &mut &[u8]) -> Option<MessageHeader> { if bytes.len() > size_of::<MessageHeader>() { // capture original in case we need to rewind let original = *bytes; // unclear what order struct initializers run in, so ... 
let graph = bytes.read_u64::<LittleEndian>().unwrap(); let channel = bytes.read_u64::<LittleEndian>().unwrap(); let source = bytes.read_u64::<LittleEndian>().unwrap(); let target = bytes.read_u64::<LittleEndian>().unwrap(); let length = bytes.read_u64::<LittleEndian>().unwrap(); if bytes.len() >= length as usize { Some(MessageHeader { graph: graph, channel: channel, source: source, target: target, length: length, }) } else { // rewind the reader *bytes = original; None } } else { None } } fn write_to<W: Write>(&self, writer: &mut W) -> Result<()> { try!(writer.write_u64::<LittleEndian>(self.graph)); try!(writer.write_u64::<LittleEndian>(self.channel)); try!(writer.write_u64::<LittleEndian>(self.source)); try!(writer.write_u64::<LittleEndian>(self.target)); try!(writer.write_u64::<LittleEndian>(self.length)); Ok(()) } } // // structure in charge of receiving data from a Reader, for example the network // struct BinaryReceiver<R: Read> { // reader: R, // the generic reader // buffer: Vec<u8>, // current working buffer // double: Vec<u8>, // second working buffer // staging: Vec<u8>, // 1 << 20 of buffer to read into // targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>, // } // // impl<R: Read> BinaryReceiver<R> { // fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> { // BinaryReceiver { // reader: reader, // buffer: Vec::new(), // double: Vec::new(), // staging: vec![0u8; 1 << 20], // targets: Switchboard::new(channels), // } // } // // fn recv_loop(&mut self) { // loop { // // // attempt to read some more bytes into our buffer // // TODO : We read in to self.staging because extending a Vec<u8> is hard without // // TODO : using set_len, which is unsafe. // // TODO : Could consider optimizing for the self.buffer.len() == 0 case, swapping // // TODO : self.staging with self.buffer, rather than using write_all. 
// let read = self.reader.read(&mut self.staging[..]).unwrap_or(0); // self.buffer.write_all(&self.staging[..read]).unwrap(); // <-- shouldn't fail // // { // // get a view of available bytes // let mut slice = &self.buffer[..]; // // while let Some(header) = MessageHeader::try_read(&mut slice) { // // let h_len = header.length as usize; // length in bytes // let target = self.targets.ensure(header.target, header.graph, header.channel); // let mut buffer = target.1.try_recv().unwrap_or(Vec::new()); // // buffer.clear(); // buffer.write_all(&slice[..h_len]).unwrap(); // slice = &slice[h_len..]; // // target.0.send(buffer).unwrap(); // } // // // TODO: way inefficient... =/ Fix! :D // // if slice.len() < self.buffer.len() { // self.double.clear(); // self.double.write_all(slice).unwrap(); // // } // } // // // if self.double.len() > 0 { // mem::swap(&mut self.buffer, &mut self.double); // // self.double.clear(); // // } // } // } // } // structure in charge of receiving data from a Reader, for example the network struct BinaryReceiver<R: Read> { reader: R, // the generic reader buffer: Vec<u8>, // current working buffer length: usize, targets: Switchboard<(Sender<Vec<u8>>, Receiver<Vec<u8>>)>, } impl<R: Read> BinaryReceiver<R> { fn new(reader: R, channels: Receiver<((u64, u64, u64), (Sender<Vec<u8>>, Receiver<Vec<u8>>))>) -> BinaryReceiver<R> { BinaryReceiver { reader: reader, buffer: vec![0u8; 1 << 20], length: 0, targets: Switchboard::new(channels), } } fn recv_loop(&mut self) { loop { // if we've mostly filled our buffer and still can't read a whole message from it, // we'll need more space / to read more at once. let's double the buffer! 
if self.length >= self.buffer.len() / 2 { self.buffer.extend(::std::iter::repeat(0u8).take(self.length)); } // attempt to read some more bytes into our buffer let read = self.reader.read(&mut self.buffer[self.length..]).unwrap_or(0); self.length += read; let remaining = { let mut slice = &self.buffer[..self.length]; while let Some(header) = MessageHeader::try_read(&mut slice) { let h_len = header.length as usize; // length in bytes let target = &mut self.targets.ensure(header.target, header.graph, header.channel).0; target.send(slice[..h_len].to_vec()).unwrap(); slice = &slice[h_len..]; } slice.len() }; // we consumed bytes, must shift to beginning. // this should optimize to copy_overlapping; // would just do that if it weren't unsafe =/ if remaining < self.length { for index in 0..remaining { self.buffer[index] = self.buffer[index + self.length - remaining]; } self.length = remaining; } } } } // structure in charge of sending data to a Writer, for example the network struct BinarySender<W: Write> { id: u64, // destination process writer: W, sources: Receiver<(MessageHeader, Vec<u8>)>, returns: Switchboard<Sender<Vec<u8>>>, } impl<W: Write> BinarySender<W> { fn new(id: u64, writer: W, sources: Receiver<(MessageHeader, Vec<u8>)>, channels: Receiver<((u64, u64, u64), Sender<Vec<u8>>)>) -> BinarySender<W> { BinarySender { id: id, writer: writer, sources: sources, returns: Switchboard::new(channels), } } fn send_loop(&mut self) { let mut stash = Vec::new(); // block until data to recv while let Ok((header, buffer)) = self.sources.recv() { stash.push((header, buffer)); // collect any additional outstanding data to send while let Ok((header, buffer)) = self.sources.try_recv() { stash.push((header, buffer)); } // println!("send loop to process {}:\tstarting", self.id); for (mut header, mut buffer) in stash.drain_temp() { header.length = buffer.len() as u64; // <-- is this really our job? 
O.o header.write_to(&mut self.writer).unwrap(); self.writer.write_all(&buffer[..]).unwrap(); buffer.clear(); // self.returns.ensure(header.source, header.graph, header.channel).send(buffer).unwrap(); } self.writer.flush().unwrap(); // <-- because writer is buffered } } } struct Switchboard<T:Send> { source: Receiver<((u64, u64, u64), T)>, buffer: Vec<Vec<Vec<Option<T>>>>, } impl<T:Send> Switchboard<T> { pub fn new(source: Receiver<((u64, u64, u64), T)>) -> Switchboard<T> { Switchboard { source: source, buffer: Vec::new(), } } pub fn ensure(&mut self, a: u64, b: u64, c: u64) -> &mut T { let a = a as usize; let b = b as usize; let c = c as usize; while self.buffer.len() <= a { self.buffer.push(Vec::new()); } while self.buffer[a].len() <= b { self.buffer[a].push(Vec::new()); } while self.buffer[a][b].len() <= c { self.buffer[a][b].push(None); } while let None = self.buffer[a][b][c] { let ((x, y, z), s) = self.source.recv().unwrap(); let x = x as usize; let y = y as usize; let z = z as usize; while self.buffer.len() <= x { self.buffer.push(Vec::new()); } while self.buffer[x].len() <= y { self.buffer[x].push(Vec::new()); } while self.buffer[x][y].len() <= z { self.buffer[x][y].push(None); } self.buffer[x][y][z] = Some(s); } // we've just ensured that this is not None self.buffer[a][b][c].as_mut().unwrap() } } pub fn initialize_networking_from_file(filename: &str, my_index: u64, workers: u64) -> Result<Vec<Binary>> { let reader = BufReader::new(try!(File::open(filename))); let mut addresses = Vec::new(); for line in reader.lines() { addresses.push(try!(line)); } initialize_networking(addresses, my_index, workers) } pub fn initialize_networking(addresses: Vec<String>, my_index: u64, workers: u64) -> Result<Vec<Binary>> { let processes = addresses.len() as u64; let hosts1 = Arc::new(addresses); let hosts2 = hosts1.clone(); let start_task = thread::spawn(move || start_connections(hosts1, my_index)); let await_task = thread::spawn(move || await_connections(hosts2, 
my_index)); let mut results = try!(start_task.join().unwrap()); results.push(None); let mut to_extend = try!(await_task.join().unwrap()); results.extend(to_extend.drain_temp()); println!("worker {}:\tinitialization complete", my_index); let mut writers = Vec::new(); // handles to the BinarySenders (to present new channels) let mut readers = Vec::new(); // handles to the BinaryReceivers (to present new channels) let mut senders = Vec::new(); // destinations for serialized data (to send serialized data) // for each process, if a stream exists (i.e. not local) ... for index in (0..results.len()) { if let Some(stream) = results[index].take() { let (writer_channels_s, writer_channels_r) = channel(); let (reader_channels_s, reader_channels_r) = channel(); let (sender_channels_s, sender_channels_r) = channel(); writers.push(writer_channels_s); // readers.push(reader_channels_s); // senders.push(sender_channels_s); // let mut sender = BinarySender::new(index as u64, BufWriter::with_capacity(1 << 20, stream.try_clone().unwrap()), sender_channels_r, writer_channels_r); let mut recver = BinaryReceiver::new(stream.try_clone().unwrap(), reader_channels_r); // start senders and receivers associated with this stream thread::Builder::new().name(format!("send thread {}", index)) .spawn(move || sender.send_loop()) .unwrap(); thread::Builder::new().name(format!("recv thread {}", index)) .spawn(move || recver.recv_loop()) .unwrap(); } } let proc_comms = Process::new_vector(workers); let mut results = Vec::new(); for (index, proc_comm) in proc_comms.into_iter().enumerate() { results.push(Binary { inner: proc_comm, index: my_index * workers + index as u64, peers: workers * processes, graph: 0, // TODO : Fix this allocated: 0, writers: writers.clone(), readers: readers.clone(), senders: senders.clone(), }); } return Ok(results); } // result contains connections [0, my_index - 1]. 
fn start_connections(addresses: Arc<Vec<String>>, my_index: u64) -> Result<Vec<Option<TcpStream>>> { let mut results: Vec<_> = (0..my_index).map(|_| None).collect(); for index in (0..my_index) { let mut connected = false; while !connected { match TcpStream::connect(&addresses[index as usize][..]) { Ok(mut stream) => { try!(stream.write_u64::<LittleEndian>(my_index)); results[index as usize] = Some(stream); println!("worker {}:\tconnection to worker {}", my_index, index); connected = true; }, Err(error) => { println!("worker {}:\terror connecting to worker {}: {}; retrying", my_index, index, error); sleep_ms(1000); }, } } } return Ok(results); } // result contains connections [my_index + 1, addresses.len() - 1]. fn await_connections(addresses: Arc<Vec<String>>, my_index: u64) -> Result<Vec<Option<TcpStream>>>
{ let mut results: Vec<_> = (0..(addresses.len() - my_index as usize - 1)).map(|_| None).collect(); let listener = try!(TcpListener::bind(&addresses[my_index as usize][..])); for _ in (my_index as usize + 1 .. addresses.len()) { let mut stream = try!(listener.accept()).0; let identifier = try!(stream.read_u64::<LittleEndian>()) as usize; results[identifier - my_index as usize - 1] = Some(stream); println!("worker {}:\tconnection from worker {}", my_index, identifier); } return Ok(results); }
identifier_body
traits.rs
use core::marker::Sized; use embedded_hal::{ blocking::{delay::*, spi::Write}, digital::v2::*, }; /// All commands need to have this trait which gives the address of the command /// which needs to be send via SPI with activated CommandsPin (Data/Command Pin in CommandMode) pub(crate) trait Command: Copy { fn address(self) -> u8; } /// Seperates the different LUT for the Display Refresh process #[derive(Debug, Clone, PartialEq, Eq, Copy, Default)] pub enum RefreshLut { /// The "normal" full Lookuptable for the Refresh-Sequence #[default] Full, /// The quick LUT where not the full refresh sequence is followed. /// This might lead to some Quick, } pub(crate) trait InternalWiAdditions<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { /// This initialises the EPD and powers it up /// /// This function is already called from /// - [new()](WaveshareDisplay::new()) /// - [`wake_up`] /// /// /// This function calls [reset](WaveshareDisplay::reset), /// so you don't need to call reset your self when trying to wake your device up /// after setting it to sleep. fn init(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; } /// Functions to interact with three color panels pub trait WaveshareThreeColorDisplay<SPI, CS, BUSY, DC, RST, DELAY>: WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { /// Transmit data to the SRAM of the EPD /// /// Updates both the black and the secondary color layers fn update_color_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, black: &[u8], chromatic: &[u8], ) -> Result<(), SPI::Error>; /// Update only the black/white data of the display. /// /// This must be finished by calling `update_chromatic_frame`. 
fn update_achromatic_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, black: &[u8], ) -> Result<(), SPI::Error>; /// Update only the chromatic data of the display. /// /// This should be preceded by a call to `update_achromatic_frame`. /// This data takes precedence over the black/white data. fn update_chromatic_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, chromatic: &[u8], ) -> Result<(), SPI::Error>; } /// All the functions to interact with the EPDs /// /// This trait includes all public functions to use the EPDs /// /// # Example /// ///```rust, no_run ///# use embedded_hal_mock::*; ///# fn main() -> Result<(), MockError> { ///use embedded_graphics::{ /// pixelcolor::BinaryColor::On as Black, prelude::*, primitives::{Line, PrimitiveStyle}, ///}; ///use epd_waveshare::{epd4in2::*, prelude::*}; ///# ///# let expectations = []; ///# let mut spi = spi::Mock::new(&expectations); ///# let expectations = []; ///# let cs_pin = pin::Mock::new(&expectations); ///# let busy_in = pin::Mock::new(&expectations); ///# let dc = pin::Mock::new(&expectations); ///# let rst = pin::Mock::new(&expectations); ///# let mut delay = delay::MockNoop::new(); /// ///// Setup EPD ///let mut epd = Epd4in2::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?; /// ///// Use display graphics from embedded-graphics ///let mut display = Display4in2::default(); /// ///// Use embedded graphics for drawing a line /// ///let _ = Line::new(Point::new(0, 120), Point::new(0, 295)) /// .into_styled(PrimitiveStyle::with_stroke(Color::Black, 1)) /// .draw(&mut display); /// /// // Display updated frame ///epd.update_frame(&mut spi, &display.buffer(), &mut delay)?; ///epd.display_frame(&mut spi, &mut delay)?; /// ///// Set the EPD to sleep ///epd.sleep(&mut spi, &mut delay)?; ///# Ok(()) ///# } ///``` pub trait WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { /// The Color Type used by 
the Display type DisplayColor; /// Creates a new driver from a SPI peripheral, CS Pin, Busy InputPin, DC /// /// `delay_us` is the number of us the idle loop should sleep on. /// Setting it to 0 implies busy waiting. /// Setting it to None means a default value is used. /// /// This already initialises the device. fn new( spi: &mut SPI, cs: CS, busy: BUSY, dc: DC, rst: RST, delay: &mut DELAY, delay_us: Option<u32>, ) -> Result<Self, SPI::Error> where Self: Sized; /// Let the device enter deep-sleep mode to save power. /// /// The deep sleep mode returns to standby with a hardware reset. fn sleep(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; /// Wakes the device up from sleep /// /// Also reintialises the device if necessary. fn wake_up(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; /// Sets the backgroundcolor for various commands like [clear_frame](WaveshareDisplay::clear_frame) fn set_background_color(&mut self, color: Self::DisplayColor); /// Get current background color fn background_color(&self) -> &Self::DisplayColor; /// Get the width of the display fn width(&self) -> u32; /// Get the height of the display fn height(&self) -> u32; /// Transmit a full frame to the SRAM of the EPD fn update_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Transmits partial data to the SRAM of the EPD /// /// (x,y) is the top left corner /// /// BUFFER needs to be of size: width / 8 * height ! 
#[allow(clippy::too_many_arguments)] fn update_partial_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, buffer: &[u8], x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error>; /// Displays the frame data from SRAM /// /// This function waits until the device isn`t busy anymore fn display_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; /// Provide a combined update&display and save some time (skipping a busy check in between) fn update_and_display_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Clears the frame buffer on the EPD with the declared background color /// /// The background color can be changed with [`WaveshareDisplay::set_background_color`] fn clear_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; /// Trait for using various Waveforms from different LUTs /// E.g. for partial refreshes /// /// A full refresh is needed after a certain amount of quick refreshes! /// /// WARNING: Quick Refresh might lead to ghosting-effects/problems with your display. Especially for the 4.2in Display! /// /// If None is used the old value will be loaded on the LUTs once more fn set_lut( &mut self, spi: &mut SPI, delay: &mut DELAY, refresh_rate: Option<RefreshLut>, ) -> Result<(), SPI::Error>; /// Wait until the display has stopped processing data /// /// You can call this to make sure a frame is displayed before goin further fn wait_until_idle(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; } /// Allows quick refresh support for displays that support it; lets you send both /// old and new frame data to support this. /// /// When using the quick refresh look-up table, the display must receive separate display /// buffer data marked as old, and new. This is used to determine which pixels need to change, /// and how they will change. This isn't required when using full refreshes. 
/// /// (todo: Example ommitted due to CI failures.) /// Example: ///```rust, no_run ///# use embedded_hal_mock::*; ///# fn main() -> Result<(), MockError> { ///# use embedded_graphics::{ ///# pixelcolor::BinaryColor::On as Black, prelude::*, primitives::{Line, PrimitiveStyle}, ///# }; ///# use epd_waveshare::{epd4in2::*, prelude::*}; ///# use epd_waveshare::graphics::VarDisplay; ///# ///# let expectations = []; ///# let mut spi = spi::Mock::new(&expectations); ///# let expectations = []; ///# let cs_pin = pin::Mock::new(&expectations); ///# let busy_in = pin::Mock::new(&expectations); ///# let dc = pin::Mock::new(&expectations); ///# let rst = pin::Mock::new(&expectations); ///# let mut delay = delay::MockNoop::new(); ///# ///# // Setup EPD ///# let mut epd = Epd4in2::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?; ///let (x, y, frame_width, frame_height) = (20, 40, 80,80); /// ///let mut buffer = [DEFAULT_BACKGROUND_COLOR.get_byte_value(); 80 / 8 * 80]; ///let mut display = VarDisplay::new(frame_width, frame_height, &mut buffer,false).unwrap(); /// ///epd.update_partial_old_frame(&mut spi, &mut delay, display.buffer(), x, y, frame_width, frame_height) /// .ok(); /// ///display.clear(Color::White).ok(); ///// Execute drawing commands here. /// ///epd.update_partial_new_frame(&mut spi, &mut delay, display.buffer(), x, y, frame_width, frame_height) /// .ok(); ///# Ok(()) ///# } ///``` pub trait QuickRefresh<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { /// Updates the old frame. fn update_old_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Updates the new frame. fn update_new_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>;
/// Updates and displays the new frame. fn update_and_display_new_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Updates the old frame for a portion of the display. #[allow(clippy::too_many_arguments)] fn update_partial_old_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, buffer: &[u8], x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error>; /// Updates the new frame for a portion of the display. #[allow(clippy::too_many_arguments)] fn update_partial_new_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, buffer: &[u8], x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error>; /// Clears the partial frame buffer on the EPD with the declared background color /// The background color can be changed with [`WaveshareDisplay::set_background_color`] fn clear_partial_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error>; }
/// Displays the new frame fn display_new_frame(&mut self, spi: &mut SPI, _delay: &mut DELAY) -> Result<(), SPI::Error>;
random_line_split
traits.rs
use core::marker::Sized; use embedded_hal::{ blocking::{delay::*, spi::Write}, digital::v2::*, }; /// All commands need to have this trait which gives the address of the command /// which needs to be send via SPI with activated CommandsPin (Data/Command Pin in CommandMode) pub(crate) trait Command: Copy { fn address(self) -> u8; } /// Seperates the different LUT for the Display Refresh process #[derive(Debug, Clone, PartialEq, Eq, Copy, Default)] pub enum
{ /// The "normal" full Lookuptable for the Refresh-Sequence #[default] Full, /// The quick LUT where not the full refresh sequence is followed. /// This might lead to some Quick, } pub(crate) trait InternalWiAdditions<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { /// This initialises the EPD and powers it up /// /// This function is already called from /// - [new()](WaveshareDisplay::new()) /// - [`wake_up`] /// /// /// This function calls [reset](WaveshareDisplay::reset), /// so you don't need to call reset your self when trying to wake your device up /// after setting it to sleep. fn init(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; } /// Functions to interact with three color panels pub trait WaveshareThreeColorDisplay<SPI, CS, BUSY, DC, RST, DELAY>: WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { /// Transmit data to the SRAM of the EPD /// /// Updates both the black and the secondary color layers fn update_color_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, black: &[u8], chromatic: &[u8], ) -> Result<(), SPI::Error>; /// Update only the black/white data of the display. /// /// This must be finished by calling `update_chromatic_frame`. fn update_achromatic_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, black: &[u8], ) -> Result<(), SPI::Error>; /// Update only the chromatic data of the display. /// /// This should be preceded by a call to `update_achromatic_frame`. /// This data takes precedence over the black/white data. 
fn update_chromatic_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, chromatic: &[u8], ) -> Result<(), SPI::Error>; } /// All the functions to interact with the EPDs /// /// This trait includes all public functions to use the EPDs /// /// # Example /// ///```rust, no_run ///# use embedded_hal_mock::*; ///# fn main() -> Result<(), MockError> { ///use embedded_graphics::{ /// pixelcolor::BinaryColor::On as Black, prelude::*, primitives::{Line, PrimitiveStyle}, ///}; ///use epd_waveshare::{epd4in2::*, prelude::*}; ///# ///# let expectations = []; ///# let mut spi = spi::Mock::new(&expectations); ///# let expectations = []; ///# let cs_pin = pin::Mock::new(&expectations); ///# let busy_in = pin::Mock::new(&expectations); ///# let dc = pin::Mock::new(&expectations); ///# let rst = pin::Mock::new(&expectations); ///# let mut delay = delay::MockNoop::new(); /// ///// Setup EPD ///let mut epd = Epd4in2::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?; /// ///// Use display graphics from embedded-graphics ///let mut display = Display4in2::default(); /// ///// Use embedded graphics for drawing a line /// ///let _ = Line::new(Point::new(0, 120), Point::new(0, 295)) /// .into_styled(PrimitiveStyle::with_stroke(Color::Black, 1)) /// .draw(&mut display); /// /// // Display updated frame ///epd.update_frame(&mut spi, &display.buffer(), &mut delay)?; ///epd.display_frame(&mut spi, &mut delay)?; /// ///// Set the EPD to sleep ///epd.sleep(&mut spi, &mut delay)?; ///# Ok(()) ///# } ///``` pub trait WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { /// The Color Type used by the Display type DisplayColor; /// Creates a new driver from a SPI peripheral, CS Pin, Busy InputPin, DC /// /// `delay_us` is the number of us the idle loop should sleep on. /// Setting it to 0 implies busy waiting. /// Setting it to None means a default value is used. 
/// /// This already initialises the device. fn new( spi: &mut SPI, cs: CS, busy: BUSY, dc: DC, rst: RST, delay: &mut DELAY, delay_us: Option<u32>, ) -> Result<Self, SPI::Error> where Self: Sized; /// Let the device enter deep-sleep mode to save power. /// /// The deep sleep mode returns to standby with a hardware reset. fn sleep(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; /// Wakes the device up from sleep /// /// Also reintialises the device if necessary. fn wake_up(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; /// Sets the backgroundcolor for various commands like [clear_frame](WaveshareDisplay::clear_frame) fn set_background_color(&mut self, color: Self::DisplayColor); /// Get current background color fn background_color(&self) -> &Self::DisplayColor; /// Get the width of the display fn width(&self) -> u32; /// Get the height of the display fn height(&self) -> u32; /// Transmit a full frame to the SRAM of the EPD fn update_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Transmits partial data to the SRAM of the EPD /// /// (x,y) is the top left corner /// /// BUFFER needs to be of size: width / 8 * height ! 
#[allow(clippy::too_many_arguments)] fn update_partial_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, buffer: &[u8], x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error>; /// Displays the frame data from SRAM /// /// This function waits until the device isn`t busy anymore fn display_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; /// Provide a combined update&display and save some time (skipping a busy check in between) fn update_and_display_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Clears the frame buffer on the EPD with the declared background color /// /// The background color can be changed with [`WaveshareDisplay::set_background_color`] fn clear_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; /// Trait for using various Waveforms from different LUTs /// E.g. for partial refreshes /// /// A full refresh is needed after a certain amount of quick refreshes! /// /// WARNING: Quick Refresh might lead to ghosting-effects/problems with your display. Especially for the 4.2in Display! /// /// If None is used the old value will be loaded on the LUTs once more fn set_lut( &mut self, spi: &mut SPI, delay: &mut DELAY, refresh_rate: Option<RefreshLut>, ) -> Result<(), SPI::Error>; /// Wait until the display has stopped processing data /// /// You can call this to make sure a frame is displayed before goin further fn wait_until_idle(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error>; } /// Allows quick refresh support for displays that support it; lets you send both /// old and new frame data to support this. /// /// When using the quick refresh look-up table, the display must receive separate display /// buffer data marked as old, and new. This is used to determine which pixels need to change, /// and how they will change. This isn't required when using full refreshes. 
/// /// (todo: Example ommitted due to CI failures.) /// Example: ///```rust, no_run ///# use embedded_hal_mock::*; ///# fn main() -> Result<(), MockError> { ///# use embedded_graphics::{ ///# pixelcolor::BinaryColor::On as Black, prelude::*, primitives::{Line, PrimitiveStyle}, ///# }; ///# use epd_waveshare::{epd4in2::*, prelude::*}; ///# use epd_waveshare::graphics::VarDisplay; ///# ///# let expectations = []; ///# let mut spi = spi::Mock::new(&expectations); ///# let expectations = []; ///# let cs_pin = pin::Mock::new(&expectations); ///# let busy_in = pin::Mock::new(&expectations); ///# let dc = pin::Mock::new(&expectations); ///# let rst = pin::Mock::new(&expectations); ///# let mut delay = delay::MockNoop::new(); ///# ///# // Setup EPD ///# let mut epd = Epd4in2::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?; ///let (x, y, frame_width, frame_height) = (20, 40, 80,80); /// ///let mut buffer = [DEFAULT_BACKGROUND_COLOR.get_byte_value(); 80 / 8 * 80]; ///let mut display = VarDisplay::new(frame_width, frame_height, &mut buffer,false).unwrap(); /// ///epd.update_partial_old_frame(&mut spi, &mut delay, display.buffer(), x, y, frame_width, frame_height) /// .ok(); /// ///display.clear(Color::White).ok(); ///// Execute drawing commands here. /// ///epd.update_partial_new_frame(&mut spi, &mut delay, display.buffer(), x, y, frame_width, frame_height) /// .ok(); ///# Ok(()) ///# } ///``` pub trait QuickRefresh<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { /// Updates the old frame. fn update_old_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Updates the new frame. 
fn update_new_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Displays the new frame fn display_new_frame(&mut self, spi: &mut SPI, _delay: &mut DELAY) -> Result<(), SPI::Error>; /// Updates and displays the new frame. fn update_and_display_new_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error>; /// Updates the old frame for a portion of the display. #[allow(clippy::too_many_arguments)] fn update_partial_old_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, buffer: &[u8], x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error>; /// Updates the new frame for a portion of the display. #[allow(clippy::too_many_arguments)] fn update_partial_new_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, buffer: &[u8], x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error>; /// Clears the partial frame buffer on the EPD with the declared background color /// The background color can be changed with [`WaveshareDisplay::set_background_color`] fn clear_partial_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error>; }
RefreshLut
identifier_name
base.go
package controllers import ( "encoding/json" "fmt" "strconv" "time" "github.com/astaxie/beego" "github.com/astaxie/beego/orm" "github.com/bitly/go-simplejson" "github.com/juju/errors" "seater/models" "seater/schema" ) // Define controller constants const ( TimestampLayout = time.RFC3339 PeriodDay = "day" PeriodHour = "hour" PeriodWeek = "week" ) type msgBody struct { Msg string `json:"msg"` } // SeaterController defines the base controller type SeaterController struct { beego.Controller model *models.SeaterModel orm orm.Ormer deferrers []deferrer errs []error pagingResult models.QueryParams } // Prepare prepares controller context func (c *SeaterController) Prepare() { model, err := models.NewModel() if err != nil { c.TraceServerError(errors.Annotatef(err, "failed to init model")) } if err = model.Begin(); err != nil { c.TraceServerError(errors.Annotatef(err, "failed to begin database transaction")) } c.model = model c.orm = model.Orm() c.pagingResult = models.NewQueryParams() } // Finish ends transaction func (c *SeaterController) Finish() { defer c.execDeferrers() err := c.endTransaction() if err != nil { c.TraceServerError(errors.Annotatef(err, "failed to end transaction")) } } // M returns the model object func (c *SeaterController) M() *models.SeaterModel { return c.model } type deferrer func() error func (c *SeaterController) deferExec(f deferrer) { c.deferrers = append(c.deferrers, f) } // Code sets the response status func (c *SeaterController) Code(code int) { c.Ctx.Output.SetStatus(code) } func (c *SeaterController) execDeferrers() { var err error for i := len(c.deferrers) - 1; i >= 0; i-- { err = c.deferrers[i]() if err != nil { c.errs = append(c.errs, err) } } } func (c *SeaterController) traceJSONAbort(err error, code int, args ...string) { c.jsonAbort(code, args...) 
} // jsonAbort trace and abort error func (c *SeaterController) jsonAbort(code int, args ...string) { defer c.execDeferrers() c.Header("Content-Type", "application/json; charset=utf-8") var msg string if len(args) == 0 || args[0] == "" { switch code { case 400: msg = "Bad Request" case 401: msg = "Unauthorized" case 404: msg = "Resource Not Found" case 409: msg = "Conflict" case 500: msg = "Server Error" default: msg = "" } } else { msg = args[0] } c.addError(fmt.Errorf(msg)) err := c.endTransaction() if err != nil { code = 500 msg = "Server Error" } body, err := json.Marshal(msgBody{Msg: msg}) if err != nil { c.CustomAbort(500, `{"msg": "Unknown Error"}`) } c.CustomAbort(code, string(body)) } // BadRequestf returns bad request response with formatted message func (c *SeaterController) BadRequestf(format string, args ...interface{}) { c.TraceBadRequestf(nil, format, args...) } // TraceBadRequestf traces error and returns bad request response with formatted message func (c *SeaterController) TraceBadRequestf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(nil, 400, msg) } // TraceServerError traces error and returns server error func (c *SeaterController)
(err error) { c.traceJSONAbort(err, 500) } // Forbiddenf returns forbidden response with formatted message func (c *SeaterController) Forbiddenf(format string, args ...interface{}) { c.TraceForbiddenf(nil, format, args...) } // TraceForbiddenf traces error and returns forbidden response with formatted message func (c *SeaterController) TraceForbiddenf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 403, msg) } // NotFoundf returns not found response with formatted message func (c *SeaterController) NotFoundf(format string, args ...interface{}) { c.TraceNotFoundf(nil, format, args...) } // TraceNotFoundf traces error and returns not found response with formatted message func (c *SeaterController) TraceNotFoundf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 404, msg) } // Conflictf returns conflict response with formatted message func (c *SeaterController) Conflictf(format string, args ...interface{}) { c.TraceConflictf(nil, format, args...) } // TraceConflictf traces error and returns conflict response with formatted message func (c *SeaterController) TraceConflictf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 409, msg) } // Unauthorizedf returns authorized response with formatted message func (c *SeaterController) Unauthorizedf(format string, args ...interface{}) { c.TraceUnauthorizedf(nil, format, args...) } // TraceUnauthorizedf traces error and returns authorized reponse with formatted message func (c *SeaterController) TraceUnauthorizedf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) 
c.traceJSONAbort(err, 401, msg) } func (c *SeaterController) addError(err error) { c.errs = append(c.errs, err) } // jsonResp serves json response func (c *SeaterController) jsonResp(data interface{}) { if obj, ok := data.(*simplejson.Json); ok { data = obj.Interface() } paging := c.getPagingResult() if paging != nil { bytes, err := json.Marshal(data) if err != nil { err = errors.Annotatef(err, "failed to marshal resp interface") c.TraceServerError(err) } j, err := simplejson.NewJson(bytes) if err != nil { err = errors.Annotatef(err, "failed to unmarshal resp bytes") c.TraceServerError(err) } j.Set("paging", paging) data = j.Interface() } c.Data["json"] = data c.ServeJSON() } // OK response 200 OK with json data func (c *SeaterController) OK(data interface{}) { c.Code(200) c.jsonResp(data) } // Accepted response an asynchronous resource func (c *SeaterController) Accepted(data interface{}) { c.Code(202) c.jsonResp(data) } // Created response an asynchronous resource func (c *SeaterController) Created(data interface{}) { c.Code(201) c.jsonResp(data) } // NoContent responses with code 204 func (c *SeaterController) NoContent(code ...int) { if len(code) > 0 { c.Code(code[0]) } else { c.Code(204) } c.Ctx.Output.Body([]byte("")) } // Validate validates with json schema func (c *SeaterController) Validate(sche string, document ...string) { var doc string if len(document) > 0 { doc = document[0] } else { doc = string(c.Ctx.Input.RequestBody) if len(doc) == 0 { c.BadRequestf("request body is empty") } } _, err := simplejson.NewJson([]byte(doc)) if err != nil { c.BadRequestf("invalid json format") } result, err := schema.Validate(sche, doc) if err != nil { c.TraceServerError(errors.Annotatef(err, "invalid schema")) } if !result.Valid() { s := "invalid parameters:\n" var e interface{} for _, err := range result.Errors() { s += fmt.Sprintf("%s\n", err) e = err } c.BadRequestf("%s", e) } } func (c *SeaterController) getInt64(key string, defs ...int64) (v int64, ok bool) { if 
strv := c.Ctx.Input.Query(key); strv != "" { val, err := strconv.ParseInt(strv, 10, 64) if err != nil { c.BadRequestf("invalid int64 argument %s: %s", key, strv) } return val, true } return } func (c *SeaterController) getString(key string, defs ...string) (v string, ok bool) { if v = c.Ctx.Input.Query(key); v != "" { return v, true } if len(defs) > 0 { return defs[0], false } return "", false } // getTime return input as an time and the existence of the input func (c *SeaterController) getTime(key string, defs ...time.Time) (v time.Time, ok bool) { if strv := c.Ctx.Input.Query(key); strv != "" { val, err := time.Parse(TimestampLayout, strv) if err != nil { c.BadRequestf("invalid time argument %s: %s", key, strv) } return val, true } else if len(defs) > 0 { v = defs[0] return } return } // Header get or set a header if value is provided func (c *SeaterController) Header(key string, value ...interface{}) string { if len(value) == 0 { return c.Ctx.Input.Header(key) } retval := fmt.Sprintf("%v", value[0]) c.Ctx.Output.Header(key, retval) return retval } func (c *SeaterController) endTransaction() (err error) { if c.model == nil { return } rollback := false if len(c.errs) > 0 { rollback = true } if rollback { err = c.model.Rollback() if err != nil { panic(fmt.Sprintf("failed to rollback transaction: %v", err)) } } else { err = c.model.Commit() if err != nil { panic(fmt.Sprintf("failed to commit transaction: %v", err)) } } return } func (c *SeaterController) parseJSONBody(keys ...string) (v *simplejson.Json) { v, err := simplejson.NewJson(c.Ctx.Input.RequestBody) if err != nil { c.BadRequestf("invalid json format") } if len(keys) > 0 { for _, k := range keys { _, ok := v.CheckGet(k) if !ok { c.BadRequestf("Bad Request") } else { v = v.Get(k) } } } return } // UnmarshalJSONBody unmarshal request json body func (c *SeaterController) UnmarshalJSONBody(v interface{}, keys ...string) { var bytes []byte var err error if len(keys) > 0 { j := c.parseJSONBody(keys...) 
bytes, err = j.MarshalJSON() if err != nil { err = errors.Annotate(err, "failed to unmarshal json") c.TraceServerError(err) } } else { bytes = c.Ctx.Input.RequestBody } err = json.Unmarshal(bytes, v) if err != nil { c.BadRequestf("invalid request body") } } // UserInfo defines session value type UserInfo struct { UserID int64 `json:"user_id"` Openid string `json:"openid"` SessionKey string `json:"session_key"` } func (c *SeaterController) getURLParam(key string) string { return c.Ctx.Input.Param(key) } func (c *SeaterController) getURLID(name string) int64 { id, err := strconv.ParseInt(c.getURLParam(name), 10, 64) if err != nil { c.BadRequestf("invalid id") } return id } // CreateTask create task func (c *SeaterController) CreateTask(t string, data *simplejson.Json) (task *models.Task, err error) { if task, err = c.model.NewTask(t, data); err != nil { err = errors.Annotatef(err, "failed to create task %s", t) return } return }
TraceServerError
identifier_name
base.go
package controllers import ( "encoding/json" "fmt" "strconv" "time" "github.com/astaxie/beego" "github.com/astaxie/beego/orm" "github.com/bitly/go-simplejson" "github.com/juju/errors" "seater/models" "seater/schema" ) // Define controller constants const ( TimestampLayout = time.RFC3339 PeriodDay = "day" PeriodHour = "hour" PeriodWeek = "week" ) type msgBody struct { Msg string `json:"msg"` } // SeaterController defines the base controller type SeaterController struct { beego.Controller model *models.SeaterModel orm orm.Ormer deferrers []deferrer errs []error pagingResult models.QueryParams } // Prepare prepares controller context func (c *SeaterController) Prepare() { model, err := models.NewModel() if err != nil { c.TraceServerError(errors.Annotatef(err, "failed to init model")) } if err = model.Begin(); err != nil { c.TraceServerError(errors.Annotatef(err, "failed to begin database transaction")) } c.model = model c.orm = model.Orm() c.pagingResult = models.NewQueryParams() } // Finish ends transaction func (c *SeaterController) Finish() { defer c.execDeferrers() err := c.endTransaction() if err != nil { c.TraceServerError(errors.Annotatef(err, "failed to end transaction")) } } // M returns the model object func (c *SeaterController) M() *models.SeaterModel { return c.model } type deferrer func() error func (c *SeaterController) deferExec(f deferrer) { c.deferrers = append(c.deferrers, f) } // Code sets the response status func (c *SeaterController) Code(code int) { c.Ctx.Output.SetStatus(code) } func (c *SeaterController) execDeferrers() { var err error for i := len(c.deferrers) - 1; i >= 0; i-- { err = c.deferrers[i]() if err != nil { c.errs = append(c.errs, err) } } } func (c *SeaterController) traceJSONAbort(err error, code int, args ...string) { c.jsonAbort(code, args...) 
} // jsonAbort trace and abort error func (c *SeaterController) jsonAbort(code int, args ...string) { defer c.execDeferrers() c.Header("Content-Type", "application/json; charset=utf-8") var msg string if len(args) == 0 || args[0] == "" { switch code { case 400: msg = "Bad Request" case 401: msg = "Unauthorized" case 404: msg = "Resource Not Found" case 409: msg = "Conflict" case 500: msg = "Server Error" default: msg = "" } } else { msg = args[0] } c.addError(fmt.Errorf(msg)) err := c.endTransaction() if err != nil { code = 500 msg = "Server Error" } body, err := json.Marshal(msgBody{Msg: msg}) if err != nil { c.CustomAbort(500, `{"msg": "Unknown Error"}`) } c.CustomAbort(code, string(body)) } // BadRequestf returns bad request response with formatted message func (c *SeaterController) BadRequestf(format string, args ...interface{}) { c.TraceBadRequestf(nil, format, args...) } // TraceBadRequestf traces error and returns bad request response with formatted message func (c *SeaterController) TraceBadRequestf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(nil, 400, msg) } // TraceServerError traces error and returns server error func (c *SeaterController) TraceServerError(err error) { c.traceJSONAbort(err, 500) } // Forbiddenf returns forbidden response with formatted message func (c *SeaterController) Forbiddenf(format string, args ...interface{}) { c.TraceForbiddenf(nil, format, args...) } // TraceForbiddenf traces error and returns forbidden response with formatted message func (c *SeaterController) TraceForbiddenf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 403, msg) } // NotFoundf returns not found response with formatted message func (c *SeaterController) NotFoundf(format string, args ...interface{}) { c.TraceNotFoundf(nil, format, args...) 
} // TraceNotFoundf traces error and returns not found response with formatted message func (c *SeaterController) TraceNotFoundf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 404, msg) } // Conflictf returns conflict response with formatted message func (c *SeaterController) Conflictf(format string, args ...interface{}) { c.TraceConflictf(nil, format, args...) } // TraceConflictf traces error and returns conflict response with formatted message func (c *SeaterController) TraceConflictf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 409, msg) } // Unauthorizedf returns authorized response with formatted message func (c *SeaterController) Unauthorizedf(format string, args ...interface{}) { c.TraceUnauthorizedf(nil, format, args...) } // TraceUnauthorizedf traces error and returns authorized reponse with formatted message func (c *SeaterController) TraceUnauthorizedf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 401, msg) } func (c *SeaterController) addError(err error) { c.errs = append(c.errs, err) } // jsonResp serves json response func (c *SeaterController) jsonResp(data interface{}) { if obj, ok := data.(*simplejson.Json); ok { data = obj.Interface() } paging := c.getPagingResult() if paging != nil { bytes, err := json.Marshal(data) if err != nil { err = errors.Annotatef(err, "failed to marshal resp interface") c.TraceServerError(err) } j, err := simplejson.NewJson(bytes) if err != nil { err = errors.Annotatef(err, "failed to unmarshal resp bytes") c.TraceServerError(err) } j.Set("paging", paging) data = j.Interface() } c.Data["json"] = data c.ServeJSON() } // OK response 200 OK with json data func (c *SeaterController) OK(data interface{})
// Accepted response an asynchronous resource func (c *SeaterController) Accepted(data interface{}) { c.Code(202) c.jsonResp(data) } // Created response an asynchronous resource func (c *SeaterController) Created(data interface{}) { c.Code(201) c.jsonResp(data) } // NoContent responses with code 204 func (c *SeaterController) NoContent(code ...int) { if len(code) > 0 { c.Code(code[0]) } else { c.Code(204) } c.Ctx.Output.Body([]byte("")) } // Validate validates with json schema func (c *SeaterController) Validate(sche string, document ...string) { var doc string if len(document) > 0 { doc = document[0] } else { doc = string(c.Ctx.Input.RequestBody) if len(doc) == 0 { c.BadRequestf("request body is empty") } } _, err := simplejson.NewJson([]byte(doc)) if err != nil { c.BadRequestf("invalid json format") } result, err := schema.Validate(sche, doc) if err != nil { c.TraceServerError(errors.Annotatef(err, "invalid schema")) } if !result.Valid() { s := "invalid parameters:\n" var e interface{} for _, err := range result.Errors() { s += fmt.Sprintf("%s\n", err) e = err } c.BadRequestf("%s", e) } } func (c *SeaterController) getInt64(key string, defs ...int64) (v int64, ok bool) { if strv := c.Ctx.Input.Query(key); strv != "" { val, err := strconv.ParseInt(strv, 10, 64) if err != nil { c.BadRequestf("invalid int64 argument %s: %s", key, strv) } return val, true } return } func (c *SeaterController) getString(key string, defs ...string) (v string, ok bool) { if v = c.Ctx.Input.Query(key); v != "" { return v, true } if len(defs) > 0 { return defs[0], false } return "", false } // getTime return input as an time and the existence of the input func (c *SeaterController) getTime(key string, defs ...time.Time) (v time.Time, ok bool) { if strv := c.Ctx.Input.Query(key); strv != "" { val, err := time.Parse(TimestampLayout, strv) if err != nil { c.BadRequestf("invalid time argument %s: %s", key, strv) } return val, true } else if len(defs) > 0 { v = defs[0] return } return } // 
Header get or set a header if value is provided func (c *SeaterController) Header(key string, value ...interface{}) string { if len(value) == 0 { return c.Ctx.Input.Header(key) } retval := fmt.Sprintf("%v", value[0]) c.Ctx.Output.Header(key, retval) return retval } func (c *SeaterController) endTransaction() (err error) { if c.model == nil { return } rollback := false if len(c.errs) > 0 { rollback = true } if rollback { err = c.model.Rollback() if err != nil { panic(fmt.Sprintf("failed to rollback transaction: %v", err)) } } else { err = c.model.Commit() if err != nil { panic(fmt.Sprintf("failed to commit transaction: %v", err)) } } return } func (c *SeaterController) parseJSONBody(keys ...string) (v *simplejson.Json) { v, err := simplejson.NewJson(c.Ctx.Input.RequestBody) if err != nil { c.BadRequestf("invalid json format") } if len(keys) > 0 { for _, k := range keys { _, ok := v.CheckGet(k) if !ok { c.BadRequestf("Bad Request") } else { v = v.Get(k) } } } return } // UnmarshalJSONBody unmarshal request json body func (c *SeaterController) UnmarshalJSONBody(v interface{}, keys ...string) { var bytes []byte var err error if len(keys) > 0 { j := c.parseJSONBody(keys...) 
bytes, err = j.MarshalJSON() if err != nil { err = errors.Annotate(err, "failed to unmarshal json") c.TraceServerError(err) } } else { bytes = c.Ctx.Input.RequestBody } err = json.Unmarshal(bytes, v) if err != nil { c.BadRequestf("invalid request body") } } // UserInfo defines session value type UserInfo struct { UserID int64 `json:"user_id"` Openid string `json:"openid"` SessionKey string `json:"session_key"` } func (c *SeaterController) getURLParam(key string) string { return c.Ctx.Input.Param(key) } func (c *SeaterController) getURLID(name string) int64 { id, err := strconv.ParseInt(c.getURLParam(name), 10, 64) if err != nil { c.BadRequestf("invalid id") } return id } // CreateTask create task func (c *SeaterController) CreateTask(t string, data *simplejson.Json) (task *models.Task, err error) { if task, err = c.model.NewTask(t, data); err != nil { err = errors.Annotatef(err, "failed to create task %s", t) return } return }
{ c.Code(200) c.jsonResp(data) }
identifier_body
base.go
package controllers import ( "encoding/json" "fmt" "strconv" "time" "github.com/astaxie/beego" "github.com/astaxie/beego/orm" "github.com/bitly/go-simplejson" "github.com/juju/errors" "seater/models" "seater/schema" ) // Define controller constants const ( TimestampLayout = time.RFC3339 PeriodDay = "day" PeriodHour = "hour" PeriodWeek = "week" ) type msgBody struct { Msg string `json:"msg"` } // SeaterController defines the base controller type SeaterController struct { beego.Controller model *models.SeaterModel orm orm.Ormer deferrers []deferrer errs []error pagingResult models.QueryParams } // Prepare prepares controller context func (c *SeaterController) Prepare() { model, err := models.NewModel() if err != nil { c.TraceServerError(errors.Annotatef(err, "failed to init model")) } if err = model.Begin(); err != nil { c.TraceServerError(errors.Annotatef(err, "failed to begin database transaction")) } c.model = model c.orm = model.Orm() c.pagingResult = models.NewQueryParams() } // Finish ends transaction func (c *SeaterController) Finish() { defer c.execDeferrers() err := c.endTransaction() if err != nil { c.TraceServerError(errors.Annotatef(err, "failed to end transaction")) } } // M returns the model object func (c *SeaterController) M() *models.SeaterModel { return c.model } type deferrer func() error func (c *SeaterController) deferExec(f deferrer) { c.deferrers = append(c.deferrers, f) } // Code sets the response status func (c *SeaterController) Code(code int) { c.Ctx.Output.SetStatus(code) } func (c *SeaterController) execDeferrers() { var err error for i := len(c.deferrers) - 1; i >= 0; i-- { err = c.deferrers[i]() if err != nil { c.errs = append(c.errs, err) } } } func (c *SeaterController) traceJSONAbort(err error, code int, args ...string) { c.jsonAbort(code, args...) 
} // jsonAbort trace and abort error func (c *SeaterController) jsonAbort(code int, args ...string) { defer c.execDeferrers() c.Header("Content-Type", "application/json; charset=utf-8") var msg string if len(args) == 0 || args[0] == "" { switch code { case 400: msg = "Bad Request" case 401: msg = "Unauthorized" case 404: msg = "Resource Not Found" case 409: msg = "Conflict" case 500: msg = "Server Error" default: msg = "" } } else { msg = args[0] } c.addError(fmt.Errorf(msg)) err := c.endTransaction() if err != nil { code = 500 msg = "Server Error" } body, err := json.Marshal(msgBody{Msg: msg}) if err != nil { c.CustomAbort(500, `{"msg": "Unknown Error"}`) } c.CustomAbort(code, string(body)) } // BadRequestf returns bad request response with formatted message func (c *SeaterController) BadRequestf(format string, args ...interface{}) { c.TraceBadRequestf(nil, format, args...) } // TraceBadRequestf traces error and returns bad request response with formatted message func (c *SeaterController) TraceBadRequestf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(nil, 400, msg) } // TraceServerError traces error and returns server error func (c *SeaterController) TraceServerError(err error) { c.traceJSONAbort(err, 500) } // Forbiddenf returns forbidden response with formatted message func (c *SeaterController) Forbiddenf(format string, args ...interface{}) { c.TraceForbiddenf(nil, format, args...) } // TraceForbiddenf traces error and returns forbidden response with formatted message func (c *SeaterController) TraceForbiddenf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 403, msg) } // NotFoundf returns not found response with formatted message func (c *SeaterController) NotFoundf(format string, args ...interface{}) { c.TraceNotFoundf(nil, format, args...) 
} // TraceNotFoundf traces error and returns not found response with formatted message func (c *SeaterController) TraceNotFoundf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 404, msg) } // Conflictf returns conflict response with formatted message func (c *SeaterController) Conflictf(format string, args ...interface{}) { c.TraceConflictf(nil, format, args...) } // TraceConflictf traces error and returns conflict response with formatted message func (c *SeaterController) TraceConflictf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 409, msg) } // Unauthorizedf returns authorized response with formatted message func (c *SeaterController) Unauthorizedf(format string, args ...interface{}) { c.TraceUnauthorizedf(nil, format, args...) } // TraceUnauthorizedf traces error and returns authorized reponse with formatted message func (c *SeaterController) TraceUnauthorizedf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) 
c.traceJSONAbort(err, 401, msg) } func (c *SeaterController) addError(err error) { c.errs = append(c.errs, err) } // jsonResp serves json response func (c *SeaterController) jsonResp(data interface{}) { if obj, ok := data.(*simplejson.Json); ok { data = obj.Interface() } paging := c.getPagingResult() if paging != nil { bytes, err := json.Marshal(data) if err != nil { err = errors.Annotatef(err, "failed to marshal resp interface") c.TraceServerError(err) } j, err := simplejson.NewJson(bytes) if err != nil { err = errors.Annotatef(err, "failed to unmarshal resp bytes") c.TraceServerError(err) } j.Set("paging", paging) data = j.Interface() } c.Data["json"] = data c.ServeJSON() } // OK response 200 OK with json data func (c *SeaterController) OK(data interface{}) { c.Code(200) c.jsonResp(data) } // Accepted response an asynchronous resource func (c *SeaterController) Accepted(data interface{}) { c.Code(202) c.jsonResp(data) } // Created response an asynchronous resource func (c *SeaterController) Created(data interface{}) { c.Code(201) c.jsonResp(data) } // NoContent responses with code 204 func (c *SeaterController) NoContent(code ...int) { if len(code) > 0 { c.Code(code[0]) } else { c.Code(204) } c.Ctx.Output.Body([]byte("")) } // Validate validates with json schema func (c *SeaterController) Validate(sche string, document ...string) { var doc string if len(document) > 0 { doc = document[0] } else { doc = string(c.Ctx.Input.RequestBody) if len(doc) == 0 { c.BadRequestf("request body is empty") } } _, err := simplejson.NewJson([]byte(doc)) if err != nil { c.BadRequestf("invalid json format") } result, err := schema.Validate(sche, doc) if err != nil { c.TraceServerError(errors.Annotatef(err, "invalid schema")) } if !result.Valid() {
s += fmt.Sprintf("%s\n", err) e = err } c.BadRequestf("%s", e) } } func (c *SeaterController) getInt64(key string, defs ...int64) (v int64, ok bool) { if strv := c.Ctx.Input.Query(key); strv != "" { val, err := strconv.ParseInt(strv, 10, 64) if err != nil { c.BadRequestf("invalid int64 argument %s: %s", key, strv) } return val, true } return } func (c *SeaterController) getString(key string, defs ...string) (v string, ok bool) { if v = c.Ctx.Input.Query(key); v != "" { return v, true } if len(defs) > 0 { return defs[0], false } return "", false } // getTime return input as an time and the existence of the input func (c *SeaterController) getTime(key string, defs ...time.Time) (v time.Time, ok bool) { if strv := c.Ctx.Input.Query(key); strv != "" { val, err := time.Parse(TimestampLayout, strv) if err != nil { c.BadRequestf("invalid time argument %s: %s", key, strv) } return val, true } else if len(defs) > 0 { v = defs[0] return } return } // Header get or set a header if value is provided func (c *SeaterController) Header(key string, value ...interface{}) string { if len(value) == 0 { return c.Ctx.Input.Header(key) } retval := fmt.Sprintf("%v", value[0]) c.Ctx.Output.Header(key, retval) return retval } func (c *SeaterController) endTransaction() (err error) { if c.model == nil { return } rollback := false if len(c.errs) > 0 { rollback = true } if rollback { err = c.model.Rollback() if err != nil { panic(fmt.Sprintf("failed to rollback transaction: %v", err)) } } else { err = c.model.Commit() if err != nil { panic(fmt.Sprintf("failed to commit transaction: %v", err)) } } return } func (c *SeaterController) parseJSONBody(keys ...string) (v *simplejson.Json) { v, err := simplejson.NewJson(c.Ctx.Input.RequestBody) if err != nil { c.BadRequestf("invalid json format") } if len(keys) > 0 { for _, k := range keys { _, ok := v.CheckGet(k) if !ok { c.BadRequestf("Bad Request") } else { v = v.Get(k) } } } return } // UnmarshalJSONBody unmarshal request json body func (c 
*SeaterController) UnmarshalJSONBody(v interface{}, keys ...string) { var bytes []byte var err error if len(keys) > 0 { j := c.parseJSONBody(keys...) bytes, err = j.MarshalJSON() if err != nil { err = errors.Annotate(err, "failed to unmarshal json") c.TraceServerError(err) } } else { bytes = c.Ctx.Input.RequestBody } err = json.Unmarshal(bytes, v) if err != nil { c.BadRequestf("invalid request body") } } // UserInfo defines session value type UserInfo struct { UserID int64 `json:"user_id"` Openid string `json:"openid"` SessionKey string `json:"session_key"` } func (c *SeaterController) getURLParam(key string) string { return c.Ctx.Input.Param(key) } func (c *SeaterController) getURLID(name string) int64 { id, err := strconv.ParseInt(c.getURLParam(name), 10, 64) if err != nil { c.BadRequestf("invalid id") } return id } // CreateTask create task func (c *SeaterController) CreateTask(t string, data *simplejson.Json) (task *models.Task, err error) { if task, err = c.model.NewTask(t, data); err != nil { err = errors.Annotatef(err, "failed to create task %s", t) return } return }
s := "invalid parameters:\n" var e interface{} for _, err := range result.Errors() {
random_line_split
base.go
package controllers import ( "encoding/json" "fmt" "strconv" "time" "github.com/astaxie/beego" "github.com/astaxie/beego/orm" "github.com/bitly/go-simplejson" "github.com/juju/errors" "seater/models" "seater/schema" ) // Define controller constants const ( TimestampLayout = time.RFC3339 PeriodDay = "day" PeriodHour = "hour" PeriodWeek = "week" ) type msgBody struct { Msg string `json:"msg"` } // SeaterController defines the base controller type SeaterController struct { beego.Controller model *models.SeaterModel orm orm.Ormer deferrers []deferrer errs []error pagingResult models.QueryParams } // Prepare prepares controller context func (c *SeaterController) Prepare() { model, err := models.NewModel() if err != nil { c.TraceServerError(errors.Annotatef(err, "failed to init model")) } if err = model.Begin(); err != nil { c.TraceServerError(errors.Annotatef(err, "failed to begin database transaction")) } c.model = model c.orm = model.Orm() c.pagingResult = models.NewQueryParams() } // Finish ends transaction func (c *SeaterController) Finish() { defer c.execDeferrers() err := c.endTransaction() if err != nil { c.TraceServerError(errors.Annotatef(err, "failed to end transaction")) } } // M returns the model object func (c *SeaterController) M() *models.SeaterModel { return c.model } type deferrer func() error func (c *SeaterController) deferExec(f deferrer) { c.deferrers = append(c.deferrers, f) } // Code sets the response status func (c *SeaterController) Code(code int) { c.Ctx.Output.SetStatus(code) } func (c *SeaterController) execDeferrers() { var err error for i := len(c.deferrers) - 1; i >= 0; i-- { err = c.deferrers[i]() if err != nil { c.errs = append(c.errs, err) } } } func (c *SeaterController) traceJSONAbort(err error, code int, args ...string) { c.jsonAbort(code, args...) 
} // jsonAbort trace and abort error func (c *SeaterController) jsonAbort(code int, args ...string) { defer c.execDeferrers() c.Header("Content-Type", "application/json; charset=utf-8") var msg string if len(args) == 0 || args[0] == "" { switch code { case 400: msg = "Bad Request" case 401: msg = "Unauthorized" case 404: msg = "Resource Not Found" case 409: msg = "Conflict" case 500: msg = "Server Error" default: msg = "" } } else { msg = args[0] } c.addError(fmt.Errorf(msg)) err := c.endTransaction() if err != nil { code = 500 msg = "Server Error" } body, err := json.Marshal(msgBody{Msg: msg}) if err != nil { c.CustomAbort(500, `{"msg": "Unknown Error"}`) } c.CustomAbort(code, string(body)) } // BadRequestf returns bad request response with formatted message func (c *SeaterController) BadRequestf(format string, args ...interface{}) { c.TraceBadRequestf(nil, format, args...) } // TraceBadRequestf traces error and returns bad request response with formatted message func (c *SeaterController) TraceBadRequestf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(nil, 400, msg) } // TraceServerError traces error and returns server error func (c *SeaterController) TraceServerError(err error) { c.traceJSONAbort(err, 500) } // Forbiddenf returns forbidden response with formatted message func (c *SeaterController) Forbiddenf(format string, args ...interface{}) { c.TraceForbiddenf(nil, format, args...) } // TraceForbiddenf traces error and returns forbidden response with formatted message func (c *SeaterController) TraceForbiddenf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 403, msg) } // NotFoundf returns not found response with formatted message func (c *SeaterController) NotFoundf(format string, args ...interface{}) { c.TraceNotFoundf(nil, format, args...) 
} // TraceNotFoundf traces error and returns not found response with formatted message func (c *SeaterController) TraceNotFoundf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 404, msg) } // Conflictf returns conflict response with formatted message func (c *SeaterController) Conflictf(format string, args ...interface{}) { c.TraceConflictf(nil, format, args...) } // TraceConflictf traces error and returns conflict response with formatted message func (c *SeaterController) TraceConflictf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) c.traceJSONAbort(err, 409, msg) } // Unauthorizedf returns authorized response with formatted message func (c *SeaterController) Unauthorizedf(format string, args ...interface{}) { c.TraceUnauthorizedf(nil, format, args...) } // TraceUnauthorizedf traces error and returns authorized reponse with formatted message func (c *SeaterController) TraceUnauthorizedf(err error, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) 
c.traceJSONAbort(err, 401, msg) } func (c *SeaterController) addError(err error) { c.errs = append(c.errs, err) } // jsonResp serves json response func (c *SeaterController) jsonResp(data interface{}) { if obj, ok := data.(*simplejson.Json); ok { data = obj.Interface() } paging := c.getPagingResult() if paging != nil { bytes, err := json.Marshal(data) if err != nil { err = errors.Annotatef(err, "failed to marshal resp interface") c.TraceServerError(err) } j, err := simplejson.NewJson(bytes) if err != nil { err = errors.Annotatef(err, "failed to unmarshal resp bytes") c.TraceServerError(err) } j.Set("paging", paging) data = j.Interface() } c.Data["json"] = data c.ServeJSON() } // OK response 200 OK with json data func (c *SeaterController) OK(data interface{}) { c.Code(200) c.jsonResp(data) } // Accepted response an asynchronous resource func (c *SeaterController) Accepted(data interface{}) { c.Code(202) c.jsonResp(data) } // Created response an asynchronous resource func (c *SeaterController) Created(data interface{}) { c.Code(201) c.jsonResp(data) } // NoContent responses with code 204 func (c *SeaterController) NoContent(code ...int) { if len(code) > 0 { c.Code(code[0]) } else { c.Code(204) } c.Ctx.Output.Body([]byte("")) } // Validate validates with json schema func (c *SeaterController) Validate(sche string, document ...string) { var doc string if len(document) > 0 { doc = document[0] } else { doc = string(c.Ctx.Input.RequestBody) if len(doc) == 0 { c.BadRequestf("request body is empty") } } _, err := simplejson.NewJson([]byte(doc)) if err != nil { c.BadRequestf("invalid json format") } result, err := schema.Validate(sche, doc) if err != nil { c.TraceServerError(errors.Annotatef(err, "invalid schema")) } if !result.Valid() { s := "invalid parameters:\n" var e interface{} for _, err := range result.Errors() { s += fmt.Sprintf("%s\n", err) e = err } c.BadRequestf("%s", e) } } func (c *SeaterController) getInt64(key string, defs ...int64) (v int64, ok bool) { if 
strv := c.Ctx.Input.Query(key); strv != "" { val, err := strconv.ParseInt(strv, 10, 64) if err != nil { c.BadRequestf("invalid int64 argument %s: %s", key, strv) } return val, true } return } func (c *SeaterController) getString(key string, defs ...string) (v string, ok bool) { if v = c.Ctx.Input.Query(key); v != "" { return v, true } if len(defs) > 0 { return defs[0], false } return "", false } // getTime return input as an time and the existence of the input func (c *SeaterController) getTime(key string, defs ...time.Time) (v time.Time, ok bool) { if strv := c.Ctx.Input.Query(key); strv != "" { val, err := time.Parse(TimestampLayout, strv) if err != nil { c.BadRequestf("invalid time argument %s: %s", key, strv) } return val, true } else if len(defs) > 0 { v = defs[0] return } return } // Header get or set a header if value is provided func (c *SeaterController) Header(key string, value ...interface{}) string { if len(value) == 0 { return c.Ctx.Input.Header(key) } retval := fmt.Sprintf("%v", value[0]) c.Ctx.Output.Header(key, retval) return retval } func (c *SeaterController) endTransaction() (err error) { if c.model == nil { return } rollback := false if len(c.errs) > 0 { rollback = true } if rollback { err = c.model.Rollback() if err != nil { panic(fmt.Sprintf("failed to rollback transaction: %v", err)) } } else { err = c.model.Commit() if err != nil { panic(fmt.Sprintf("failed to commit transaction: %v", err)) } } return } func (c *SeaterController) parseJSONBody(keys ...string) (v *simplejson.Json) { v, err := simplejson.NewJson(c.Ctx.Input.RequestBody) if err != nil { c.BadRequestf("invalid json format") } if len(keys) > 0 { for _, k := range keys { _, ok := v.CheckGet(k) if !ok { c.BadRequestf("Bad Request") } else { v = v.Get(k) } } } return } // UnmarshalJSONBody unmarshal request json body func (c *SeaterController) UnmarshalJSONBody(v interface{}, keys ...string) { var bytes []byte var err error if len(keys) > 0 { j := c.parseJSONBody(keys...) 
bytes, err = j.MarshalJSON() if err != nil { err = errors.Annotate(err, "failed to unmarshal json") c.TraceServerError(err) } } else { bytes = c.Ctx.Input.RequestBody } err = json.Unmarshal(bytes, v) if err != nil { c.BadRequestf("invalid request body") } } // UserInfo defines session value type UserInfo struct { UserID int64 `json:"user_id"` Openid string `json:"openid"` SessionKey string `json:"session_key"` } func (c *SeaterController) getURLParam(key string) string { return c.Ctx.Input.Param(key) } func (c *SeaterController) getURLID(name string) int64 { id, err := strconv.ParseInt(c.getURLParam(name), 10, 64) if err != nil
return id } // CreateTask create task func (c *SeaterController) CreateTask(t string, data *simplejson.Json) (task *models.Task, err error) { if task, err = c.model.NewTask(t, data); err != nil { err = errors.Annotatef(err, "failed to create task %s", t) return } return }
{ c.BadRequestf("invalid id") }
conditional_block
app.js
const Influx = require('influx') const express = require('express') const http = require('http') const os = require('os') const path = require('path'); const bodyParser = require('body-parser'); const request = require('request'); const app = express(); // view engine setup app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); app.use(bodyParser.urlencoded({ extended: false })); const influx = new Influx.InfluxDB({ database: 'flux_yum', protocol: 'https', host: 'awsflux01.base.li', port: 18086, username: 'impact', password: 'BNMghj!' }); //console.log('Create influx:', influx); // setup mysql const mysql = require('mysql'); const pool = mysql.createPool({ connectionLimit : 10, host : 'localhost', user : 'fire_alarm', password : 'fire_alarm', database : 'fire_alarm' }); http.createServer(app).listen(3100, function () { console.log('Listening on port 3100') }) Date.prototype.addHours = function(h) { this.setTime(this.getTime() + (h*60*60*1000)); return this; } // 对Date的扩展,将 Date 转化为指定格式的String // 月(M)、日(d)、小时(h)、分(m)、秒(s)、季度(q) 可以用 1-2 个占位符, // 年(y)可以用 1-4 个占位符,毫秒(S)只能用 1 个占位符(是 1-3 位的数字) // 例子: // (new Date()).formatTime("yyyy-MM-dd hh:mm:ss.S") ==> 2006-07-02 08:09:04.423 // (new Date()).formatTime("yyyy-M-d h:m:s.S") ==> 2006-7-2 8:9:4.18 Date.prototype.formatTime = function (fmt) { //author: meizz var o = { "M+": this.getMonth() + 1, //月份 "d+": this.getDate(), //日 "h+": this.getHours(), //小时 "m+": this.getMinutes(), //分 "s+": this.getSeconds(), //秒 "q+": Math.floor((this.getMonth() + 3) / 3), //季度 "S": this.getMilliseconds() //毫秒 }; if (/(y+)/.test(fmt)) fmt = fmt.replace(RegExp.$1, (this.getFullYear() + "").substr(4 - RegExp.$1.length)); for (var k in o) if (new RegExp("(" + k + ")").test(fmt)) fmt = fmt.replace(RegExp.$1, (RegExp.$1.length == 1) ? 
(o[k]) : (("00" + o[k]).substr(("" + o[k]).length))); return fmt; } function groupBy(xs, key) { return xs.reduce(function(rv, x) { (rv[x[key]] = rv[x[key]] || []).push(x); return rv; }, {}); }; function minOffsetNow(date) { date = date || new Date(); return parseInt((new Date().getTime() - date.getTime())/(60*1000)); } /** * 全局数据定义 */ var kpis = require('./kpis'); var sensors = require('./sensors'); var users = require('./users'); var timer = 0; var out_value = 0; var out_date = 0; var tm_off_count = {}; const INTV_MIN = 5; // 查询间隔 min const MIN_SECS = 2; // 测试时可调到 2 默认 60 const ALM_AFTER = 5; // 累积多久时长后报警 min const ALM_BETWEEN = 30; // 多次报警时长间隔 min const ALARM_LEVEL = 1; // 什么等级开始报警, 默认1 const BLOCK_FILE = './blocks.json'; // const WX_MSG_URL = 'http://localhost:3100/blockme'; const WX_MSG_URL = 'http://2whzur.natappfree.cc/blockme'; const KPI_SERVICE = 'http://localhost:3119/kpi/alarm'; /** * 获取库名 */ function groupName(group) { switch(group) { case 'sydc1': return '一号库'; case 'sydc2': return '二号库'; default: return group; } } /** * 解析 tag_mesa 为查询参数 */ function parseTagMesa (tag_mesa) { let measurement = ''; let mesa_where = ''; let mesa_vals = ''; let m = {}, m2 = {}; let qarray = []; let chunks = tag_mesa.split(','); chunks.forEach((val, key, arr) => { let c = val.split('='); m[ c[0] ] = c[1]; }); m2 = Object.assign({}, m); if( 'measurement' in m) { measurement = `"${m['measurement']}"`; delete m.measurement; } for( let k in m) { qarray.push( `"${k}"='${m[k]}'`); } mesa_where = qarray.join(' AND '); return { measurement, mesa_where, mesa_vals: m2 }; } /** * 微信 API 初始化 */ let appId = "wx32e64b2b2f8f20df"; // bstwo let appSecret = "905848f29979a4859d2468f76626aa88"; var WechatAPI = require('wechat-api') var fs = require('fs') var wechatApi = new WechatAPI(appId, appSecret, function (callback) { fs.readFile('access_token.txt', 'utf8', function (err, txt) { if (err) {return callback(null, null)} callback(null, JSON.parse(txt)) }) }, function (token, callback) { 
fs.writeFile('access_token.txt', JSON.stringify(token), callback) }) /** * 开启定时器 */ function startTimer(res) { let pp = ''; if( timer) { pp = 'Error: please stop previous timer.'; res? res.send(pp): console.err(pp); } else { pp = 'Timer start @'+ INTV_MIN + ' min'; timer = setInterval(checkSensors, INTV_MIN* 1000* MIN_SECS /*60*/); console.log(pp); res? res.send(pp): null; // At first checkSensors(); } } /** * 结束定时器 */ function stopTimer(res) { if( timer) { clearInterval(timer); timer = 0; res? res.send('Timer stop.'): null; } else { res? res.send('No timer.'): null; } } /** * 自动启动首次(分钟需5的倍数) */ function autoStart(enable) { if(enable) { let m1 = new Date().getMinutes(); let m2 = Math.ceil(m1/5)*5; let df = (m2-m1)*1000* MIN_SECS; console.log('AutoStart waits:', m2-m1, 'min to start.'); if(!timer) { setTimeout(startTimer, df); } else { console.log('Error: please stop previous timer.'); } } } autoStart(MIN_SECS == 60); /** * 传感器检查流程 */ function checkSensors() { console.log('------ CheckSensors start '+ new Date().toLocaleString()+ '-------'); // 找出在线传感器 let onlines = sensors.filter(function(s) { return !s.offline; }); // 批量查询传感器 sensorBatchValues(onlines, function(err, sensors) { if(err) { return; } let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); sensors.forEach( function(sensor) { let pt = sensor.point, kpi = kpis[sensor.kpi]; let ck = checkKpi(pt, kpi); let ex = exceedCount(sensor, ck); if( ex) { sendAlarm(sensor, ck, users, blocks); } console.log(sensor.name+':', ck.value, 'min-off:', sensor.point.min_off, ck.exceed? 
'exceed:'+ck.exceed+ ' count: ' +sensor.exc_count[ck.level] : ''); }); alarmTmOfflineSensors(sensors, users, blocks); }); } /** * 读取传感器值(批量) */ function sensorBatchValues(sensors, callback) { let qs = []; sensors.forEach( function(sensor) { let m = parseTagMesa(sensor.tag_mesa); qs.push(`SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`) }); let q = qs.join('; '); //console.log('batch q:', q); influx.query(q).then(result => { // 注: 结果实际不符合 json 格式, 可用 stringify 转 //console.log('result', JSON.stringify(result)); if( sensors.length == 1) { result = [result]; // 一个传感器时,必须包装成二维 } sensors.forEach( function(sensor, idx) { if(sensor.test) { sensor.point = { time: out_date==0? new Date(): new Date(out_date), last: out_value, } } else { sensor.point = (result&&result.length>idx&&result[idx].length>0) ? result[idx][0]: {}; } }); callback(null, sensors); }).catch(err => { console.error('sensorBatchValues err:', err); callback(err); }); } /** * 读取传感器值(单次) */ function sensorValue(sensor, callback) { let m = parseTagMesa(sensor.tag_mesa); let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; //console.log('q:', q); influx.query(q).then(result => { callback(null, (result&&result.length>0)? 
result[0]: {}); }).catch(err => { console.error('SensorValue err:', err); callback(err); }); } /** * 计算 KPI */ function checkKpi(point, kpi) { let ck = { src: kpi.src || 'calc', measure: kpi.measure || 'temp', reset_alarm: kpi.reset_alarm, exceed: 0, level: 0, standard: '', is_reset: false, }; //TODO: 来自 point 的其他值 let value = point.last; let time = new Date(point.time.getTime()); //console.log('time', time.toLocaleString()); point.min_off = minOffsetNow(time); ck.tm_offline = point.min_off > 12; if( !kpi.src && !ck.tm_offline) { if( kpi.ra_above && value > kpi.ra_above) { ck.exceed = 2; ck.standard = kpi.ar_below+ '~'+ kpi.ra_above; } else if( kpi.ag_above && value > kpi.ag_above) { ck.exceed = 1; ck.standard = kpi.ga_below+ '~'+ kpi.ag_above; } else if( kpi.ar_below && value < kpi.ar_below) { ck.exceed = -2; ck.standard = kpi.ar_below+ '~'+ kpi.ra_above; } else if( kpi.ga_below && value < kpi.ga_below) { ck.exceed = -1; ck.standard = kpi.ga_below+ '~'+ kpi.ag_above; } } else if( kpi.src == 'read' && !ck.tm_offline) { ck.exceed = value>0? 2: 0; ck.standard = kpi.standard; } ck.level = Math.abs(ck.exceed); ck.value = value; ck.time = time; return ck; } /** * 统计超限次数 * * @return true/false 是否累积到报警程度 */ function exceedCount(sensor, check) { let lvl = check.level; sensor.exc_count = sensor.exc_count || {}; sensor.exc_count[lvl] = sensor.exc_count[lvl] || 0; sensor.tm_offline = check.tm_offline; if(sensor.tm_offline) { return false; // tm_offline 数量太多, 1.不单独报警; 2.不累积计数 } if( sensor.exc_count[lvl] == 0 && check.exceed == 0) { return false; } // 0 0 // 1 1 // 累加计数 if(check.exceed == 0 || check.level >= ALARM_LEVEL) { for(let lo=1; lo<=lvl; lo++) sensor.exc_count[lo]++; setDuration(check, sensor.exc_count[lvl]); } else { return false; //不计数 } if( check.exceed == 0) { // 复位情况:也允许发送报警 check.is_reset = (sensor.exc_count[lvl] > 1); for(let lo=1; lo<=lvl; lo++) sensor.exc_count[lo] = 0; return ( check.is_reset && check.reset_alarm)? 
true: false; } else { // 超限情况:从计数判断 let tms_a = (check.src=='read')? 0: parseInt(ALM_AFTER / INTV_MIN); // ALM_AFTER 转为次数, read 的不用等待 let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 6; let real_tms = sensor.exc_count[lvl]-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断 if(real_tms < tms_a) { return false; } else { real_tms -= tms_a; return (real_tms%tms_b == 0)? true: false; } } /* if( check.exceed === 0 || check.level < ALARM_LEVEL) { check.is_reset = (sensor.exc_count > 0); setDuration(check, sensor.exc_count); sensor.exc_count = 0; // 复位也允许发送报警 return ( check.is_reset && check.reset_alarm)? true: false; } else { // 累加计数 if(!sensor.exc_count) { sensor.exc_count = 1; } else { sensor.exc_count++; } setDuration(check, sensor.exc_count); // 计数判断 let tms_a = parseInt(ALM_AFTER / INTV_MIN); let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 2; let real_tms = sensor.exc_count-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断 if(real_tms < tms_a) { return false; } else { real_tms -= tms_a; return (real_tms%tms_b == 0)? true: false; } } */ } /** * 计算 或 设置时长 */ function setDuration(check, exc_count) { exc_count = exc_count || 0; if(check.src == 'read') { check.duration = (exc_count<1)? 0: (exc_count-1)*INTV_MIN; } else { check.duration = (exc_count<2)? 0: (exc_count-2)*INTV_MIN; // -2:隐含减去用于确认的第一个 ALM_AFTER 分钟 } //TODO: read 时,来自 point } /** * 格式化下时长 */ function formatDuration(minutes) { minutes = minutes || 0; if(minutes < 120 ) { return minutes+ '分钟'; } else { let hr = parseInt(minutes/60); let min = minutes%60; let day = 0; if( hr >= 24) { day = parseInt(hr/24); hr = hr%24; } return (day? day+ '天':'')+ (hr+ '小时')+ (min? min+ '分钟': ''); } } /** * 发送报警 */ function sendAlarm(sensor, check, users, blocks) { let firstline = makeFirstline(sensor, check); let curtime = new Date().formatTime('yyyy-MM-dd hh:mm'); let level = check.level; let level_name = level==0? '通知': (level==1? '预警': '报警'); let level_color = level==0? '#16A765': (level==1? 
'#FFAD46': '#F83A22'); let durat_str = formatDuration(check.duration); let lastline = (check.duration? '已持续约 '+ durat_str: '')+ (check.duration&&level? ', ': '') +(level? '请及时处理!':''); console.error('SendAlarm', curtime, sensor.name, check); let to_mobiles = []; users.forEach( function(user) { if(!user.openid) return; // 传感器是否在用户的组? if( user.groups.indexOf(sensor.group) == -1) { return; } // 用户是否屏蔽该报警? let sid = sensor.id, uid = user.id; let until = blocks&&blocks[sid]&&blocks[sid][uid]? new Date(blocks[sid][uid]): null; if(until && until> new Date() && level> 0) { //复位通知不能屏蔽 console.log('user <'+ uid+ '> blocks <'+ sid+ '> until', until.toLocaleString()); return; } to_mobiles.push(user.mobile); // 发送微信消息(本地) /* var templateId = 'zOVAEaSZVEHPdRE1KM2uQJy5wPfuWibHSU6NmXpIqF8'; var url = WX_MSG_URL+ `?sid=${sid}&uid=${uid}`; var data = { "first": { "value": firstline, "color":"#173177" }, "keyword1":{ "value": level_name, "color": level_color }, "keyword2": { "value": curtime, "color":"#173177" }, "keyword3": { "value": sensor.loc, "color":"#173177" }, "keyword4": { "value": '何 138****1234', "color":"#173177" }, "keyword5": { "value": 'n/a', "color":"#173177" }, "remark":{ "value": lastline, "color":"#173177" } }; wechatApi.sendTemplate(user.openid, templateId, url, data, function(err, result) { //console.log('sendTemplate err+result:', err, result) }) */ }); let mobiles = to_mobiles.join(','); let json = { "token":"20185523", "mobile": mobiles, "firstline": firstline, "level_name": level_name, "level_color": level_color, "curtime": curtime, "location": sensor.loc, "contact": "何 138****2345", "workorder": "n/a", "lastline": lastline }; postRequest(KPI_SERVICE, json, function(err, resp, body) { if(err) console.log('Remote:', err); else console.log('Remote:', resp.statusCode, body); }); } /** * 生成报警主提示 */ function makeFirstline(sensor, check) { //eg. 
sensor.name+ '温度超标!数值:'+ check.value+ ' 标准:'+ check.standard, let r = sensor.name+ ' '; switch (check.measure) { case 'temp': r += '温度' + (!check.is_reset? '超标': '复位')+ '!'; r += '数值:' + check.value+ ' 标准:'+ check.standard; break; case 'offline': r += '离线' + (!check.is_reset? '报警': '复位')+ '!'; r += !check.is_reset? '标准:'+ check.standard: ''; break; } return r; } /** * 统计 tm 离线传感器并报警 */ function alarmTmOfflineSensors(sensors, users, blocks) { let offlines = sensors.filter(function(s) { return s.tm_offline; }); let new_off_count = {}; let new_off_snrs = groupBy(offlines, 'group'); for( group in new_off_snrs) { new_off_count[group] = {}; new_off_count[group]['num'] = new_off_snrs[group].length; new_off_count[group]['min_off'] = new_off_snrs[group][0].point.min_off; } console.log('old_off_count', tm_off_count); console.log('new_off_count', new_off_count); // 查找离线复位 for( group in tm_off_count) { let od = tm_off_count[group]; let nw = new_off_count[group]; if(!nw) { let sensor = { name: '共计'+ od.num+ '个传感器', group: group, id: group+ '_group_offline', loc: groupName(group)+'-全库范围', }; let check = { level: 0, duration: od.min_off+ INTV_MIN, measure: 'offline', is_reset: true, standard: '', }; sendAlarm(sensor, check, users, blocks); } } // 查找离线报警 for( group in new_off_count) { let od = tm_off_count[group]; let nw = new_off_count[group]; let exc_count = od ? 
od['exc_count']+1 : 1; let min_between = (exc_count-1)*INTV_MIN; nw['exc_count'] = exc_count; // console.log('min_between', min_between); if(!od || min_between%ALM_BETWEEN==0) { let sensor = { name: '共计'+ nw.num+ '个传感器', group: group, id: group+ '_group_offline', loc: groupName(group)+'-全库范围', }; let check = { level: 2, duration: nw.min_off, measure: 'offline', is_reset: false, standard: '数据停止更新', }; sendAlarm(sensor, check, users, blocks); } } // 保存新离线数量 tm_off_count = new_off_count; } /** * 清理过期屏蔽项 * * 注意: 要全部清除屏蔽项时, 不能清空文件内容, 请手动把内容设置为 {} */ function cleanBlocks() { let count = 0; let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); console.log('------ cleanBlocks start ------------'); for( sid in blocks) { for( uid in blocks[sid]) { let until = new Date(blocks[sid][uid]); let del = ''; if( until< new Date()) { delete blocks[sid][uid]; count++; del = '(deleted)'; } console.log(sid, uid, until.toLocaleString(), del); }
return count; } /** * 发送 POST 请求 */ function postRequest(url, json, callback) { var options = { uri: url, method: 'POST', json: json, }; request(options, callback); } // -- routers ------------------------------------------------------ app.get('/', function (req, res) { setTimeout(() => res.end('Hello sensor!'), Math.random() * 500); }) app.get('/start', function (req, res) { startTimer(res); }); app.get('/stop', function (req, res) { stopTimer(res); }); /** * 临时屏蔽报警(表单) */ app.get('/blockme', function (req, res) { let sid = req.query.sid; let uid = req.query.uid; if(!sid || !uid) { return res.send('错误: 参数错误!'); } let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); let until = blocks&&blocks[sid]&&blocks[sid][uid]? new Date(blocks[sid][uid]): null; if(until && until > new Date()) { res.render('blockme', {sid, uid, until: until.toLocaleString()}); } else { res.render('blockme', {sid, uid, until:null}); } }); /** * 临时屏蔽报警(提交) */ app.post('/blockme', function (req, res) { let after = parseInt(req.body.after); let sid = req.body.sid; let uid = req.body.uid; if(!sid || !uid || !after) { return res.send('错误: 参数错误!'); } let until = new Date().addHours(after); let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); blocks[sid] = {}; blocks[sid][uid] = until; fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks)); res.redirect('/blockme?sid='+ sid+ '&uid='+ uid+ '&v=2'); }); /** * 手动清理临时屏蔽报警过期项 */ app.get('/cleanblocks', function (req, res) { let c = cleanBlocks(); res.send(c+ ' expires cleaned!(see logs)'); }); // -- tests ------------------------------------------------------ app.get('/test', function (req, res) { // q format = ` // SELECT last(value) FROM "li.base.v1.yum.fac" WHERE "where"='shenyang02' AND "where_type"='fac' AND "what_type"='env' AND "what1"='frig' AND "what2"='mt' AND "what3"='air' AND "output"='temp_c' AND "tool"='sample' AND "time_step"='5t' AND "at"='dock' GROUP BY "what4" // `; let tag1 = 
"measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang02,what_type=env,what1=frig,what2=mt,what3=air,what4=h01,output=temp_c,tool=sample,time_step=5t,at=dock,where1=dl_1_1,where3=4m"; let tag2 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang01,what_type=env,what1=frig,what2=lt,what3=air,what4=s10,output=temp_c,tool=sample,time_step=5t,at=room,where1=rl_1_1,where3=8_5m"; let tag3 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang01,what_type=sys,what1=frig,what2=rcs,what3=offline,what4=A_power,output=status_u,tool=sample,time_step=5t"; let m = parseTagMesa(tag1); let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; // 批量查询: // m = parseTagMesa(tag2); // q += `;SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; console.log('q:', q); influx.query(q).then(result => { res.json(result) }).catch(err => { res.status(500).send(err.stack) }) //result: [{"time":"2018-05-07T09:00:00.000Z","last":14}] //result: [[{"time":"2018-05-09T06:15:00.000Z","last":12.9}],[{"time":"2018-05-09T06:20:00.000Z","last":-18.5}]] }); app.get('/test-batch', function (req, res) { let offlines = sensors.filter(function(s) { return s.offline; }); console.log('offlines', offlines); console.time('sensorBatchValues'); sensorBatchValues(offlines, function(err, sensors) { console.log('Result sensors', sensors); console.timeEnd('sensorBatchValues'); res.send('SensorBatchValues finished!'); }); }); app.get('/test-blocks-r', function (req, res) { let file = './blocks_t.json' let result = JSON.parse(fs.readFileSync(file)); let val = result['sydc2_lt_dock_01']['102']; let tm = new Date(val).toLocaleString(); let later = (new Date(val) > new Date())? 
'after now': 'before now'; res.send(tm +','+ later); }); app.get('/test-blocks-w', function (req, res) { let file = './blocks_t.json' let result = JSON.parse(fs.readFileSync(file)); result['sydc2_lt_dock_01']['102'] = new Date().addHours(2); fs.writeFileSync(file, JSON.stringify(result)); res.send('file written!'); }); app.get('/outdata', function (req, res) { out_value = req.query.v || 0; out_date = req.query.d || 0; res.send('out_value = '+ out_value+ ' out_date='+ out_date); }); app.get('/auto-start', function (req, res) { autoStart(true); res.send('done!'); }); app.get('/array', function (req, res) { let test_count = {}; test_count[1] = 1; test_count[2] = 2; console.log('test_count', test_count); test_count[1]++; console.log('test_count', test_count); res.send('done!'); }); app.get('/test8hr', function (req, res) { pool.query('SELECT count(*) AS cnt FROM users', function (error, results, fields) { if (error) throw error; res.send('The count is: '+ results[0].cnt); }); }); app.get('/test-request', function (req1, res1) { let json = { "token":"20185523", "mobile":"13011112222,13072168298", "firstline":"POSTMAN设备 温度超标!数值:15 标准:0~10", "level_name":"报警", "level_color":"#F83A22", "curtime":"2018-5-16 13:15", "location":"上海一号库", "contact":"测 138****2345", "workorder":"311429", "lastline":"已持续5小时, 请紧急处理!" }; postRequest(KPI_SERVICE, json, function(err, resp, body) { if(err) console.log('Remote:', err); else console.log('Remote:', resp.statusCode, body); }); res1.send('done!'); });
} fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks));
random_line_split
app.js
const Influx = require('influx') const express = require('express') const http = require('http') const os = require('os') const path = require('path'); const bodyParser = require('body-parser'); const request = require('request'); const app = express(); // view engine setup app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); app.use(bodyParser.urlencoded({ extended: false })); const influx = new Influx.InfluxDB({ database: 'flux_yum', protocol: 'https', host: 'awsflux01.base.li', port: 18086, username: 'impact', password: 'BNMghj!' }); //console.log('Create influx:', influx); // setup mysql const mysql = require('mysql'); const pool = mysql.createPool({ connectionLimit : 10, host : 'localhost', user : 'fire_alarm', password : 'fire_alarm', database : 'fire_alarm' }); http.createServer(app).listen(3100, function () { console.log('Listening on port 3100') }) Date.prototype.addHours = function(h) { this.setTime(this.getTime() + (h*60*60*1000)); return this; } // 对Date的扩展,将 Date 转化为指定格式的String // 月(M)、日(d)、小时(h)、分(m)、秒(s)、季度(q) 可以用 1-2 个占位符, // 年(y)可以用 1-4 个占位符,毫秒(S)只能用 1 个占位符(是 1-3 位的数字) // 例子: // (new Date()).formatTime("yyyy-MM-dd hh:mm:ss.S") ==> 2006-07-02 08:09:04.423 // (new Date()).formatTime("yyyy-M-d h:m:s.S") ==> 2006-7-2 8:9:4.18 Date.prototype.formatTime = function (fmt) { //author: meizz var o = { "M+": this.getMonth() + 1, //月份 "d+": this.getDate(), //日 "h+": this.getHours(), //小时 "m+": this.getMinutes(), //分 "s+": this.getSeconds(), //秒 "q+": Math.floor((this.getMonth() + 3) / 3), //季度 "S": this.getMilliseconds() //毫秒 }; if (/(y+)/.test(fmt)) fmt = fmt.replace(RegExp.$1, (this.getFullYear() + "").substr(4 - RegExp.$1.length)); for (var k in o) if (new RegExp("(" + k + ")").test(fmt)) fmt = fmt.replace(RegExp.$1, (RegExp.$1.length == 1) ? 
(o[k]) : (("00" + o[k]).substr(("" + o[k]).length))); return fmt; } function groupBy(xs, key) { return xs.reduce(function(rv, x) { (rv[x[key]] = rv[x[key]] || []).push(x); return rv; }, {}); }; function minOffsetNow(date) { date = date || new Date(); return parseInt((new Date().getTime() - date.getTime())/(60*1000)); } /** * 全局数据定义 */ var kpis = require('./kpis'); var sensors = require('./sensors'); var users = require('./users'); var timer = 0; var out_value = 0; var out_date = 0; var tm_off_count = {}; const INTV_MIN = 5; // 查询间隔 min const MIN_SECS = 2; // 测试时可调到 2 默认 60 const ALM_AFTER = 5; // 累积多久时长后报警 min const ALM_BETWEEN = 30; // 多次报警时长间隔 min const ALARM_LEVEL = 1; // 什么等级开始报警, 默认1 const BLOCK_FILE = './blocks.json'; // const WX_MSG_URL = 'http://localhost:3100/blockme'; const WX_MSG_URL = 'http://2whzur.natappfree.cc/blockme'; const KPI_SERVICE = 'http://localhost:3119/kpi/alarm'; /** * 获取库名 */ function groupName(group) { switch(group) { case 'sydc1': return '一号库'; case 'sydc2': return '二号库'; default: return group; } } /** * 解析 tag_mesa 为查询参数 */ function parseTagMesa (tag_mesa) { let measurement = ''; let mesa_where = ''; let mesa_vals = ''; let m = {}, m2 = {}; let qarray = []; let chunks = tag_mesa.split(','); chunks.forEach((val, key, arr) => { let c = val.split('='); m[ c[0] ] = c[1]; }); m2 = Object.assign({}, m); if( 'measurement' in m) { measurement = `"${m['measurement']}"`; delete m.measurement; } for( let k in m) { qarray.push( `"${k}"='${m[k]}'`); } mesa_where = qarray.join(' AND '); return { measurement, mesa_where, mesa_vals: m2 }; } /** * 微信 API 初始化 */ let appId = "wx32e64b2b2f8f20df"; // bstwo let appSecret = "905848f29979a4859d2468f76626aa88"; var WechatAPI = require('wechat-api') var fs = require('fs') var wechatApi = new WechatAPI(appId, appSecret, function (callback) { fs.readFile('access_token.txt', 'utf8', function (err, txt) { if (err) {return callback(null, null)} callback(null, JSON.parse(txt)) }) }, function (token, callback) { 
fs.writeFile('access_token.txt', JSON.stringify(token), callback) }) /** * 开启定时器 */ function startTimer(res) { let pp = ''; if( timer) { pp = 'Error: please stop previous timer.'; res? res.send(pp): console.err(pp); } else { pp = 'Timer start @'+ INTV_MIN + ' min'; timer = setInterval(checkSensors, INTV_MIN* 1000* MIN_SECS /*60*/); console.log(pp); res? res.send(pp): null; // At first checkSensors(); } } /** * 结束定时器 */ function stopTimer(res) { if( timer) { clearInterval(timer); timer = 0; res? res.send('Timer stop.'): null; } else { res? res.send('No timer.'): null; } } /** * 自动启动首次(分钟需5的倍数) */ function autoStart(enable) { if(enable) { let m1 = new Date().getMinutes(); let m2 = Math.ceil(m1/5)*5; let df = (m2-m1)*1000* MIN_SECS; console.log('AutoStart waits:', m2-m1, 'min to start.'); if(!timer) { setTimeout(startTimer, df); } else { console.log('Error: please stop previous timer.'); } } } autoStart(MIN_SECS == 60); /** * 传感器检查流程 */ function checkSensors() { console.log('------ CheckSensors start '+ new Date().toLocaleString()+ '-------'); // 找出在线传感器 let onlines = sensors.filter(function(s) { return !s.offline; }); // 批量查询传感器 sensorBatchValues(onlines, function(err, sensors) { if(err) { return; } let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); sensors.forEach( function(sensor) { let pt = sensor.point, kpi = kpis[sensor.kpi]; let ck = checkKpi(pt, kpi); let ex = exceedCount(sensor, ck); if( ex) { sendAlarm(sensor, ck, users, blocks); } console.log(sensor.name+':', ck.value, 'min-off:', sensor.point.min_off, ck.exceed? 
'exceed:'+ck.exceed+ ' count: ' +sensor.exc_count[ck.level] : ''); }); alarmTmOfflineSensors(sensors, users, blocks); }); } /** * 读取传感器值(批量) */ function sensorBatchValues(sensors, callback) { let qs = []; sensors.forEach( function(sensor) { let m = parseTagMesa(sensor.tag_mesa); qs.push(`SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`) }); let q = qs.join('; '); //console.log('batch q:', q); influx.query(q).then(result => { // 注: 结果实际不符合 json 格式, 可用 stringify 转 //console.log('result', JSON.stringify(result)); if( sensors.length == 1) { result = [result]; // 一个传感器时,必须包装成二维 } sensors.forEach( function(sensor, idx) { if(sensor.test) { sensor.point = { time: out_date==0? new Date(): new Date(out_date), last: out_value, } } else { sensor.point = (result&&result.length>idx&&result[idx].length>0) ? result[idx][0]: {}; } }); callback(null, sensors); }).catch(err => { console.error('sensorBatchValues err:', err); callback(err); }); } /** * 读取传感器值(单次) */ function sensorValue(sensor, callback) { let m = parseTagMesa(sensor.tag_mesa); let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; //console.log('q:', q); influx.query(q).then(result => { callback(null, (result&&result.length>0)? 
result[0]: {}); }).catch(err => { console.error('SensorValue err:', err); callback(err); }); } /** * 计算 KPI */ function checkKpi(point, kpi) { let ck = { src: kpi.src || 'calc', measure: kpi.measure || 'temp', reset_alarm: kpi.reset_alarm, exceed: 0, level: 0, standard: '', is_reset: false, }; //TODO: 来自 point 的其他值 let value = point.last; let time = new Date(point.time.getTime()); //console.log('time', time.toLocaleString()); point.min_off = minOffsetNow(time); ck.tm_offline = point.min_off > 12; if( !kpi.src && !ck.tm_offline) { if( kpi.ra_above && value > kpi.ra_above) { ck.exceed = 2; ck.standard = kpi.ar_below+ '~'+ kpi.ra_above; } else if( kpi.ag_above && value > kpi.ag_above) { ck.exceed = 1; ck.standard = kpi.ga_below+ '~'+ kpi.ag_above; } else if( kpi.ar_below && value < kpi.ar_below) { ck.exceed = -2; ck.standard = kpi.ar_below+ '~'+ kpi.ra_above; } else if( kpi.ga_below && value < kpi.ga_below) { ck.exceed = -1; ck.standard = kpi.ga_below+ '~'+ kpi.ag_above; } } else if( kpi.src == 'read' && !ck.tm_offline) { ck.exceed = value>0? 2: 0; ck.standard = kpi.standard; } ck.level = Math.abs(ck.exceed); ck.value = value; ck.time = time; return ck; } /** * 统计超限次数 * * @return true/false 是否累积到报警程度 */ function exceedCount(sensor, check) { let lvl = check.level; sensor.exc_count = sensor.exc_count || {}; sensor.exc_count[lvl] = sensor.exc_count[lvl] || 0; sensor.tm_offline = check.tm_offline; if(sensor.tm_offline) { return false; // tm_offline 数量太多, 1.不单独报警; 2.不累积计数 } if( sensor.exc_count[lvl] == 0 && check.exceed == 0) { return false; } // 0 0 // 1 1 // 累加计数 if(check.exceed == 0 || check.level >= ALARM_LEVEL) { for(let lo=1; lo<=lvl; lo++) sensor.exc_count[lo]++; setDuration(check, sensor.exc_count[lvl]); } else { return false; //不计数 } if( check.exceed == 0) { // 复位情况:也允许发送报警 check.is_reset = (sensor.exc_count[lvl] > 1); for(let lo=1; lo<=lvl; lo++) sensor.exc_count[lo] = 0; return ( check.is_reset && check.reset_alarm)? 
true: false; } else { // 超限情况:从计数判断 let tms_a = (check.src=='read')? 0: parseInt(ALM_AFTER / INTV_MIN); // ALM_AFTER 转为次数, read 的不用等待 let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 6; let real_tms = sensor.exc_count[lvl]-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1
{ return false; } else { real_tms -= tms_a; return (real_tms%tms_b == 0)? true: false; } } /* if( check.exceed === 0 || check.level < ALARM_LEVEL) { check.is_reset = (sensor.exc_count > 0); setDuration(check, sensor.exc_count); sensor.exc_count = 0; // 复位也允许发送报警 return ( check.is_reset && check.reset_alarm)? true: false; } else { // 累加计数 if(!sensor.exc_count) { sensor.exc_count = 1; } else { sensor.exc_count++; } setDuration(check, sensor.exc_count); // 计数判断 let tms_a = parseInt(ALM_AFTER / INTV_MIN); let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 2; let real_tms = sensor.exc_count-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断 if(real_tms < tms_a) { return false; } else { real_tms -= tms_a; return (real_tms%tms_b == 0)? true: false; } } */ } /** * 计算 或 设置时长 */ function setDuration(check, exc_count) { exc_count = exc_count || 0; if(check.src == 'read') { check.duration = (exc_count<1)? 0: (exc_count-1)*INTV_MIN; } else { check.duration = (exc_count<2)? 0: (exc_count-2)*INTV_MIN; // -2:隐含减去用于确认的第一个 ALM_AFTER 分钟 } //TODO: read 时,来自 point } /** * 格式化下时长 */ function formatDuration(minutes) { minutes = minutes || 0; if(minutes < 120 ) { return minutes+ '分钟'; } else { let hr = parseInt(minutes/60); let min = minutes%60; let day = 0; if( hr >= 24) { day = parseInt(hr/24); hr = hr%24; } return (day? day+ '天':'')+ (hr+ '小时')+ (min? min+ '分钟': ''); } } /** * 发送报警 */ function sendAlarm(sensor, check, users, blocks) { let firstline = makeFirstline(sensor, check); let curtime = new Date().formatTime('yyyy-MM-dd hh:mm'); let level = check.level; let level_name = level==0? '通知': (level==1? '预警': '报警'); let level_color = level==0? '#16A765': (level==1? '#FFAD46': '#F83A22'); let durat_str = formatDuration(check.duration); let lastline = (check.duration? '已持续约 '+ durat_str: '')+ (check.duration&&level? ', ': '') +(level? '请及时处理!':''); console.error('SendAlarm', curtime, sensor.name, check); let to_mobiles = []; users.forEach( function(user) { if(!user.openid) return; // 传感器是否在用户的组? 
if( user.groups.indexOf(sensor.group) == -1) { return; } // 用户是否屏蔽该报警? let sid = sensor.id, uid = user.id; let until = blocks&&blocks[sid]&&blocks[sid][uid]? new Date(blocks[sid][uid]): null; if(until && until> new Date() && level> 0) { //复位通知不能屏蔽 console.log('user <'+ uid+ '> blocks <'+ sid+ '> until', until.toLocaleString()); return; } to_mobiles.push(user.mobile); // 发送微信消息(本地) /* var templateId = 'zOVAEaSZVEHPdRE1KM2uQJy5wPfuWibHSU6NmXpIqF8'; var url = WX_MSG_URL+ `?sid=${sid}&uid=${uid}`; var data = { "first": { "value": firstline, "color":"#173177" }, "keyword1":{ "value": level_name, "color": level_color }, "keyword2": { "value": curtime, "color":"#173177" }, "keyword3": { "value": sensor.loc, "color":"#173177" }, "keyword4": { "value": '何 138****1234', "color":"#173177" }, "keyword5": { "value": 'n/a', "color":"#173177" }, "remark":{ "value": lastline, "color":"#173177" } }; wechatApi.sendTemplate(user.openid, templateId, url, data, function(err, result) { //console.log('sendTemplate err+result:', err, result) }) */ }); let mobiles = to_mobiles.join(','); let json = { "token":"20185523", "mobile": mobiles, "firstline": firstline, "level_name": level_name, "level_color": level_color, "curtime": curtime, "location": sensor.loc, "contact": "何 138****2345", "workorder": "n/a", "lastline": lastline }; postRequest(KPI_SERVICE, json, function(err, resp, body) { if(err) console.log('Remote:', err); else console.log('Remote:', resp.statusCode, body); }); } /** * 生成报警主提示 */ function makeFirstline(sensor, check) { //eg. sensor.name+ '温度超标!数值:'+ check.value+ ' 标准:'+ check.standard, let r = sensor.name+ ' '; switch (check.measure) { case 'temp': r += '温度' + (!check.is_reset? '超标': '复位')+ '!'; r += '数值:' + check.value+ ' 标准:'+ check.standard; break; case 'offline': r += '离线' + (!check.is_reset? '报警': '复位')+ '!'; r += !check.is_reset? 
'标准:'+ check.standard: ''; break; } return r; } /** * 统计 tm 离线传感器并报警 */ function alarmTmOfflineSensors(sensors, users, blocks) { let offlines = sensors.filter(function(s) { return s.tm_offline; }); let new_off_count = {}; let new_off_snrs = groupBy(offlines, 'group'); for( group in new_off_snrs) { new_off_count[group] = {}; new_off_count[group]['num'] = new_off_snrs[group].length; new_off_count[group]['min_off'] = new_off_snrs[group][0].point.min_off; } console.log('old_off_count', tm_off_count); console.log('new_off_count', new_off_count); // 查找离线复位 for( group in tm_off_count) { let od = tm_off_count[group]; let nw = new_off_count[group]; if(!nw) { let sensor = { name: '共计'+ od.num+ '个传感器', group: group, id: group+ '_group_offline', loc: groupName(group)+'-全库范围', }; let check = { level: 0, duration: od.min_off+ INTV_MIN, measure: 'offline', is_reset: true, standard: '', }; sendAlarm(sensor, check, users, blocks); } } // 查找离线报警 for( group in new_off_count) { let od = tm_off_count[group]; let nw = new_off_count[group]; let exc_count = od ? 
od['exc_count']+1 : 1; let min_between = (exc_count-1)*INTV_MIN; nw['exc_count'] = exc_count; // console.log('min_between', min_between); if(!od || min_between%ALM_BETWEEN==0) { let sensor = { name: '共计'+ nw.num+ '个传感器', group: group, id: group+ '_group_offline', loc: groupName(group)+'-全库范围', }; let check = { level: 2, duration: nw.min_off, measure: 'offline', is_reset: false, standard: '数据停止更新', }; sendAlarm(sensor, check, users, blocks); } } // 保存新离线数量 tm_off_count = new_off_count; } /** * 清理过期屏蔽项 * * 注意: 要全部清除屏蔽项时, 不能清空文件内容, 请手动把内容设置为 {} */ function cleanBlocks() { let count = 0; let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); console.log('------ cleanBlocks start ------------'); for( sid in blocks) { for( uid in blocks[sid]) { let until = new Date(blocks[sid][uid]); let del = ''; if( until< new Date()) { delete blocks[sid][uid]; count++; del = '(deleted)'; } console.log(sid, uid, until.toLocaleString(), del); } } fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks)); return count; } /** * 发送 POST 请求 */ function postRequest(url, json, callback) { var options = { uri: url, method: 'POST', json: json, }; request(options, callback); } // -- routers ------------------------------------------------------ app.get('/', function (req, res) { setTimeout(() => res.end('Hello sensor!'), Math.random() * 500); }) app.get('/start', function (req, res) { startTimer(res); }); app.get('/stop', function (req, res) { stopTimer(res); }); /** * 临时屏蔽报警(表单) */ app.get('/blockme', function (req, res) { let sid = req.query.sid; let uid = req.query.uid; if(!sid || !uid) { return res.send('错误: 参数错误!'); } let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); let until = blocks&&blocks[sid]&&blocks[sid][uid]? 
new Date(blocks[sid][uid]): null; if(until && until > new Date()) { res.render('blockme', {sid, uid, until: until.toLocaleString()}); } else { res.render('blockme', {sid, uid, until:null}); } }); /** * 临时屏蔽报警(提交) */ app.post('/blockme', function (req, res) { let after = parseInt(req.body.after); let sid = req.body.sid; let uid = req.body.uid; if(!sid || !uid || !after) { return res.send('错误: 参数错误!'); } let until = new Date().addHours(after); let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); blocks[sid] = {}; blocks[sid][uid] = until; fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks)); res.redirect('/blockme?sid='+ sid+ '&uid='+ uid+ '&v=2'); }); /** * 手动清理临时屏蔽报警过期项 */ app.get('/cleanblocks', function (req, res) { let c = cleanBlocks(); res.send(c+ ' expires cleaned!(see logs)'); }); // -- tests ------------------------------------------------------ app.get('/test', function (req, res) { // q format = ` // SELECT last(value) FROM "li.base.v1.yum.fac" WHERE "where"='shenyang02' AND "where_type"='fac' AND "what_type"='env' AND "what1"='frig' AND "what2"='mt' AND "what3"='air' AND "output"='temp_c' AND "tool"='sample' AND "time_step"='5t' AND "at"='dock' GROUP BY "what4" // `; let tag1 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang02,what_type=env,what1=frig,what2=mt,what3=air,what4=h01,output=temp_c,tool=sample,time_step=5t,at=dock,where1=dl_1_1,where3=4m"; let tag2 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang01,what_type=env,what1=frig,what2=lt,what3=air,what4=s10,output=temp_c,tool=sample,time_step=5t,at=room,where1=rl_1_1,where3=8_5m"; let tag3 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang01,what_type=sys,what1=frig,what2=rcs,what3=offline,what4=A_power,output=status_u,tool=sample,time_step=5t"; let m = parseTagMesa(tag1); let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; // 批量查询: // m = parseTagMesa(tag2); // q += `;SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; 
console.log('q:', q); influx.query(q).then(result => { res.json(result) }).catch(err => { res.status(500).send(err.stack) }) //result: [{"time":"2018-05-07T09:00:00.000Z","last":14}] //result: [[{"time":"2018-05-09T06:15:00.000Z","last":12.9}],[{"time":"2018-05-09T06:20:00.000Z","last":-18.5}]] }); app.get('/test-batch', function (req, res) { let offlines = sensors.filter(function(s) { return s.offline; }); console.log('offlines', offlines); console.time('sensorBatchValues'); sensorBatchValues(offlines, function(err, sensors) { console.log('Result sensors', sensors); console.timeEnd('sensorBatchValues'); res.send('SensorBatchValues finished!'); }); }); app.get('/test-blocks-r', function (req, res) { let file = './blocks_t.json' let result = JSON.parse(fs.readFileSync(file)); let val = result['sydc2_lt_dock_01']['102']; let tm = new Date(val).toLocaleString(); let later = (new Date(val) > new Date())? 'after now': 'before now'; res.send(tm +','+ later); }); app.get('/test-blocks-w', function (req, res) { let file = './blocks_t.json' let result = JSON.parse(fs.readFileSync(file)); result['sydc2_lt_dock_01']['102'] = new Date().addHours(2); fs.writeFileSync(file, JSON.stringify(result)); res.send('file written!'); }); app.get('/outdata', function (req, res) { out_value = req.query.v || 0; out_date = req.query.d || 0; res.send('out_value = '+ out_value+ ' out_date='+ out_date); }); app.get('/auto-start', function (req, res) { autoStart(true); res.send('done!'); }); app.get('/array', function (req, res) { let test_count = {}; test_count[1] = 1; test_count[2] = 2; console.log('test_count', test_count); test_count[1]++; console.log('test_count', test_count); res.send('done!'); }); app.get('/test8hr', function (req, res) { pool.query('SELECT count(*) AS cnt FROM users', function (error, results, fields) { if (error) throw error; res.send('The count is: '+ results[0].cnt); }); }); app.get('/test-request', function (req1, res1) { let json = { "token":"20185523", 
"mobile":"13011112222,13072168298", "firstline":"POSTMAN设备 温度超标!数值:15 标准:0~10", "level_name":"报警", "level_color":"#F83A22", "curtime":"2018-5-16 13:15", "location":"上海一号库", "contact":"测 138****2345", "workorder":"311429", "lastline":"已持续5小时, 请紧急处理!" }; postRequest(KPI_SERVICE, json, function(err, resp, body) { if(err) console.log('Remote:', err); else console.log('Remote:', resp.statusCode, body); }); res1.send('done!'); });
再判断 if(real_tms < tms_a)
conditional_block
app.js
const Influx = require('influx') const express = require('express') const http = require('http') const os = require('os') const path = require('path'); const bodyParser = require('body-parser'); const request = require('request'); const app = express(); // view engine setup app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); app.use(bodyParser.urlencoded({ extended: false })); const influx = new Influx.InfluxDB({ database: 'flux_yum', protocol: 'https', host: 'awsflux01.base.li', port: 18086, username: 'impact', password: 'BNMghj!' }); //console.log('Create influx:', influx); // setup mysql const mysql = require('mysql'); const pool = mysql.createPool({ connectionLimit : 10, host : 'localhost', user : 'fire_alarm', password : 'fire_alarm', database : 'fire_alarm' }); http.createServer(app).listen(3100, function () { console.log('Listening on port 3100') }) Date.prototype.addHours = function(h) { this.setTime(this.getTime() + (h*60*60*1000)); return this; } // 对Date的扩展,将 Date 转化为指定格式的String // 月(M)、日(d)、小时(h)、分(m)、秒(s)、季度(q) 可以用 1-2 个占位符, // 年(y)可以用 1-4 个占位符,毫秒(S)只能用 1 个占位符(是 1-3 位的数字) // 例子: // (new Date()).formatTime("yyyy-MM-dd hh:mm:ss.S") ==> 2006-07-02 08:09:04.423 // (new Date()).formatTime("yyyy-M-d h:m:s.S") ==> 2006-7-2 8:9:4.18 Date.prototype.formatTime = function (fmt) { //author: meizz var o = { "M+": this.getMonth() + 1, //月份 "d+": this.getDate(), //日 "h+": this.getHours(), //小时 "m+": this.getMinutes(), //分 "s+": this.getSeconds(), //秒 "q+": Math.floor((this.getMonth() + 3) / 3), //季度 "S": this.getMilliseconds() //毫秒 }; if (/(y+)/.test(fmt)) fmt = fmt.replace(RegExp.$1, (this.getFullYear() + "").substr(4 - RegExp.$1.length)); for (var k in o) if (new RegExp("(" + k + ")").test(fmt)) fmt = fmt.replace(RegExp.$1, (RegExp.$1.length == 1) ? 
(o[k]) : (("00" + o[k]).substr(("" + o[k]).length))); return fmt; } function groupBy(xs, key) { return xs.reduce(function(rv, x) { (rv[x[key]] = rv[x[key]] || []).push(x); return rv; }, {}); }; function minOffsetNow(date) { date = date || new Date(); return parseInt((new Date().getTime() - date.getTime())/(60*1000)); } /** * 全局数据定义 */ var kpis = require('./kpis'); var sensors = require('./sensors'); var users = require('./users'); var timer = 0; var out_value = 0; var out_date = 0; var tm_off_count = {}; const INTV_MIN = 5; // 查询间隔 min const MIN_SECS = 2; // 测试时可调到 2 默认 60 const ALM_AFTER = 5; // 累积多久时长后报警 min const ALM_BETWEEN = 30; // 多次报警时长间隔 min const ALARM_LEVEL = 1; // 什么等级开始报警, 默认1 const BLOCK_FILE = './blocks.json'; // const WX_MSG_URL = 'http://localhost:3100/blockme'; const WX_MSG_URL = 'http://2whzur.natappfree.cc/blockme'; const KPI_SERVICE = 'http://localhost:3119/kpi/alarm'; /** * 获取库名 */ function groupName(group) { switch(group) { case 'sydc1': return '一号库'; case 'sydc2': return '二号库'; default: return group; } } /** * 解析 tag_mesa 为查询参数 */ function parseTagMesa (tag_mesa) { let measurement = ''; let mesa_where = ''; let mesa_vals = ''; let m = {}, m2 = {}; let qarray = []; let chunks = tag_mesa.split(','); chunks.forEach((val, key, arr) => { let c = val.split('='); m[ c[0] ] = c[1]; }); m2 = Object.assign({}, m); if( 'measurement' in m) { measurement = `"${m['measurement']}"`; delete m.measurement; } for( let k in m) { qarray.push( `"${k}"='${m[k]}'`); } mesa_where = qarray.join(' AND '); return { measurement, mesa_where, mesa_vals: m2 }; } /** * 微信 API 初始化 */ let appId = "wx32e64b2b2f8f20df"; // bstwo let appSecret = "905848f29979a4859d2468f76626aa88"; var WechatAPI = require('wechat-api') var fs = require('fs') var wechatApi = new WechatAPI(appId, appSecret, function (callback) { fs.readFile('access_token.txt', 'utf8', function (err, txt) { if (err) {return callback(null, null)} callback(null, JSON.parse(txt)) }) }, function (token, callback) { 
fs.writeFile('access_token.txt', JSON.stringify(token), callback) }) /** * 开启定时器 */ function startTimer(res) { let pp = ''; if( timer) { pp = 'Error: please stop previous timer.'; res? res.send(pp): console.err(pp); } else { pp = 'Timer start @'+ INTV_MIN + ' min'; timer = setInterval(checkSensors, INTV_MIN* 1000* MIN_SECS /*60*/); console.log(pp); res? res.send(pp): null; // At first checkSensors(); } } /** * 结束定时器 */ function stopTimer(res) { if( timer) { clearInterval(timer); timer = 0; res? res.send('Timer stop.'): null; } else { res? res.send('No timer.'): null; } } /** * 自动启动首次(分钟需5的倍数) */ function autoStart(enable) { if(enable) { let m1 = new Date().getMinutes(); let m2 = Math.ceil(m1/5)*5; let df = (m2-m1)*1000* MIN_SECS; console.log('AutoStart waits:', m2-m1, 'min to start.'); if(!timer) { setTimeout(startTimer, df); } else { console.log('Error: please stop previous timer.'); } } } autoStart(MIN_SECS == 60); /** * 传感器检查流程 */ function checkSensors() { console.log('------ CheckSensors start '+ new Date().toLocaleString()+ '-------'); // 找出在线传感器 let onlines = sensors.filter(function(s) { return !s.offline; }); // 批量查询传感器 sensorBatchValues(onlines, function(err, sensors) { if(err) { return; } let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); sensors.forEach( function(sensor) { let pt = sensor.point, kpi = kpis[sensor.kpi]; let ck = checkKpi(pt, kpi); let ex = exceedCount(sensor, ck); if( ex) { sendAlarm(sensor, ck, users, blocks); } console.log(sensor.name+':', ck.value, 'min-off:', sensor.point.min_off, ck.exceed? 
'exceed:'+ck.exceed+ ' count: ' +sensor.exc_count[ck.level] : ''); }); alarmTmOfflineSensors(sensors, users, blocks); }); } /** * 读取传感器值(批量) */ function sensorBatchValues(sensors, callback) { let qs = []; sensors.forEach( function(sensor) { let m = parseTagMesa(sensor.tag_mesa); qs.push(`SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`) }); let q = qs.join('; '); //console.log('batch q:', q); influx.query(q).then(result => { // 注: 结果实际不符合 json 格式, 可用 stringify 转 //console.log('result', JSON.stringify(result)); if( sensors.length == 1) { result = [result]; // 一个传感器时,必须包装成二维 } sensors.forEach( function(sensor, idx) { if(sensor.test) { sensor.point = { time: out_date==0? new Date(): new Date(out_date), last: out_value, } } else { sensor.point = (result&&result.length>idx&&result[idx].length>0) ? result[idx][0]: {}; } }); callback(null, sensors); }).catch(err => { console.error('sensorBatchValues err:', err); callback(err); }); } /** * 读取传感器值(单次) */ function sensorValue(sensor, callback) { let m = parseTagMesa(sensor.tag_mesa); let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; //console.log('q:', q); influx.query(q).then(result => { callback(null, (result&&result.length>0)? 
result[0]: {}); }).catch(err => { console.error('SensorValue err:', err); callback(err); }); } /** * 计算 KPI */ function checkKpi(point, kpi) { let ck = { src: kpi.src || 'calc', measure: kpi.measure || 'temp', reset_alarm: kpi.reset_alarm, exceed: 0, level: 0, standard: '', is_reset: false, }; //TODO: 来自 point 的其他值 let value = point.last; let time = new Date(point.time.getTime()); //console.log('time', time.toLocaleString()); point.min_off = minOffsetNow(time); ck.tm_offline = point.min_off > 12; if( !kpi.src && !ck.tm_offline) { if( kpi.ra_above && value > kpi.ra_above) { ck.exceed = 2; ck.standard = kpi.ar_below+ '~'+ kpi.ra_above; } else if( kpi.ag_above && value > kpi.ag_above) { ck.exceed = 1; ck.standard = kpi.ga_below+ '~'+ kpi.ag_above; } else if( kpi.ar_below && value < kpi.ar_below) { ck.exceed = -2; ck.standard = kpi.ar_below+ '~'+ kpi.ra_above; } else if( kpi.ga_below && value < kpi.ga_below) { ck.exceed = -1; ck.standard = kpi.ga_below+ '~'+ kpi.ag_above; } } else if( kpi.src == 'read' && !ck.tm_offline) { ck.exceed = value>0? 2: 0; ck.standard = kpi.standard; } ck.level = Math.abs(ck.exceed); ck.value = value; ck.time = time; return ck; } /** * 统计超限次数 * * @return true/false 是否累积到报警程度 */ function exceedCount(sensor, check) { let lvl = check.level; sensor.exc_count = sensor.exc_count || {}; sensor.exc_count[lvl] = sensor.exc_count[lvl] || 0; sensor.tm_offline = check.tm_offline; if(sensor.tm_offline) { return false; // tm_offline 数量太多, 1.不单独报警; 2.不累积计数 } if( sensor.exc_count[lvl] == 0 && check.exceed == 0) { return false; } // 0 0 // 1 1 // 累加计数 if(check.exceed == 0 || check.level >= ALARM_LEVEL) { for(let lo=1; lo<=lvl; lo++) sensor.exc_count[lo]++; setDuration(check, sensor.exc_count[lvl]); } else { return false; //不计数 } if( check.exceed == 0) { // 复位情况:也允许发送报警 check.is_reset = (sensor.exc_count[lvl] > 1); for(let lo=1; lo<=lvl; lo++) sensor.exc_count[lo] = 0; return ( check.is_reset && check.reset_alarm)? 
true: false; } else { // 超限情况:从计数判断 let tms_a = (check.src=='read')? 0: parseInt(ALM_AFTER / INTV_MIN); // ALM_AFTER 转为次数, read 的不用等待 let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 6; let real_tms = sensor.exc_count[lvl]-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断 if(real_tms < tms_a) { return false; } else { real_tms -= tms_a; return (real_tms%tms_b == 0)? true: false; } } /* if( check.exceed === 0 || check.level < ALARM_LEVEL) { check.is_reset = (sensor.exc_count > 0); setDuration(check, sensor.exc_count); sensor.exc_count = 0; // 复位也允许发送报警 return ( check.is_reset && check.reset_alarm)? true: false; } else { // 累加计数 if(!sensor.exc_count) { sensor.exc_count = 1; } else { sensor.exc_count++; } setDuration(check, sensor.exc_count); // 计数判断 let tms_a = parseInt(ALM_AFTER / INTV_MIN); let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 2; let real_tms = sensor.exc_count-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断 if(real_tms < tms_a) { return false; } else { real_tms -= tms_a; return (real_tms%tms_b == 0)? true: false; } } */ } /** * 计算 或 设置时长 */ function setDuration(check, exc_count) { exc_count = exc_count || 0; if(check.src == 'read') { check.duration = (exc_count<1)? 0: (exc_count-1)*INTV_MIN; } else { check.duration = (exc_count<2)? 0: (exc_count-2)*INTV_MIN; // -2:隐含减去用于确认的第一个 ALM_AFTER 分钟 } //TODO: read 时,来自 point } /** * 格式化下时长 */ function formatDuration(minutes) { minutes = minutes || 0; if(minutes < 120 ) { return minutes+ '分钟'; } else { let hr = parseInt(minutes/60); let min = minutes%60; let day = 0; if( hr >= 24) { day = parseInt(hr/24); hr = hr%24; } return (day? day+ '天':'')+ (hr+ '小时')+ (min? min+ '分钟': ''); } } /** * 发送报警 */ function sendAlarm(sensor, check, users, blocks) { let firstline = makeFirstline(sensor, check); let curtime = new Date().formatTime('yyyy-MM-dd hh:mm'); let level = check.level; let level_name = level==0? '通知': (level==1? '预警': '报警'); let level_color = level==0? '#16A765': (level==1? 
'#FFAD46': '#F83A22'); let durat_str = formatDuration(check.duration); let lastline = (check.duration? '已持续约 '+ durat_str: '')+ (check.duration&&level? ', ': '') +(level? '请及时处理!':''); console.error('SendAlarm', curtime, sensor.name, check); let to_mobiles = []; users.forEach( function(user) { if(!user.openid) return; // 传感器是否在用户的组? if( user.groups.indexOf(sensor.group) == -1) { return; } // 用户是否屏蔽该报警? let sid = sensor.id, uid = user.id; let until = blocks&&blocks[sid]&&blocks[sid][uid]? new Date(blocks[sid][uid]): null; if(until && until> new Date() && level> 0) { //复位通知不能屏蔽 console.log('user <'+ uid+ '> blocks <'+ sid+ '> until', until.toLocaleString()); return; } to_mobiles.push(user.mobile); // 发送微信消息(本地) /* var templateId = 'zOVAEaSZVEHPdRE1KM2uQJy5wPfuWibHSU6NmXpIqF8'; var url = WX_MSG_URL+ `?sid=${sid}&uid=${uid}`; var data = { "first": { "value": firstline, "color":"#173177" }, "keyword1":{ "value": level_name, "color": level_color }, "keyword2": { "value": curtime, "color":"#173177" }, "keyword3": { "value": sensor.loc, "color":"#173177" }, "keyword4": { "value": '何 138****1234', "color":"#173177" }, "keyword5": { "value": 'n/a', "color":"#173177" }, "remark":{ "value": lastline, "color":"#173177" } }; wechatApi.sendTemplate(user.openid, templateId, url, data, function(err, result) { //console.log('sendTemplate err+result:', err, result) }) */ }); let mobiles = to_mobiles.join(','); let json = { "token":"20185523", "mobile": mobiles, "firstline": firstline, "level_name": level_name, "level_color": level_color, "curtime": curtime, "location": sensor.loc, "contact": "何 138****2345", "workorder": "n/a", "lastline": lastline }; postRequest(KPI_SERVICE, json, function(err, resp, body) { if(err) console.log('Remote:', err); else console.log('Remote:', resp.statusCode, body); }); } /** * 生成报警主提示 */ function makeFirstline(sensor, check) { //eg. 
sensor.name+ '温度超标!数值:'+ check.value+ ' 标准:'+ check.standard, let r = sensor.name+ ' '; switch (check.measure) { case 'temp': r += '温度' + (!check.is_reset? '超标': '复位')+ '!'; r += '数值:' + check.value+ ' 标准:'+ check.standard; break; case 'offline': r += '离线' + (!check.is_reset? '报警': '复位')+ '!'; r += !check.is_reset? '标准:'+ check.standard: ''; break; } return r; } /** * 统计 tm 离线传感器并报警 */ function alarmTmOfflineSensors(sensors, users, blocks) { let offlines = sensors.filter(function(s) { return s.tm_offline; }); let new_off_count = {}; let new_off_snrs = groupBy(offlines, 'group'); for( group in new_off_snrs) { new_off_count[group] = {}; new_off_count[group]['num'] = new_off_snrs[group].length; new_off_count[group]['min_off'] = new_off_snrs[group][0].point.min_off; } console.log('old_off_count', tm_off_count); console.log('new_off_count', new_off_count); // 查找离线复位 for( group in tm_off_count) { let od = tm_off_count[group]; let nw = new_off_count[group]; if(!nw) { let sensor = { name: '共计'+ od.num+ '个传感器', group: group, id: group+ '_group_offline', loc: groupName(group)+'-全库范围', }; let check = { level: 0, duration: od.min_off+ INTV_MIN, measure: 'offline', is_reset: true, standard: '', }; sendAlarm(sensor, check, users, blocks); } } // 查找离线报警 for( group in new_off_count) { let od = tm_off_count[group]; let nw = new_off_count[group]; let exc_count = od ? 
od['exc_count']+1 : 1; let min_between = (exc_count-1)*INTV_MIN; nw['exc_count'] = exc_count; // console.log('min_between', min_between); if(!od || min_between%ALM_BETWEEN==0) { let sensor = { name: '共计'+ nw.num+ '个传感器', group: group, id: group+ '_group_offline', loc: groupName(group)+'-全库范围', }; let check = { level: 2, duration: nw.min_off, measure: 'offline', is_reset: false, standard: '数据停止更新', }; sendAlarm(sensor, check, users, blocks); } } // 保存新离线数量 tm_off_count = new_off_count; } /** * 清理过期屏蔽项 * * 注意: 要全部清除屏蔽项时, 不能清空文件内容, 请手动把内容设置为 {} */ function cleanBlocks() { let count = 0; let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); console.log('------ cleanBlocks start ------------'); for( sid in blocks) { for( uid in blocks[sid]) { let until = new Date(blocks[sid][uid]); let del = ''; if( until< new Date()) { delete blocks[sid][uid]; count++; del = '(deleted)'; } console.log(sid, uid, until.toLocaleString(), del); } } fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks)); return count; } /** * 发送 POST 请求 */ function postRequest(url, json, callback) { var options = { uri: url, method: 'POST', json: json, }; request(options, callback); } // -- routers ------------------------------------------------------ app.get('/', function (req, res) { setTimeout(() => res.end('Hello sensor!'), Math.random() * 500); }) app.get('/start', function (req, res) { startTimer(res); }); app.get('/stop', function (req, res) { stopTimer(res); }); /** * 临时屏蔽报警(表单) */ app.get('/blockme', function (req, res) { let sid = req.query.sid; let uid = req.query.uid; if(!sid || !uid) { return res.send('错误: 参数错误!'); } let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); let until = blocks&&blocks[sid]&&blocks[sid][uid]? 
new Date(blocks[sid][uid]): null; if(until && until > new Date()) { res.render('blockme', {sid, uid, until: until.toLocaleString()}); } else { res.render('blockme', {sid, uid, until:null}); } }); /** * 临时屏蔽报警(提交) */ app.post('/blockme', function (req, res) { let after = parseInt(req.body.after); let sid = req.body.sid; let uid = req.body.uid; if(!sid || !uid || !after) { return res.send('错误: 参
ILE)); blocks[sid] = {}; blocks[sid][uid] = until; fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks)); res.redirect('/blockme?sid='+ sid+ '&uid='+ uid+ '&v=2'); }); /** * 手动清理临时屏蔽报警过期项 */ app.get('/cleanblocks', function (req, res) { let c = cleanBlocks(); res.send(c+ ' expires cleaned!(see logs)'); }); // -- tests ------------------------------------------------------ app.get('/test', function (req, res) { // q format = ` // SELECT last(value) FROM "li.base.v1.yum.fac" WHERE "where"='shenyang02' AND "where_type"='fac' AND "what_type"='env' AND "what1"='frig' AND "what2"='mt' AND "what3"='air' AND "output"='temp_c' AND "tool"='sample' AND "time_step"='5t' AND "at"='dock' GROUP BY "what4" // `; let tag1 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang02,what_type=env,what1=frig,what2=mt,what3=air,what4=h01,output=temp_c,tool=sample,time_step=5t,at=dock,where1=dl_1_1,where3=4m"; let tag2 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang01,what_type=env,what1=frig,what2=lt,what3=air,what4=s10,output=temp_c,tool=sample,time_step=5t,at=room,where1=rl_1_1,where3=8_5m"; let tag3 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang01,what_type=sys,what1=frig,what2=rcs,what3=offline,what4=A_power,output=status_u,tool=sample,time_step=5t"; let m = parseTagMesa(tag1); let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; // 批量查询: // m = parseTagMesa(tag2); // q += `;SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; console.log('q:', q); influx.query(q).then(result => { res.json(result) }).catch(err => { res.status(500).send(err.stack) }) //result: [{"time":"2018-05-07T09:00:00.000Z","last":14}] //result: [[{"time":"2018-05-09T06:15:00.000Z","last":12.9}],[{"time":"2018-05-09T06:20:00.000Z","last":-18.5}]] }); app.get('/test-batch', function (req, res) { let offlines = sensors.filter(function(s) { return s.offline; }); console.log('offlines', offlines); console.time('sensorBatchValues'); 
sensorBatchValues(offlines, function(err, sensors) { console.log('Result sensors', sensors); console.timeEnd('sensorBatchValues'); res.send('SensorBatchValues finished!'); }); }); app.get('/test-blocks-r', function (req, res) { let file = './blocks_t.json' let result = JSON.parse(fs.readFileSync(file)); let val = result['sydc2_lt_dock_01']['102']; let tm = new Date(val).toLocaleString(); let later = (new Date(val) > new Date())? 'after now': 'before now'; res.send(tm +','+ later); }); app.get('/test-blocks-w', function (req, res) { let file = './blocks_t.json' let result = JSON.parse(fs.readFileSync(file)); result['sydc2_lt_dock_01']['102'] = new Date().addHours(2); fs.writeFileSync(file, JSON.stringify(result)); res.send('file written!'); }); app.get('/outdata', function (req, res) { out_value = req.query.v || 0; out_date = req.query.d || 0; res.send('out_value = '+ out_value+ ' out_date='+ out_date); }); app.get('/auto-start', function (req, res) { autoStart(true); res.send('done!'); }); app.get('/array', function (req, res) { let test_count = {}; test_count[1] = 1; test_count[2] = 2; console.log('test_count', test_count); test_count[1]++; console.log('test_count', test_count); res.send('done!'); }); app.get('/test8hr', function (req, res) { pool.query('SELECT count(*) AS cnt FROM users', function (error, results, fields) { if (error) throw error; res.send('The count is: '+ results[0].cnt); }); }); app.get('/test-request', function (req1, res1) { let json = { "token":"20185523", "mobile":"13011112222,13072168298", "firstline":"POSTMAN设备 温度超标!数值:15 标准:0~10", "level_name":"报警", "level_color":"#F83A22", "curtime":"2018-5-16 13:15", "location":"上海一号库", "contact":"测 138****2345", "workorder":"311429", "lastline":"已持续5小时, 请紧急处理!" }; postRequest(KPI_SERVICE, json, function(err, resp, body) { if(err) console.log('Remote:', err); else console.log('Remote:', resp.statusCode, body); }); res1.send('done!'); });
数错误!'); } let until = new Date().addHours(after); let blocks = JSON.parse(fs.readFileSync(BLOCK_F
identifier_body
app.js
const Influx = require('influx') const express = require('express') const http = require('http') const os = require('os') const path = require('path'); const bodyParser = require('body-parser'); const request = require('request'); const app = express(); // view engine setup app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); app.use(bodyParser.urlencoded({ extended: false })); const influx = new Influx.InfluxDB({ database: 'flux_yum', protocol: 'https', host: 'awsflux01.base.li', port: 18086, username: 'impact', password: 'BNMghj!' }); //console.log('Create influx:', influx); // setup mysql const mysql = require('mysql'); const pool = mysql.createPool({ connectionLimit : 10, host : 'localhost', user : 'fire_alarm', password : 'fire_alarm', database : 'fire_alarm' }); http.createServer(app).listen(3100, function () { console.log('Listening on port 3100') }) Date.prototype.addHours = function(h) { this.setTime(this.getTime() + (h*60*60*1000)); return this; } // 对Date的扩展,将 Date 转化为指定格式的String // 月(M)、日(d)、小时(h)、分(m)、秒(s)、季度(q) 可以用 1-2 个占位符, // 年(y)可以用 1-4 个占位符,毫秒(S)只能用 1 个占位符(是 1-3 位的数字) // 例子: // (new Date()).formatTime("yyyy-MM-dd hh:mm:ss.S") ==> 2006-07-02 08:09:04.423 // (new Date()).formatTime("yyyy-M-d h:m:s.S") ==> 2006-7-2 8:9:4.18 Date.prototype.formatTime = function (fmt) { //author: meizz var o = { "M+": this.getMonth() + 1, //月份 "d+": this.getDate(), //日 "h+": this.getHours(), //小时 "m+": this.getMinutes(), //分 "s+": this.getSeconds(), //秒 "q+": Math.floor((this.getMonth() + 3) / 3), //季度 "S": this.getMilliseconds() //毫秒 }; if (/(y+)/.test(fmt)) fmt = fmt.replace(RegExp.$1, (this.getFullYear() + "").substr(4 - RegExp.$1.length)); for (var k in o) if (new RegExp("(" + k + ")").test(fmt)) fmt = fmt.replace(RegExp.$1, (RegExp.$1.length == 1) ? 
(o[k]) : (("00" + o[k]).substr(("" + o[k]).length))); return fmt; } function groupBy(xs, key) { return xs.reduce(function(rv, x) { (rv[x[key]] = rv[x[key]] || []).push(x); return rv; }, {}); }; function minOffsetNow(date) { date = date || new Date(); return parseInt((new Date().getTime() - date.getTime())/(60*1000)); } /** * 全局数据定义 */ var kpis = require('./kpis'); var sensors = require('./sensors'); var users = require('./users'); var timer = 0; var out_value = 0; var out_date = 0; var tm_off_count = {}; const INTV_MIN = 5; // 查询间隔 min const MIN_SECS = 2; // 测试时可调到 2 默认 60 const ALM_AFTER = 5; // 累积多久时长后报警 min const ALM_BETWEEN = 30; // 多次报警时长间隔 min const ALARM_LEVEL = 1; // 什么等级开始报警, 默认1 const BLOCK_FILE = './blocks.json'; // const WX_MSG_URL = 'http://localhost:3100/blockme'; const WX_MSG_URL = 'http://2whzur.natappfree.cc/blockme'; const KPI_SERVICE = 'http://localhost:3119/kpi/alarm'; /** * 获取库名 */ function groupName(group) { switch(group) { case 'sydc1': return '一号库'; case 'sydc2': return '二号库'; default: return group; } } /** * 解析 tag_mesa 为查询参数 */ function parseTagMesa (tag_mesa) { let measurement = ''; let mesa_where = ''; let mesa_vals = ''; let m = {}, m2 = {}; let qarray = []; let chunks = tag_mesa.split(','); chunks.forEach((val, key, arr) => { let c = val.split('='); m[ c[0] ] = c[1]; }); m2 = Object.assign({}, m); if( 'measurement' in m) { measurement = `"${m['measurement']}"`; delete m.measurement; } for( let k in m) { qarray.push( `"${k}"='${m[k]}'`); } mesa_where = qarray.join(' AND '); return { measurement, mesa_where, mesa_vals: m2 }; } /** * 微信 API 初始化 */ let appId = "wx32e64b2b2f8f20df"; // bstwo let appSecret = "905848f29979a4859d2468f76626aa88"; var WechatAPI = require('wechat-api') var fs = require('fs') var wechatApi = new WechatAPI(appId, appSecret, function (callback) { fs.readFile('access_token.txt', 'utf8', function (err, txt) { if (err) {return callback(null, null)} callback(null, JSON.parse(txt)) }) }, function (token, callback) { 
fs.writeFile('access_token.txt', JSON.stringify(token), callback) }) /** * 开启定时器 */ function startTimer(res) { let pp = ''; if( timer) { pp = 'Error: please stop previous timer.'; res? res.send(pp): console.err(pp); } else { pp = 'Timer start @'+ INTV_MIN + ' min'; timer = setInterval(checkSensors, INTV_MIN* 1000* MIN_SECS /*60*/); console.log(pp); res? res.send(pp): null; // At first checkSensors(); } } /** * 结束定时器 */ function stopTimer(res) { if( timer) { clearInterval(timer); timer = 0; res? res.send('Timer stop.'): null; } else { res? res.send('No timer.'): null; } } /** * 自动启动首次(分钟需5的倍数) */ function autoStart(enable) { if(enable) { let m1 = new Date().getMinutes(); let m2 = Math.ceil(m1/5)*5; let df = (m2-m1)*1000* MIN_SECS; console.log('AutoStart waits:', m2-m1, 'min to start.'); if(!timer) { setTimeout(startTimer, df); } else { console.log('Error: please stop previous timer.'); } } } autoStart(MIN_SECS == 60); /** * 传感器检查流程 */ function checkSensors() { console.log('------ CheckSensors start '+ new Date().toLocaleString()+ '-------'); // 找出在线传感器 let onlines = sensors.filter(function(s) { return !s.offline; }); // 批量查询传感器 sensorBatchValues(onlines, function(err, sensors) { if(err) { return; } let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); sensors.forEach( function(sensor) { let pt = sensor.point, kpi = kpis[sensor.kpi]; let ck = checkKpi(pt, kpi); let ex = exceedCount(sensor, ck); if( ex) { sendAlarm(sensor, ck, users, blocks); } console.log(sensor.name+':', ck.value, 'min-off:', sensor.point.min_off, ck.exceed? 
'exceed:'+ck.exceed+ ' count: ' +sensor.exc_count[ck.level] : ''); }); alarmTmOfflineSensors(sensors, users, blocks); }); } /** * 读取传感器值(批量) */ function sensorBatchValues(sensors, callback) { let qs = []; sensors.forEach( function(sensor) { let m = parseTagMesa(sensor.tag_mesa); qs.push(`SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`) }); let q = qs.join('; '); //console.log('batch q:', q); influx.query(q).then(result => { // 注: 结果实际不符合 json 格式, 可用 stringify 转 //console.log('result', JSON.stringify(result)); if( sensors.length == 1) { result = [result]; // 一个传感器时,必须包装成二维 } sensors.forEach( function(sensor, idx) { if(sensor.test) { sensor.point = { time: out_date==0? new Date(): new Date(out_date), last: out_value, } } else { sensor.point = (result&&result.length>idx&&result[idx].length>0) ? result[idx][0]: {}; } }); callback(null, sensors); }).catch(err => { console.error('sensorBatchValues err:', err); callback(err); }); } /** * 读取传感器值(单次) */ function sensorValue(sensor, callback) { let m = parseTagMesa(sensor.tag_mesa); let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; //console.log('q:', q); influx.query(q).then(result => { callback(null, (result&&result.length>0)? 
result[0]: {}); }).catch(err => { console.error('SensorValue err:', err); callback(err); }); } /** * 计算 KPI */ function checkKpi(point, kpi) { let ck = { src: kpi.src || 'calc', measure: kpi.measure || 'temp', reset_alarm: kpi.reset_alarm, exceed: 0, level: 0, standard: '', is_reset: false, }; //TODO: 来自 point 的其他值 let value = point.last; let time = new Date(point.time.getTime()); //console.log('time', time.toLocaleString()); point.min_off = minOffsetNow(time); ck.tm_offline = point.min_off > 12; if( !kpi.src && !ck.tm_offline) { if( kpi.ra_above && value > kpi.ra_above) { ck.exceed = 2; ck.standard = kpi.ar_below+ '~'+ kpi.ra_above; } else if( kpi.ag_above && value > kpi.ag_above) { ck.exceed = 1; ck.standard = kpi.ga_below+ '~'+ kpi.ag_above; } else if( kpi.ar_below && value < kpi.ar_below) { ck.exceed = -2; ck.standard = kpi.ar_below+ '~'+ kpi.ra_above; } else if( kpi.ga_below && value < kpi.ga_below) { ck.exceed = -1; ck.standard = kpi.ga_below+ '~'+ kpi.ag_above; } } else if( kpi.src == 'read' && !ck.tm_offline) { ck.exceed = value>0? 2: 0; ck.standard = kpi.standard; } ck.level = Math.abs(ck.exceed); ck.value = value; ck.time = time; return ck; } /** * 统计超限次数 * * @return true/false 是否累积到报警程度 */ function exceedCount(sensor, check) { let lvl = check.level; sensor.exc_count = sensor.exc_count || {}; sensor.exc_count[lvl] = sensor.exc_count[lvl] || 0; sensor.tm_offline = check.tm_offline; if(sensor.tm_offline) { return false; // tm_offline 数量太多, 1.不单独报警; 2.不累积计数 } if( sensor.exc_count[lvl] == 0 && check.exceed == 0) { return false; } // 0 0 // 1 1 // 累加计数 if(check.exceed == 0 || check.level >= ALARM_LEVEL) { for(let lo=1; lo<=lvl; lo++) sensor.exc_count[lo]++; setDuration(check, sensor.exc_count[lvl]); } else { return false; //不计数 } if( check.exceed == 0) { // 复位情况:也允许发送报警 check.is_reset = (sensor.exc_count[lvl] > 1); for(let lo=1; lo<=lvl; lo++) sensor.exc_count[lo] = 0; return ( check.is_reset && check.reset_alarm)? 
true: false; } else { // 超限情况:从计数判断 let tms_a = (check.src=='read')? 0: parseInt(ALM_AFTER / INTV_MIN); // ALM_AFTER 转为次数, read 的不用等待 let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 6; let real_tms = sensor.exc_count[lvl]-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断 if(real_tms < tms_a) { return false; } else { real_tms -= tms_a; return (real_tms%tms_b == 0)? true: false; } } /* if( check.exceed === 0 || check.level < ALARM_LEVEL) { check.is_reset = (sensor.exc_count > 0); setDuration(check, sensor.exc_count); sensor.exc_count = 0; // 复位也允许发送报警 return ( check.is_reset && check.reset_alarm)? true: false; } else { // 累加计数 if(!sensor.exc_count) { sensor.exc_count = 1; } else { sensor.exc_count++; } setDuration(check, sensor.exc_count); // 计数判断 let tms_a = parseInt(ALM_AFTER / INTV_MIN); let tms_b = parseInt(ALM_BETWEEN / INTV_MIN) || 2; let real_tms = sensor.exc_count-1; // 因为上面 ALM_AFTER 是累积, 所以 exc_count-1 再判断 if(real_tms < tms_a) { return false; } else { real_tms -= tms_a; return (real_tms%tms_b == 0)? true: false; } } */ } /** * 计算 或 设置时长 */ function setDuration(check, exc_count) { exc_count = exc_count || 0; if(check.src == 'read') { check.duration = (exc_count<1)? 0: (exc_count-1)*INTV_MIN; } else { check.duration = (exc_count<2)? 0: (exc_count-2)*INTV_MIN; // -2:隐含减去用于确认的第一个 ALM_AFTER 分钟 } //TODO: read 时,来自 point } /** * 格式化下时长 */ function formatDuration(minutes) { minutes = minutes || 0; if(minutes < 120 ) { return minutes+ '分钟'; } else { let hr = parseInt(minutes/60); let min = minutes%60; let day = 0; if( hr >= 24) { day = parseInt(hr/24); hr = hr%24; } return (day? day+ '天':'')+ (hr+ '小时')+ (min? min+ '分钟': ''); } } /** * 发送报警 */ function sendAl
check, users, blocks) { let firstline = makeFirstline(sensor, check); let curtime = new Date().formatTime('yyyy-MM-dd hh:mm'); let level = check.level; let level_name = level==0? '通知': (level==1? '预警': '报警'); let level_color = level==0? '#16A765': (level==1? '#FFAD46': '#F83A22'); let durat_str = formatDuration(check.duration); let lastline = (check.duration? '已持续约 '+ durat_str: '')+ (check.duration&&level? ', ': '') +(level? '请及时处理!':''); console.error('SendAlarm', curtime, sensor.name, check); let to_mobiles = []; users.forEach( function(user) { if(!user.openid) return; // 传感器是否在用户的组? if( user.groups.indexOf(sensor.group) == -1) { return; } // 用户是否屏蔽该报警? let sid = sensor.id, uid = user.id; let until = blocks&&blocks[sid]&&blocks[sid][uid]? new Date(blocks[sid][uid]): null; if(until && until> new Date() && level> 0) { //复位通知不能屏蔽 console.log('user <'+ uid+ '> blocks <'+ sid+ '> until', until.toLocaleString()); return; } to_mobiles.push(user.mobile); // 发送微信消息(本地) /* var templateId = 'zOVAEaSZVEHPdRE1KM2uQJy5wPfuWibHSU6NmXpIqF8'; var url = WX_MSG_URL+ `?sid=${sid}&uid=${uid}`; var data = { "first": { "value": firstline, "color":"#173177" }, "keyword1":{ "value": level_name, "color": level_color }, "keyword2": { "value": curtime, "color":"#173177" }, "keyword3": { "value": sensor.loc, "color":"#173177" }, "keyword4": { "value": '何 138****1234', "color":"#173177" }, "keyword5": { "value": 'n/a', "color":"#173177" }, "remark":{ "value": lastline, "color":"#173177" } }; wechatApi.sendTemplate(user.openid, templateId, url, data, function(err, result) { //console.log('sendTemplate err+result:', err, result) }) */ }); let mobiles = to_mobiles.join(','); let json = { "token":"20185523", "mobile": mobiles, "firstline": firstline, "level_name": level_name, "level_color": level_color, "curtime": curtime, "location": sensor.loc, "contact": "何 138****2345", "workorder": "n/a", "lastline": lastline }; postRequest(KPI_SERVICE, json, function(err, resp, body) { if(err) 
console.log('Remote:', err); else console.log('Remote:', resp.statusCode, body); }); } /** * 生成报警主提示 */ function makeFirstline(sensor, check) { //eg. sensor.name+ '温度超标!数值:'+ check.value+ ' 标准:'+ check.standard, let r = sensor.name+ ' '; switch (check.measure) { case 'temp': r += '温度' + (!check.is_reset? '超标': '复位')+ '!'; r += '数值:' + check.value+ ' 标准:'+ check.standard; break; case 'offline': r += '离线' + (!check.is_reset? '报警': '复位')+ '!'; r += !check.is_reset? '标准:'+ check.standard: ''; break; } return r; } /** * 统计 tm 离线传感器并报警 */ function alarmTmOfflineSensors(sensors, users, blocks) { let offlines = sensors.filter(function(s) { return s.tm_offline; }); let new_off_count = {}; let new_off_snrs = groupBy(offlines, 'group'); for( group in new_off_snrs) { new_off_count[group] = {}; new_off_count[group]['num'] = new_off_snrs[group].length; new_off_count[group]['min_off'] = new_off_snrs[group][0].point.min_off; } console.log('old_off_count', tm_off_count); console.log('new_off_count', new_off_count); // 查找离线复位 for( group in tm_off_count) { let od = tm_off_count[group]; let nw = new_off_count[group]; if(!nw) { let sensor = { name: '共计'+ od.num+ '个传感器', group: group, id: group+ '_group_offline', loc: groupName(group)+'-全库范围', }; let check = { level: 0, duration: od.min_off+ INTV_MIN, measure: 'offline', is_reset: true, standard: '', }; sendAlarm(sensor, check, users, blocks); } } // 查找离线报警 for( group in new_off_count) { let od = tm_off_count[group]; let nw = new_off_count[group]; let exc_count = od ? 
od['exc_count']+1 : 1; let min_between = (exc_count-1)*INTV_MIN; nw['exc_count'] = exc_count; // console.log('min_between', min_between); if(!od || min_between%ALM_BETWEEN==0) { let sensor = { name: '共计'+ nw.num+ '个传感器', group: group, id: group+ '_group_offline', loc: groupName(group)+'-全库范围', }; let check = { level: 2, duration: nw.min_off, measure: 'offline', is_reset: false, standard: '数据停止更新', }; sendAlarm(sensor, check, users, blocks); } } // 保存新离线数量 tm_off_count = new_off_count; } /** * 清理过期屏蔽项 * * 注意: 要全部清除屏蔽项时, 不能清空文件内容, 请手动把内容设置为 {} */ function cleanBlocks() { let count = 0; let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); console.log('------ cleanBlocks start ------------'); for( sid in blocks) { for( uid in blocks[sid]) { let until = new Date(blocks[sid][uid]); let del = ''; if( until< new Date()) { delete blocks[sid][uid]; count++; del = '(deleted)'; } console.log(sid, uid, until.toLocaleString(), del); } } fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks)); return count; } /** * 发送 POST 请求 */ function postRequest(url, json, callback) { var options = { uri: url, method: 'POST', json: json, }; request(options, callback); } // -- routers ------------------------------------------------------ app.get('/', function (req, res) { setTimeout(() => res.end('Hello sensor!'), Math.random() * 500); }) app.get('/start', function (req, res) { startTimer(res); }); app.get('/stop', function (req, res) { stopTimer(res); }); /** * 临时屏蔽报警(表单) */ app.get('/blockme', function (req, res) { let sid = req.query.sid; let uid = req.query.uid; if(!sid || !uid) { return res.send('错误: 参数错误!'); } let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); let until = blocks&&blocks[sid]&&blocks[sid][uid]? 
new Date(blocks[sid][uid]): null; if(until && until > new Date()) { res.render('blockme', {sid, uid, until: until.toLocaleString()}); } else { res.render('blockme', {sid, uid, until:null}); } }); /** * 临时屏蔽报警(提交) */ app.post('/blockme', function (req, res) { let after = parseInt(req.body.after); let sid = req.body.sid; let uid = req.body.uid; if(!sid || !uid || !after) { return res.send('错误: 参数错误!'); } let until = new Date().addHours(after); let blocks = JSON.parse(fs.readFileSync(BLOCK_FILE)); blocks[sid] = {}; blocks[sid][uid] = until; fs.writeFileSync(BLOCK_FILE, JSON.stringify(blocks)); res.redirect('/blockme?sid='+ sid+ '&uid='+ uid+ '&v=2'); }); /** * 手动清理临时屏蔽报警过期项 */ app.get('/cleanblocks', function (req, res) { let c = cleanBlocks(); res.send(c+ ' expires cleaned!(see logs)'); }); // -- tests ------------------------------------------------------ app.get('/test', function (req, res) { // q format = ` // SELECT last(value) FROM "li.base.v1.yum.fac" WHERE "where"='shenyang02' AND "where_type"='fac' AND "what_type"='env' AND "what1"='frig' AND "what2"='mt' AND "what3"='air' AND "output"='temp_c' AND "tool"='sample' AND "time_step"='5t' AND "at"='dock' GROUP BY "what4" // `; let tag1 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang02,what_type=env,what1=frig,what2=mt,what3=air,what4=h01,output=temp_c,tool=sample,time_step=5t,at=dock,where1=dl_1_1,where3=4m"; let tag2 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang01,what_type=env,what1=frig,what2=lt,what3=air,what4=s10,output=temp_c,tool=sample,time_step=5t,at=room,where1=rl_1_1,where3=8_5m"; let tag3 = "measurement=li.base.v1.yum.fac,where_type=fac,where=shenyang01,what_type=sys,what1=frig,what2=rcs,what3=offline,what4=A_power,output=status_u,tool=sample,time_step=5t"; let m = parseTagMesa(tag1); let q = `SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; // 批量查询: // m = parseTagMesa(tag2); // q += `;SELECT last(value) FROM ${m.measurement} WHERE ${m.mesa_where}`; 
console.log('q:', q); influx.query(q).then(result => { res.json(result) }).catch(err => { res.status(500).send(err.stack) }) //result: [{"time":"2018-05-07T09:00:00.000Z","last":14}] //result: [[{"time":"2018-05-09T06:15:00.000Z","last":12.9}],[{"time":"2018-05-09T06:20:00.000Z","last":-18.5}]] }); app.get('/test-batch', function (req, res) { let offlines = sensors.filter(function(s) { return s.offline; }); console.log('offlines', offlines); console.time('sensorBatchValues'); sensorBatchValues(offlines, function(err, sensors) { console.log('Result sensors', sensors); console.timeEnd('sensorBatchValues'); res.send('SensorBatchValues finished!'); }); }); app.get('/test-blocks-r', function (req, res) { let file = './blocks_t.json' let result = JSON.parse(fs.readFileSync(file)); let val = result['sydc2_lt_dock_01']['102']; let tm = new Date(val).toLocaleString(); let later = (new Date(val) > new Date())? 'after now': 'before now'; res.send(tm +','+ later); }); app.get('/test-blocks-w', function (req, res) { let file = './blocks_t.json' let result = JSON.parse(fs.readFileSync(file)); result['sydc2_lt_dock_01']['102'] = new Date().addHours(2); fs.writeFileSync(file, JSON.stringify(result)); res.send('file written!'); }); app.get('/outdata', function (req, res) { out_value = req.query.v || 0; out_date = req.query.d || 0; res.send('out_value = '+ out_value+ ' out_date='+ out_date); }); app.get('/auto-start', function (req, res) { autoStart(true); res.send('done!'); }); app.get('/array', function (req, res) { let test_count = {}; test_count[1] = 1; test_count[2] = 2; console.log('test_count', test_count); test_count[1]++; console.log('test_count', test_count); res.send('done!'); }); app.get('/test8hr', function (req, res) { pool.query('SELECT count(*) AS cnt FROM users', function (error, results, fields) { if (error) throw error; res.send('The count is: '+ results[0].cnt); }); }); app.get('/test-request', function (req1, res1) { let json = { "token":"20185523", 
"mobile":"13011112222,13072168298", "firstline":"POSTMAN设备 温度超标!数值:15 标准:0~10", "level_name":"报警", "level_color":"#F83A22", "curtime":"2018-5-16 13:15", "location":"上海一号库", "contact":"测 138****2345", "workorder":"311429", "lastline":"已持续5小时, 请紧急处理!" }; postRequest(KPI_SERVICE, json, function(err, resp, body) { if(err) console.log('Remote:', err); else console.log('Remote:', resp.statusCode, body); }); res1.send('done!'); });
arm(sensor,
identifier_name
spamScore.js
const { Extendable } = require('klasa'); const config = require("../config"); const moment = require("moment"); const stringSimilarity = require("string-similarity"); const { Message, MessageEmbed } = require('discord.js'); module.exports = class extends Extendable { constructor(...args) { super(...args, { appliesTo: [ Message ] }); this._earnedSpamScore = 0 } get spamScore () { return new Promise(async (resolve, reject) => { if (this.type !== 'DEFAULT' || this.author.id === this.client.user.id) return resolve(0); // Start with a base score of 2 var score = 2; var scoreReasons = {}; /* // Add 3 points for every profane word used; excessive profanity spam config.profanity.map((word) => { var numbers = getIndicesOf(word, this.cleanContent, false); if (numbers.length > 0) { score += 3; //console.log(`profanity`); } }); */ // Executed after finishing perspective; manages multipliers var afterFunction = () => { // Start with a spam score multiplier of 0.5 // spam score 50% if less strict channel AND less strict role // Spam score 100% if less strict channel OR less strict role // Spam score 150% if neither less strict channel nor less strict role // If the member is muted, the spam score will always be 150% var multiplier = 0.5; var isMuted = (this.member && this.guild && this.member.roles.get(this.guild.settings.muteRole)); // If this is not a less strict channel, add 0.5 to the multiplier. if (this.guild.settings.antispamLessStrictChannels.indexOf(this.channel.id) === -1) multiplier += 0.5; // If the member does not have a role defined in less strict roles, add 0.5 to the multiplier. if (typeof this.member !== 'undefined') { var lessStrict = false; this.member.roles .filter((role) => { return this.guild.settings.antispamLessStrictRoles.indexOf(role.id) !== -1; }) .each((role) => { lessStrict = true; }); if (!lessStrict) multiplier += 0.5; } if (isMuted) multiplier = 1.5; // Text channel conflict resolution should have very strict antispam regardless of bot settings. 
if (this.channel && this.channel.settings.conflictResolution && this.channel.settings.conflictResolution.indexOf("ACTIVE") !== -1) multiplier = 2; //console.log(`${multiplier} multiplier`); // Flag messages with a high spam score var modLog = this.guild.settings.flagLogChannel; const _channel = this.client.channels.resolve(modLog); if (score > this.guild.settings.antispamCooldown) { if (_channel) { var embed = new MessageEmbed() .setTitle(`Flagged message`) .setDescription(`${this.cleanContent}`) .setAuthor(this.author.tag, this.author.displayAvatarURL()) .setFooter(`Message channel **${this.channel.name}**`) .addField(`Total Spam Score`, `Base: ${score}; multiplier: ${multiplier}; total: ${score * multiplier}`) .setColor(`#ff7878`); for (var key in scoreReasons) { if (Object.prototype.hasOwnProperty.call(scoreReasons, key)) { embed.addField(key, scoreReasons[ key ]); } } _channel.sendEmbed(embed, `:bangbang: Please review message ${this.id}; it was flagged for having a high spam score.`) } } score = parseInt(score * multiplier); console.log(`Total score: ${score}`) } console.log('Message spam score ' + this.id) // Add 5 score for each mention; mention spam var nummentions = this.mentions.users.size + this.mentions.roles.size; score += (5 * nummentions); if (nummentions > 0) { scoreReasons[ "Mentions" ] = (nummentions * 5) } // Add 10 score for each embed; link/embed spam var numembeds = this.embeds.length; score += (10 * numembeds); if (numembeds > 0) { scoreReasons[ "Embeds" ] = (numembeds * 10) } // Add 10 score for each attachment; attachment spam var numattachments = this.attachments.size; score += (10 * numattachments); if (numattachments > 0) { scoreReasons[ "Attachments" ] = (numattachments * 10) } // Calculate how many seconds this message took to type based off of 7 characters per second. 
var messageTime = (this.cleanContent.length / 7); //console.log(`${messageTime} messagetime`); // Iterate through messages of this channel from the last 3 minutes by the same author var collection = this.channel.messages .filter((message) => { if (message.partial || message === null || !message) return false; return message.id !== this.id && message.author.id === this.author.id && moment(this.createdAt).subtract(3, 'minutes').isBefore(moment(message.createdAt)) && moment(this.createdAt).isAfter(moment(message.createdAt)); }); //console.log(`${collection.size} messages`); collection.each((message) => { // If the current message was sent at a time that causes the typing speed to be more than 7 characters per second, // add score for flooding / copypasting. The faster / more characters typed, the more score added. var timediff = moment(this.createdAt).diff(moment(message.createdAt), 'seconds'); if (timediff <= messageTime && !this.author.bot) { score += parseInt((messageTime - timediff) + 1); scoreReasons[ "Flooding / Rapid Typing" ] = parseInt((messageTime - timediff) + 1) } // If the current message is more than 80% or more similar to the comparing message, // add 1 score for every (similarity % - 80) / 2; copy/paste spam. Multiply by 1 + (0.1 * (numcharacters / 100)) var similarity = stringSimilarity.compareTwoStrings(`${this.content || ''}${JSON.stringify(this.embeds)}${JSON.stringify(this.attachments.array())}`, `${message.content || ''}${JSON.stringify(message.embeds)}${JSON.stringify(message.attachments.array())}`); if (similarity >= 0.8) { score += parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))); scoreReasons[ "Copy-Pasting" ] = parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? 
this.cleanContent.length / 100 : 0)))) } }); // Score checks only if message content exists if (this.cleanContent && this.cleanContent.length > 0) { /* DISABLED; many false positives for emojis etc // If the message contains any off-the-wall characters, consider it spam and add 10 to the score. if (/[^\x20-\x7E]/g.test(this.cleanContent || '')) { score += 10; console.log(`special characters: 10`); } */ // Count uppercase and lowercase letters var uppercase = this.cleanContent.replace(/[^A-Z]/g, "").length; var lowercase = this.cleanContent.replace(/[^a-z]/g, "").length; // If 50% or more of the characters are uppercase, consider it shout spam, // and add a score of 5, plus 1 for every 12.5 uppercase characters. if (uppercase >= lowercase) { score += parseInt(5 + (20 * (uppercase / 250))); scoreReasons[ "Uppercase / Shouting" ] = parseInt(5 + (20 * (uppercase / 250))) } // Add score for repeating consecutive characters // 20 or more consecutive repeating characters = extremely spammy. Add 20 score. if (/(.)\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 20; scoreReasons[ "Repeating Characters" ] = 20 // 10 or more consecutive repeating characters = spammy. Add 10 score. } else if (/(.)\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 10; scoreReasons[ "Repeating Characters" ] = 10 // 5 or more consecutive repeating characters = a little bit spammy. Add 5 score. } else if (/(.)\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 5; scoreReasons[ "Repeating Characters" ] = 5 } // Add 40 score for here and everyone mentions as these are VERY spammy. if (this.content.includes("@here") || this.content.includes("@everyone")) { score += 40; scoreReasons[ "Here / Everyone Mention" ] = 40 } // Add spam score for every new line; but the more content : new lines, the less spam score is added. // New lines when content length is 128 characters or less are considered very spammy. 
var newlines = this.cleanContent.split(/\r\n|\r|\n/).length - 1; var ratio = newlines / (this.cleanContent.length > 128 ? Math.ceil(this.cleanContent.length / 128) / 2 : 0.25); score += Math.round(ratio); if (newlines > 0 && ratio > 0) { scoreReasons[ "New Lines / Scrolling" ] = Math.round(ratio) } // Add score for repeating patterns // TODO: improve this algorithm var newstring = this.cleanContent; var regex = /(\W|^)(.+)\s\2/gmi; var matcher = regex.exec(this.cleanContent); while (matcher !== null) { newstring = newstring.replace(matcher[ 2 ], ``); matcher = regex.exec(this.cleanContent); } var patternScore = (this.cleanContent.length > 0 ? (newstring.length / this.cleanContent.length) : 1); // Pattern score of 100% means no repeating patterns. For every 4% less than 100%, add 1 score. Multiply depending on content length. score += parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) if (patternScore < 1) { scoreReasons[ "Repeating Patterns" ] = parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) } // Add 3 points for every profane word used; excessive profanity spam config.profanity.map((word) => { var numbers = getIndicesOf(word, this.cleanContent, false); if (numbers.length > 0) { score += (numbers.length * 3); if (typeof scoreReasons[ "Profanity" ] === `undefined`) scoreReasons[ "Profanity" ] = 0 scoreReasons[ "Profanity" ] += (numbers.length * 3); //console.log(`profanity`); } }); afterFunction() return resolve(score) } else { afterFunction() return resolve(score) } }) } get
() { return this._earnedSpamScore; } set earnedSpamScore (value) { this._earnedSpamScore = value; } }; function getIndicesOf (searchStr, str, caseSensitive) { var searchStrLen = searchStr.length; if (searchStrLen == 0) { return []; } var startIndex = 0, index, indices = []; if (!caseSensitive) { str = str.toLowerCase(); searchStr = searchStr.toLowerCase(); } while ((index = str.indexOf(searchStr, startIndex)) > -1) { indices.push(index); startIndex = index + searchStrLen; } return indices; }
earnedSpamScore
identifier_name
spamScore.js
const { Extendable } = require('klasa'); const config = require("../config"); const moment = require("moment"); const stringSimilarity = require("string-similarity"); const { Message, MessageEmbed } = require('discord.js'); module.exports = class extends Extendable { constructor(...args) { super(...args, { appliesTo: [ Message ] }); this._earnedSpamScore = 0 } get spamScore () { return new Promise(async (resolve, reject) => { if (this.type !== 'DEFAULT' || this.author.id === this.client.user.id) return resolve(0); // Start with a base score of 2 var score = 2; var scoreReasons = {}; /* // Add 3 points for every profane word used; excessive profanity spam config.profanity.map((word) => { var numbers = getIndicesOf(word, this.cleanContent, false); if (numbers.length > 0) { score += 3; //console.log(`profanity`); } }); */ // Executed after finishing perspective; manages multipliers var afterFunction = () => { // Start with a spam score multiplier of 0.5 // spam score 50% if less strict channel AND less strict role // Spam score 100% if less strict channel OR less strict role // Spam score 150% if neither less strict channel nor less strict role // If the member is muted, the spam score will always be 150% var multiplier = 0.5; var isMuted = (this.member && this.guild && this.member.roles.get(this.guild.settings.muteRole)); // If this is not a less strict channel, add 0.5 to the multiplier. if (this.guild.settings.antispamLessStrictChannels.indexOf(this.channel.id) === -1) multiplier += 0.5; // If the member does not have a role defined in less strict roles, add 0.5 to the multiplier. if (typeof this.member !== 'undefined') { var lessStrict = false; this.member.roles .filter((role) => { return this.guild.settings.antispamLessStrictRoles.indexOf(role.id) !== -1; }) .each((role) => { lessStrict = true; }); if (!lessStrict) multiplier += 0.5; } if (isMuted) multiplier = 1.5; // Text channel conflict resolution should have very strict antispam regardless of bot settings. 
if (this.channel && this.channel.settings.conflictResolution && this.channel.settings.conflictResolution.indexOf("ACTIVE") !== -1) multiplier = 2; //console.log(`${multiplier} multiplier`); // Flag messages with a high spam score var modLog = this.guild.settings.flagLogChannel; const _channel = this.client.channels.resolve(modLog); if (score > this.guild.settings.antispamCooldown) { if (_channel) { var embed = new MessageEmbed() .setTitle(`Flagged message`) .setDescription(`${this.cleanContent}`) .setAuthor(this.author.tag, this.author.displayAvatarURL()) .setFooter(`Message channel **${this.channel.name}**`) .addField(`Total Spam Score`, `Base: ${score}; multiplier: ${multiplier}; total: ${score * multiplier}`) .setColor(`#ff7878`); for (var key in scoreReasons) { if (Object.prototype.hasOwnProperty.call(scoreReasons, key)) { embed.addField(key, scoreReasons[ key ]); } } _channel.sendEmbed(embed, `:bangbang: Please review message ${this.id}; it was flagged for having a high spam score.`) } } score = parseInt(score * multiplier); console.log(`Total score: ${score}`) } console.log('Message spam score ' + this.id) // Add 5 score for each mention; mention spam var nummentions = this.mentions.users.size + this.mentions.roles.size; score += (5 * nummentions); if (nummentions > 0) { scoreReasons[ "Mentions" ] = (nummentions * 5) } // Add 10 score for each embed; link/embed spam var numembeds = this.embeds.length; score += (10 * numembeds); if (numembeds > 0) { scoreReasons[ "Embeds" ] = (numembeds * 10) } // Add 10 score for each attachment; attachment spam var numattachments = this.attachments.size; score += (10 * numattachments); if (numattachments > 0) { scoreReasons[ "Attachments" ] = (numattachments * 10) } // Calculate how many seconds this message took to type based off of 7 characters per second. 
var messageTime = (this.cleanContent.length / 7); //console.log(`${messageTime} messagetime`); // Iterate through messages of this channel from the last 3 minutes by the same author var collection = this.channel.messages .filter((message) => { if (message.partial || message === null || !message) return false; return message.id !== this.id && message.author.id === this.author.id && moment(this.createdAt).subtract(3, 'minutes').isBefore(moment(message.createdAt)) && moment(this.createdAt).isAfter(moment(message.createdAt)); }); //console.log(`${collection.size} messages`); collection.each((message) => { // If the current message was sent at a time that causes the typing speed to be more than 7 characters per second, // add score for flooding / copypasting. The faster / more characters typed, the more score added. var timediff = moment(this.createdAt).diff(moment(message.createdAt), 'seconds'); if (timediff <= messageTime && !this.author.bot) { score += parseInt((messageTime - timediff) + 1); scoreReasons[ "Flooding / Rapid Typing" ] = parseInt((messageTime - timediff) + 1) } // If the current message is more than 80% or more similar to the comparing message, // add 1 score for every (similarity % - 80) / 2; copy/paste spam. Multiply by 1 + (0.1 * (numcharacters / 100)) var similarity = stringSimilarity.compareTwoStrings(`${this.content || ''}${JSON.stringify(this.embeds)}${JSON.stringify(this.attachments.array())}`, `${message.content || ''}${JSON.stringify(message.embeds)}${JSON.stringify(message.attachments.array())}`); if (similarity >= 0.8) { score += parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))); scoreReasons[ "Copy-Pasting" ] = parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? 
this.cleanContent.length / 100 : 0)))) } }); // Score checks only if message content exists if (this.cleanContent && this.cleanContent.length > 0) { /* DISABLED; many false positives for emojis etc // If the message contains any off-the-wall characters, consider it spam and add 10 to the score. if (/[^\x20-\x7E]/g.test(this.cleanContent || '')) { score += 10; console.log(`special characters: 10`); } */ // Count uppercase and lowercase letters var uppercase = this.cleanContent.replace(/[^A-Z]/g, "").length; var lowercase = this.cleanContent.replace(/[^a-z]/g, "").length; // If 50% or more of the characters are uppercase, consider it shout spam, // and add a score of 5, plus 1 for every 12.5 uppercase characters. if (uppercase >= lowercase) { score += parseInt(5 + (20 * (uppercase / 250))); scoreReasons[ "Uppercase / Shouting" ] = parseInt(5 + (20 * (uppercase / 250))) } // Add score for repeating consecutive characters // 20 or more consecutive repeating characters = extremely spammy. Add 20 score. if (/(.)\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase()))
else if (/(.)\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 10; scoreReasons[ "Repeating Characters" ] = 10 // 5 or more consecutive repeating characters = a little bit spammy. Add 5 score. } else if (/(.)\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 5; scoreReasons[ "Repeating Characters" ] = 5 } // Add 40 score for here and everyone mentions as these are VERY spammy. if (this.content.includes("@here") || this.content.includes("@everyone")) { score += 40; scoreReasons[ "Here / Everyone Mention" ] = 40 } // Add spam score for every new line; but the more content : new lines, the less spam score is added. // New lines when content length is 128 characters or less are considered very spammy. var newlines = this.cleanContent.split(/\r\n|\r|\n/).length - 1; var ratio = newlines / (this.cleanContent.length > 128 ? Math.ceil(this.cleanContent.length / 128) / 2 : 0.25); score += Math.round(ratio); if (newlines > 0 && ratio > 0) { scoreReasons[ "New Lines / Scrolling" ] = Math.round(ratio) } // Add score for repeating patterns // TODO: improve this algorithm var newstring = this.cleanContent; var regex = /(\W|^)(.+)\s\2/gmi; var matcher = regex.exec(this.cleanContent); while (matcher !== null) { newstring = newstring.replace(matcher[ 2 ], ``); matcher = regex.exec(this.cleanContent); } var patternScore = (this.cleanContent.length > 0 ? (newstring.length / this.cleanContent.length) : 1); // Pattern score of 100% means no repeating patterns. For every 4% less than 100%, add 1 score. Multiply depending on content length. score += parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) if (patternScore < 1) { scoreReasons[ "Repeating Patterns" ] = parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? 
this.cleanContent.length / 100 : 0)))) } // Add 3 points for every profane word used; excessive profanity spam config.profanity.map((word) => { var numbers = getIndicesOf(word, this.cleanContent, false); if (numbers.length > 0) { score += (numbers.length * 3); if (typeof scoreReasons[ "Profanity" ] === `undefined`) scoreReasons[ "Profanity" ] = 0 scoreReasons[ "Profanity" ] += (numbers.length * 3); //console.log(`profanity`); } }); afterFunction() return resolve(score) } else { afterFunction() return resolve(score) } }) } get earnedSpamScore () { return this._earnedSpamScore; } set earnedSpamScore (value) { this._earnedSpamScore = value; } }; function getIndicesOf (searchStr, str, caseSensitive) { var searchStrLen = searchStr.length; if (searchStrLen == 0) { return []; } var startIndex = 0, index, indices = []; if (!caseSensitive) { str = str.toLowerCase(); searchStr = searchStr.toLowerCase(); } while ((index = str.indexOf(searchStr, startIndex)) > -1) { indices.push(index); startIndex = index + searchStrLen; } return indices; }
{ score += 20; scoreReasons[ "Repeating Characters" ] = 20 // 10 or more consecutive repeating characters = spammy. Add 10 score. }
conditional_block
spamScore.js
const { Extendable } = require('klasa'); const config = require("../config"); const moment = require("moment"); const stringSimilarity = require("string-similarity"); const { Message, MessageEmbed } = require('discord.js'); module.exports = class extends Extendable { constructor(...args) { super(...args, { appliesTo: [ Message ] }); this._earnedSpamScore = 0 } get spamScore () { return new Promise(async (resolve, reject) => { if (this.type !== 'DEFAULT' || this.author.id === this.client.user.id) return resolve(0); // Start with a base score of 2 var score = 2; var scoreReasons = {}; /* // Add 3 points for every profane word used; excessive profanity spam config.profanity.map((word) => { var numbers = getIndicesOf(word, this.cleanContent, false); if (numbers.length > 0) { score += 3; //console.log(`profanity`); } }); */ // Executed after finishing perspective; manages multipliers var afterFunction = () => { // Start with a spam score multiplier of 0.5 // spam score 50% if less strict channel AND less strict role // Spam score 100% if less strict channel OR less strict role // Spam score 150% if neither less strict channel nor less strict role // If the member is muted, the spam score will always be 150% var multiplier = 0.5; var isMuted = (this.member && this.guild && this.member.roles.get(this.guild.settings.muteRole)); // If this is not a less strict channel, add 0.5 to the multiplier. if (this.guild.settings.antispamLessStrictChannels.indexOf(this.channel.id) === -1) multiplier += 0.5; // If the member does not have a role defined in less strict roles, add 0.5 to the multiplier. if (typeof this.member !== 'undefined') { var lessStrict = false; this.member.roles .filter((role) => { return this.guild.settings.antispamLessStrictRoles.indexOf(role.id) !== -1; }) .each((role) => { lessStrict = true; }); if (!lessStrict) multiplier += 0.5; } if (isMuted) multiplier = 1.5; // Text channel conflict resolution should have very strict antispam regardless of bot settings. 
if (this.channel && this.channel.settings.conflictResolution && this.channel.settings.conflictResolution.indexOf("ACTIVE") !== -1) multiplier = 2; //console.log(`${multiplier} multiplier`); // Flag messages with a high spam score var modLog = this.guild.settings.flagLogChannel; const _channel = this.client.channels.resolve(modLog); if (score > this.guild.settings.antispamCooldown) { if (_channel) { var embed = new MessageEmbed() .setTitle(`Flagged message`) .setDescription(`${this.cleanContent}`) .setAuthor(this.author.tag, this.author.displayAvatarURL()) .setFooter(`Message channel **${this.channel.name}**`) .addField(`Total Spam Score`, `Base: ${score}; multiplier: ${multiplier}; total: ${score * multiplier}`) .setColor(`#ff7878`); for (var key in scoreReasons) { if (Object.prototype.hasOwnProperty.call(scoreReasons, key)) { embed.addField(key, scoreReasons[ key ]); } } _channel.sendEmbed(embed, `:bangbang: Please review message ${this.id}; it was flagged for having a high spam score.`) } } score = parseInt(score * multiplier); console.log(`Total score: ${score}`) } console.log('Message spam score ' + this.id) // Add 5 score for each mention; mention spam var nummentions = this.mentions.users.size + this.mentions.roles.size; score += (5 * nummentions); if (nummentions > 0) { scoreReasons[ "Mentions" ] = (nummentions * 5) } // Add 10 score for each embed; link/embed spam var numembeds = this.embeds.length; score += (10 * numembeds); if (numembeds > 0) { scoreReasons[ "Embeds" ] = (numembeds * 10) } // Add 10 score for each attachment; attachment spam var numattachments = this.attachments.size; score += (10 * numattachments); if (numattachments > 0) { scoreReasons[ "Attachments" ] = (numattachments * 10) } // Calculate how many seconds this message took to type based off of 7 characters per second. 
var messageTime = (this.cleanContent.length / 7); //console.log(`${messageTime} messagetime`); // Iterate through messages of this channel from the last 3 minutes by the same author var collection = this.channel.messages .filter((message) => { if (message.partial || message === null || !message) return false; return message.id !== this.id && message.author.id === this.author.id && moment(this.createdAt).subtract(3, 'minutes').isBefore(moment(message.createdAt)) && moment(this.createdAt).isAfter(moment(message.createdAt)); }); //console.log(`${collection.size} messages`); collection.each((message) => { // If the current message was sent at a time that causes the typing speed to be more than 7 characters per second, // add score for flooding / copypasting. The faster / more characters typed, the more score added. var timediff = moment(this.createdAt).diff(moment(message.createdAt), 'seconds'); if (timediff <= messageTime && !this.author.bot) { score += parseInt((messageTime - timediff) + 1); scoreReasons[ "Flooding / Rapid Typing" ] = parseInt((messageTime - timediff) + 1) } // If the current message is more than 80% or more similar to the comparing message, // add 1 score for every (similarity % - 80) / 2; copy/paste spam. Multiply by 1 + (0.1 * (numcharacters / 100)) var similarity = stringSimilarity.compareTwoStrings(`${this.content || ''}${JSON.stringify(this.embeds)}${JSON.stringify(this.attachments.array())}`, `${message.content || ''}${JSON.stringify(message.embeds)}${JSON.stringify(message.attachments.array())}`); if (similarity >= 0.8) { score += parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))); scoreReasons[ "Copy-Pasting" ] = parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? 
this.cleanContent.length / 100 : 0)))) } }); // Score checks only if message content exists if (this.cleanContent && this.cleanContent.length > 0) { /* DISABLED; many false positives for emojis etc // If the message contains any off-the-wall characters, consider it spam and add 10 to the score. if (/[^\x20-\x7E]/g.test(this.cleanContent || '')) { score += 10; console.log(`special characters: 10`); } */ // Count uppercase and lowercase letters var uppercase = this.cleanContent.replace(/[^A-Z]/g, "").length; var lowercase = this.cleanContent.replace(/[^a-z]/g, "").length; // If 50% or more of the characters are uppercase, consider it shout spam, // and add a score of 5, plus 1 for every 12.5 uppercase characters. if (uppercase >= lowercase) { score += parseInt(5 + (20 * (uppercase / 250))); scoreReasons[ "Uppercase / Shouting" ] = parseInt(5 + (20 * (uppercase / 250))) } // Add score for repeating consecutive characters // 20 or more consecutive repeating characters = extremely spammy. Add 20 score. if (/(.)\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 20; scoreReasons[ "Repeating Characters" ] = 20 // 10 or more consecutive repeating characters = spammy. Add 10 score. } else if (/(.)\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 10; scoreReasons[ "Repeating Characters" ] = 10 // 5 or more consecutive repeating characters = a little bit spammy. Add 5 score. } else if (/(.)\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 5; scoreReasons[ "Repeating Characters" ] = 5 } // Add 40 score for here and everyone mentions as these are VERY spammy. if (this.content.includes("@here") || this.content.includes("@everyone")) { score += 40; scoreReasons[ "Here / Everyone Mention" ] = 40 } // Add spam score for every new line; but the more content : new lines, the less spam score is added. // New lines when content length is 128 characters or less are considered very spammy. 
var newlines = this.cleanContent.split(/\r\n|\r|\n/).length - 1; var ratio = newlines / (this.cleanContent.length > 128 ? Math.ceil(this.cleanContent.length / 128) / 2 : 0.25); score += Math.round(ratio); if (newlines > 0 && ratio > 0) { scoreReasons[ "New Lines / Scrolling" ] = Math.round(ratio) } // Add score for repeating patterns // TODO: improve this algorithm var newstring = this.cleanContent; var regex = /(\W|^)(.+)\s\2/gmi; var matcher = regex.exec(this.cleanContent); while (matcher !== null) { newstring = newstring.replace(matcher[ 2 ], ``); matcher = regex.exec(this.cleanContent); }
if (patternScore < 1) { scoreReasons[ "Repeating Patterns" ] = parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) } // Add 3 points for every profane word used; excessive profanity spam config.profanity.map((word) => { var numbers = getIndicesOf(word, this.cleanContent, false); if (numbers.length > 0) { score += (numbers.length * 3); if (typeof scoreReasons[ "Profanity" ] === `undefined`) scoreReasons[ "Profanity" ] = 0 scoreReasons[ "Profanity" ] += (numbers.length * 3); //console.log(`profanity`); } }); afterFunction() return resolve(score) } else { afterFunction() return resolve(score) } }) } get earnedSpamScore () { return this._earnedSpamScore; } set earnedSpamScore (value) { this._earnedSpamScore = value; } }; function getIndicesOf (searchStr, str, caseSensitive) { var searchStrLen = searchStr.length; if (searchStrLen == 0) { return []; } var startIndex = 0, index, indices = []; if (!caseSensitive) { str = str.toLowerCase(); searchStr = searchStr.toLowerCase(); } while ((index = str.indexOf(searchStr, startIndex)) > -1) { indices.push(index); startIndex = index + searchStrLen; } return indices; }
var patternScore = (this.cleanContent.length > 0 ? (newstring.length / this.cleanContent.length) : 1); // Pattern score of 100% means no repeating patterns. For every 4% less than 100%, add 1 score. Multiply depending on content length. score += parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0))))
random_line_split
spamScore.js
const { Extendable } = require('klasa'); const config = require("../config"); const moment = require("moment"); const stringSimilarity = require("string-similarity"); const { Message, MessageEmbed } = require('discord.js'); module.exports = class extends Extendable { constructor(...args) { super(...args, { appliesTo: [ Message ] }); this._earnedSpamScore = 0 } get spamScore () { return new Promise(async (resolve, reject) => { if (this.type !== 'DEFAULT' || this.author.id === this.client.user.id) return resolve(0); // Start with a base score of 2 var score = 2; var scoreReasons = {}; /* // Add 3 points for every profane word used; excessive profanity spam config.profanity.map((word) => { var numbers = getIndicesOf(word, this.cleanContent, false); if (numbers.length > 0) { score += 3; //console.log(`profanity`); } }); */ // Executed after finishing perspective; manages multipliers var afterFunction = () => { // Start with a spam score multiplier of 0.5 // spam score 50% if less strict channel AND less strict role // Spam score 100% if less strict channel OR less strict role // Spam score 150% if neither less strict channel nor less strict role // If the member is muted, the spam score will always be 150% var multiplier = 0.5; var isMuted = (this.member && this.guild && this.member.roles.get(this.guild.settings.muteRole)); // If this is not a less strict channel, add 0.5 to the multiplier. if (this.guild.settings.antispamLessStrictChannels.indexOf(this.channel.id) === -1) multiplier += 0.5; // If the member does not have a role defined in less strict roles, add 0.5 to the multiplier. if (typeof this.member !== 'undefined') { var lessStrict = false; this.member.roles .filter((role) => { return this.guild.settings.antispamLessStrictRoles.indexOf(role.id) !== -1; }) .each((role) => { lessStrict = true; }); if (!lessStrict) multiplier += 0.5; } if (isMuted) multiplier = 1.5; // Text channel conflict resolution should have very strict antispam regardless of bot settings. 
if (this.channel && this.channel.settings.conflictResolution && this.channel.settings.conflictResolution.indexOf("ACTIVE") !== -1) multiplier = 2; //console.log(`${multiplier} multiplier`); // Flag messages with a high spam score var modLog = this.guild.settings.flagLogChannel; const _channel = this.client.channels.resolve(modLog); if (score > this.guild.settings.antispamCooldown) { if (_channel) { var embed = new MessageEmbed() .setTitle(`Flagged message`) .setDescription(`${this.cleanContent}`) .setAuthor(this.author.tag, this.author.displayAvatarURL()) .setFooter(`Message channel **${this.channel.name}**`) .addField(`Total Spam Score`, `Base: ${score}; multiplier: ${multiplier}; total: ${score * multiplier}`) .setColor(`#ff7878`); for (var key in scoreReasons) { if (Object.prototype.hasOwnProperty.call(scoreReasons, key)) { embed.addField(key, scoreReasons[ key ]); } } _channel.sendEmbed(embed, `:bangbang: Please review message ${this.id}; it was flagged for having a high spam score.`) } } score = parseInt(score * multiplier); console.log(`Total score: ${score}`) } console.log('Message spam score ' + this.id) // Add 5 score for each mention; mention spam var nummentions = this.mentions.users.size + this.mentions.roles.size; score += (5 * nummentions); if (nummentions > 0) { scoreReasons[ "Mentions" ] = (nummentions * 5) } // Add 10 score for each embed; link/embed spam var numembeds = this.embeds.length; score += (10 * numembeds); if (numembeds > 0) { scoreReasons[ "Embeds" ] = (numembeds * 10) } // Add 10 score for each attachment; attachment spam var numattachments = this.attachments.size; score += (10 * numattachments); if (numattachments > 0) { scoreReasons[ "Attachments" ] = (numattachments * 10) } // Calculate how many seconds this message took to type based off of 7 characters per second. 
var messageTime = (this.cleanContent.length / 7); //console.log(`${messageTime} messagetime`); // Iterate through messages of this channel from the last 3 minutes by the same author var collection = this.channel.messages .filter((message) => { if (message.partial || message === null || !message) return false; return message.id !== this.id && message.author.id === this.author.id && moment(this.createdAt).subtract(3, 'minutes').isBefore(moment(message.createdAt)) && moment(this.createdAt).isAfter(moment(message.createdAt)); }); //console.log(`${collection.size} messages`); collection.each((message) => { // If the current message was sent at a time that causes the typing speed to be more than 7 characters per second, // add score for flooding / copypasting. The faster / more characters typed, the more score added. var timediff = moment(this.createdAt).diff(moment(message.createdAt), 'seconds'); if (timediff <= messageTime && !this.author.bot) { score += parseInt((messageTime - timediff) + 1); scoreReasons[ "Flooding / Rapid Typing" ] = parseInt((messageTime - timediff) + 1) } // If the current message is more than 80% or more similar to the comparing message, // add 1 score for every (similarity % - 80) / 2; copy/paste spam. Multiply by 1 + (0.1 * (numcharacters / 100)) var similarity = stringSimilarity.compareTwoStrings(`${this.content || ''}${JSON.stringify(this.embeds)}${JSON.stringify(this.attachments.array())}`, `${message.content || ''}${JSON.stringify(message.embeds)}${JSON.stringify(message.attachments.array())}`); if (similarity >= 0.8) { score += parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))); scoreReasons[ "Copy-Pasting" ] = parseInt((10 - ((1 - similarity) * 50)) * (1 + (0.1 * (this.cleanContent ? 
this.cleanContent.length / 100 : 0)))) } }); // Score checks only if message content exists if (this.cleanContent && this.cleanContent.length > 0) { /* DISABLED; many false positives for emojis etc // If the message contains any off-the-wall characters, consider it spam and add 10 to the score. if (/[^\x20-\x7E]/g.test(this.cleanContent || '')) { score += 10; console.log(`special characters: 10`); } */ // Count uppercase and lowercase letters var uppercase = this.cleanContent.replace(/[^A-Z]/g, "").length; var lowercase = this.cleanContent.replace(/[^a-z]/g, "").length; // If 50% or more of the characters are uppercase, consider it shout spam, // and add a score of 5, plus 1 for every 12.5 uppercase characters. if (uppercase >= lowercase) { score += parseInt(5 + (20 * (uppercase / 250))); scoreReasons[ "Uppercase / Shouting" ] = parseInt(5 + (20 * (uppercase / 250))) } // Add score for repeating consecutive characters // 20 or more consecutive repeating characters = extremely spammy. Add 20 score. if (/(.)\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 20; scoreReasons[ "Repeating Characters" ] = 20 // 10 or more consecutive repeating characters = spammy. Add 10 score. } else if (/(.)\1\1\1\1\1\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 10; scoreReasons[ "Repeating Characters" ] = 10 // 5 or more consecutive repeating characters = a little bit spammy. Add 5 score. } else if (/(.)\1\1\1\1\1/.test(this.cleanContent.toLowerCase())) { score += 5; scoreReasons[ "Repeating Characters" ] = 5 } // Add 40 score for here and everyone mentions as these are VERY spammy. if (this.content.includes("@here") || this.content.includes("@everyone")) { score += 40; scoreReasons[ "Here / Everyone Mention" ] = 40 } // Add spam score for every new line; but the more content : new lines, the less spam score is added. // New lines when content length is 128 characters or less are considered very spammy. 
var newlines = this.cleanContent.split(/\r\n|\r|\n/).length - 1; var ratio = newlines / (this.cleanContent.length > 128 ? Math.ceil(this.cleanContent.length / 128) / 2 : 0.25); score += Math.round(ratio); if (newlines > 0 && ratio > 0) { scoreReasons[ "New Lines / Scrolling" ] = Math.round(ratio) } // Add score for repeating patterns // TODO: improve this algorithm var newstring = this.cleanContent; var regex = /(\W|^)(.+)\s\2/gmi; var matcher = regex.exec(this.cleanContent); while (matcher !== null) { newstring = newstring.replace(matcher[ 2 ], ``); matcher = regex.exec(this.cleanContent); } var patternScore = (this.cleanContent.length > 0 ? (newstring.length / this.cleanContent.length) : 1); // Pattern score of 100% means no repeating patterns. For every 4% less than 100%, add 1 score. Multiply depending on content length. score += parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) if (patternScore < 1) { scoreReasons[ "Repeating Patterns" ] = parseInt(((1 - patternScore) * 25) * (1 + (0.1 * (this.cleanContent ? this.cleanContent.length / 100 : 0)))) } // Add 3 points for every profane word used; excessive profanity spam config.profanity.map((word) => { var numbers = getIndicesOf(word, this.cleanContent, false); if (numbers.length > 0) { score += (numbers.length * 3); if (typeof scoreReasons[ "Profanity" ] === `undefined`) scoreReasons[ "Profanity" ] = 0 scoreReasons[ "Profanity" ] += (numbers.length * 3); //console.log(`profanity`); } }); afterFunction() return resolve(score) } else { afterFunction() return resolve(score) } }) } get earnedSpamScore () { return this._earnedSpamScore; } set earnedSpamScore (value)
}; function getIndicesOf (searchStr, str, caseSensitive) { var searchStrLen = searchStr.length; if (searchStrLen == 0) { return []; } var startIndex = 0, index, indices = []; if (!caseSensitive) { str = str.toLowerCase(); searchStr = searchStr.toLowerCase(); } while ((index = str.indexOf(searchStr, startIndex)) > -1) { indices.push(index); startIndex = index + searchStrLen; } return indices; }
{ this._earnedSpamScore = value; }
identifier_body
artifacts.py
""" Detect Cube Sat and Processing Plant artifacts """ from datetime import timedelta import os from io import StringIO from statistics import median import cv2 import numpy as np from pathlib import Path from osgar.node import Node from osgar.bus import BusShutdownException from moon.moonnode import CAMERA_WIDTH, CAMERA_HEIGHT, CAMERA_FOCAL_LENGTH curdir = Path(__file__).parent def union(a,b): x = min(a[0], b[0]) y = min(a[1], b[1]) w = max(a[0]+a[2], b[0]+b[2]) - x h = max(a[1]+a[3], b[1]+b[3]) - y return (x, y, w, h) class ArtifactDetector(Node): def __init__(self, config, bus): super().__init__(config, bus) bus.register("artf", "dropped") self.verbose = False self.dump_dir = None # optional debug ouput into directory self.scan = None # should laster initialize super() self.depth = None # more precise definiton of depth image self.width = None # detect from incoming images self.look_for_artefacts = config.get('artefacts', []) self.estimate_distance = config.get('estimate_distance', False) window_size = 5 min_disp = 16 num_disp = 192-min_disp blockSize = window_size uniquenessRatio = 7 speckleRange = 3 speckleWindowSize = 75 disp12MaxDiff = 200 P1 = 8*3*window_size**2 P2 = 32 * 3 * window_size ** 2 self.stereo_calc = cv2.StereoSGBM_create( minDisparity = min_disp, numDisparities = num_disp, blockSize = window_size, uniquenessRatio = uniquenessRatio, speckleRange = speckleRange, speckleWindowSize = speckleWindowSize, disp12MaxDiff = disp12MaxDiff, P1 = P1, P2 = P2 ) self.Q = np.float32([[1, 0, 0, -0.5*CAMERA_WIDTH], [0,-1, 0, 0.5*CAMERA_HEIGHT], # turn points 180 deg around x-axis, [0, 0, 0, CAMERA_FOCAL_LENGTH], # so that y-axis looks up [0, 0, 1/0.42, 0]]) self.detectors = [ { 'artefact_name': 'cubesat', 'detector_type': 'classifier', 'classifier': cv2.CascadeClassifier(str(curdir/'xml/cubesat.xml')), 'min_size': 5, 'max_size': 110, 'subsequent_detects_required': 3 }, { 'artefact_name': 'homebase', 'detector_type': 'classifier', 'classifier': 
cv2.CascadeClassifier(str(curdir/'xml/homebase.xml')), 'min_size': 20, 'max_size': 400, 'subsequent_detects_required': 3 }, { 'artefact_name': 'basemarker', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 500, 'mask': [CAMERA_HEIGHT//2, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order, look only in lower half of the screen (avoid solar panels) 'pixel_count_threshold': 100, 'bbox_union_count': 1, 'hue_max_difference': 10, 'hue_match': 100, # from RGB 007DBD 'subsequent_detects_required': 3 # noise will add some of this color, wait for a consistent sequence }, { 'artefact_name': 'homebase', 'detector_type': 'colormatch', 'min_size': 20, 'max_size': 700, 'mask': None, 'pixel_count_threshold': 400, 'bbox_union_count': 5, 'hue_max_difference': 10, 'hue_match': 19, # from RGB FFA616 'subsequent_detects_required': 3 }, { 'artefact_name': 'rover', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 700, 'mask': [180, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order - only look in lower half of screen 'pixel_count_threshold': 150, 'bbox_union_count': 10, 'hue_max_difference': 3, 'hue_match': 27, # from RGB FFA616 'subsequent_detects_required': 1 }, { 'artefact_name': 'excavator_arm', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 200, 'mask': [0, 120, 0, CAMERA_WIDTH], # [Y,X] order 'pixel_count_threshold': 150, 'bbox_union_count': 3, 'hue_max_difference': 3, 'hue_match': 27, # from RGB FFA616 'subsequent_detects_required': 1 } ] self.detect_sequences = {} def stdout(self, *args, **kwargs): # maybe refactor to Node?
def waitForImage(self): self.left_image = self.right_image = None while self.left_image is None or self.right_image is None: self.time, channel, data = self.listen() if channel == "left_image": self.left_image = data elif channel == "right_image": self.right_image = data return self.time def run(self): try: dropped = 0 while True: now = self.publish("dropped", dropped) dropped = -1 timestamp = now while timestamp <= now: # this thread is always running but wait and drop images if simulation is slower timestamp = self.waitForImage() dropped += 1 self.detect_and_publish(self.left_image, self.right_image) except BusShutdownException: pass def detect_and_publish(self, left_image, right_image): results = self.detect(left_image, right_image) for r in results: self.publish('artf', r) def detect(self, left_image, right_image): results = [] limg = cv2.imdecode(np.frombuffer(left_image, dtype=np.uint8), cv2.IMREAD_COLOR) rimg = cv2.imdecode(np.frombuffer(right_image, dtype=np.uint8), cv2.IMREAD_COLOR) if self.width is None: self.stdout('Image resolution', limg.shape) self.width = limg.shape[1] assert self.width == limg.shape[1], (self.width, limg.shape[1]) def box_area(b): return b[2]*b[3] limg_rgb = cv2.cvtColor(limg, cv2.COLOR_BGR2RGB) rimg_rgb = cv2.cvtColor(rimg, cv2.COLOR_BGR2RGB) hsv = cv2.cvtColor(limg, cv2.COLOR_BGR2HSV) hsv_blurred = cv2.medianBlur(hsv,5) # some frames have noise, need to blur otherwise threshold doesn't work objects_detected = [] for c in self.detectors: if c['artefact_name'] not in self.look_for_artefacts: continue if c['artefact_name'] not in self.detect_sequences: self.detect_sequences[c['artefact_name']] = 0 if c['detector_type'] == 'colormatch': lower_hue = np.array([c['hue_match'] - c['hue_max_difference'],50,50]) upper_hue = np.array([c['hue_match'] + c['hue_max_difference'],255,255]) # Threshold the HSV image to get only the matching colors mask = cv2.inRange(hsv_blurred, lower_hue, upper_hue) if c['mask'] is not None: m = 
np.zeros([CAMERA_HEIGHT,CAMERA_WIDTH], dtype=np.uint8) m[c['mask'][0]:c['mask'][1],c['mask'][2]:c['mask'][3]] = 255 mask &= m bboxes = [] contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) contours = contours[0] if len(contours) == 2 else contours[1] for cont in contours: contours_poly = cv2.approxPolyDP(cont, 3, True) x,y,w,h = cv2.boundingRect(contours_poly) if w > 1 or h > 1: # ignore isolated pixels bboxes.append([int(x),int(y),int(w),int(h)]) if len(bboxes) > 0: sb = sorted(bboxes, key = box_area, reverse = True)[:c['bbox_union_count']] bbox = sb[0] for b in sb[1:]: bbox = union(bbox,b) x, y, w, h = bbox match_count = cv2.countNonZero(mask[y:y+h,x:x+w]) if ( match_count > c['pixel_count_threshold'] and w >= c['min_size'] and h >= c['min_size'] and w <= c['max_size'] and h <= c['max_size'] ): # print ("%s match count: %d; [%d %d %d %d]" % (c['artefact_name'], match_count, x, y, w, h)) objects_detected.append(c['artefact_name']) if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row self.detect_sequences[c['artefact_name']] += 1 else: if self.estimate_distance: disp = self.stereo_calc.compute(limg_rgb, rimg_rgb).astype(np.float32) / 16.0 points = cv2.reprojectImageTo3D(disp, self.Q) matching_points = points[mask != 0] distances = matching_points[:,2] # third column are Z coords (distances) mean = np.mean(distances) sd = np.std(distances) distances_clean = [x for x in distances if mean - 2 * sd < x < mean + 2 * sd] #print("Artf distance: min %.1f median: %.1f" % (min(distances), median(distances))) if len(distances_clean) == 0: distances_clean = distances # print("Artf cleaned: min %.1f median: %.1f" % (min(final_list), median(final_list))) dist = max(0.0, min(distances_clean)) # subtract about half length of the rover else: dist = 0.0 results.append((c['artefact_name'], int(x), int(y), int(w), int(h), int(match_count), float(dist))) if c['detector_type'] == 
'classifier': lfound = c['classifier'].detectMultiScale(limg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size'])) rfound = c['classifier'].detectMultiScale(rimg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size'])) if len(lfound) > 0 and len(rfound) > 0: # only report if both cameras see it objects_detected.append(c['artefact_name']) if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row self.detect_sequences[c['artefact_name']] += 1 else: # TODO: tweak the filtering (blur and threshold), sometimes not all background is filtered out and the bbox looks bigger than it should be x,y,width,height = lfound[0] # print(self.time, "Pre: %d %d %d %d" % (x,y,width,height)) gray = cv2.cvtColor(limg_rgb[y:y+height, x:x+width], cv2.COLOR_BGR2GRAY) blur = cv2.medianBlur(gray,3) # some frames have noise, need to blur otherwise threshold doesn't work th, threshed = cv2.threshold(blur, 30, 255, cv2.THRESH_BINARY) coords = cv2.findNonZero(threshed) nonzerocount = cv2.countNonZero(threshed) nx, ny, nw, nh = cv2.boundingRect(coords) # print(self.time, "Post: %d %d %d %d" % (x+nx,y+ny,nw,nh)) results.append((c['artefact_name'], int(x+nx), int(y+ny), int(nw), int(nh), int(nonzerocount))) for artefact_name in self.detect_sequences.keys(): if artefact_name not in objects_detected: self.detect_sequences[artefact_name] = 0 return results def debug2dir(filename, out_dir): from osgar.logger import LogReader, lookup_stream_names from osgar.lib.serialize import deserialize names = lookup_stream_names(filename) assert 'detector.debug_artf' in names, names assert 'detector.artf' in names, names assert 'rosmsg.sim_time_sec' in names, names image_id = names.index('detector.debug_artf') + 1 artf_id = names.index('detector.artf') + 1 sim_sec_id = names.index('rosmsg.sim_time_sec') + 1 sim_time_sec = None image = None artf = None for dt, channel, data in 
LogReader(filename, only_stream_id=[image_id, artf_id, sim_sec_id]): data = deserialize(data) if channel == sim_sec_id: sim_time_sec = data elif channel == image_id: image = data assert artf is not None time_sec = sim_time_sec if sim_time_sec is not None else int(dt.total_seconds()) name = os.path.basename(filename)[:-4] + '-' + artf[0] + '-' + str(time_sec) + '.jpg' print(name) with open(os.path.join(out_dir, name), 'wb') as f: f.write(image) elif channel == artf_id: artf = data if __name__ == '__main__': from unittest.mock import MagicMock from queue import Queue import argparse import datetime import sys from osgar.bus import Bus parser = argparse.ArgumentParser(description='Run artifact detection and classification for given JPEG image') parser.add_argument('filename', help='JPEG filename') parser.add_argument('--debug2dir', help='dump clasified debug images into directory') parser.add_argument('-v', '--verbose', help='verbose mode', action='store_true') args = parser.parse_args() if args.debug2dir is not None: debug2dir(args.filename, args.debug2dir) sys.exit() with open(args.filename.replace('.npz', '.jpg'), 'rb') as f: jpeg_data = f.read() config = {'virtual_world': True} # for now logger = MagicMock() logger.register = MagicMock(return_value=1) def counter(): start = datetime.datetime.utcnow() while True: dt = datetime.datetime.utcnow() - start yield dt logger.write = MagicMock(side_effect=counter()) bus = Bus(logger) detector = ArtifactDetector(config, bus.handle('detector')) detector.verbose = args.verbose tester = bus.handle('tester') tester.register('scan', 'left_image', 'right_image', 'tick') bus.connect('tester.scan', 'detector.scan') bus.connect('tester.left_image', 'detector.left_image') bus.connect('tester.right_image', 'detector.right_image') bus.connect('detector.artf', 'tester.artf') bus.connect('tester.tick', 'tester.tick') bus.connect('detector.dropped', 'tester.dropped') tester.publish('scan', [2000]*270) # pretend that everything is at 2 
meters detector.start() for i in range(10 + 1): # workaround for local minima a = tester.listen() # print(i, a) tester.sleep(0.01) tester.publish('left_image', jpeg_data) # TODO right image detector.request_stop() detector.join() tester.publish('tick', None) a = tester.listen() print(a) # vim: expandtab sw=4 ts=4
output = StringIO() print(*args, file=output, **kwargs) contents = output.getvalue().strip() output.close() # self.publish('stdout', contents) print(contents)
identifier_body
artifacts.py
""" Detect Cube Sat and Processing Plant artifacts """ from datetime import timedelta import os from io import StringIO from statistics import median import cv2 import numpy as np from pathlib import Path from osgar.node import Node from osgar.bus import BusShutdownException from moon.moonnode import CAMERA_WIDTH, CAMERA_HEIGHT, CAMERA_FOCAL_LENGTH curdir = Path(__file__).parent def union(a,b): x = min(a[0], b[0]) y = min(a[1], b[1]) w = max(a[0]+a[2], b[0]+b[2]) - x h = max(a[1]+a[3], b[1]+b[3]) - y return (x, y, w, h) class ArtifactDetector(Node): def __init__(self, config, bus): super().__init__(config, bus) bus.register("artf", "dropped") self.verbose = False self.dump_dir = None # optional debug ouput into directory self.scan = None # should laster initialize super() self.depth = None # more precise definiton of depth image self.width = None # detect from incoming images self.look_for_artefacts = config.get('artefacts', []) self.estimate_distance = config.get('estimate_distance', False) window_size = 5 min_disp = 16 num_disp = 192-min_disp blockSize = window_size uniquenessRatio = 7 speckleRange = 3 speckleWindowSize = 75 disp12MaxDiff = 200 P1 = 8*3*window_size**2 P2 = 32 * 3 * window_size ** 2 self.stereo_calc = cv2.StereoSGBM_create( minDisparity = min_disp, numDisparities = num_disp, blockSize = window_size, uniquenessRatio = uniquenessRatio, speckleRange = speckleRange, speckleWindowSize = speckleWindowSize, disp12MaxDiff = disp12MaxDiff, P1 = P1, P2 = P2 ) self.Q = np.float32([[1, 0, 0, -0.5*CAMERA_WIDTH], [0,-1, 0, 0.5*CAMERA_HEIGHT], # turn points 180 deg around x-axis, [0, 0, 0, CAMERA_FOCAL_LENGTH], # so that y-axis looks up [0, 0, 1/0.42, 0]]) self.detectors = [ { 'artefact_name': 'cubesat', 'detector_type': 'classifier', 'classifier': cv2.CascadeClassifier(str(curdir/'xml/cubesat.xml')), 'min_size': 5, 'max_size': 110, 'subsequent_detects_required': 3 }, { 'artefact_name': 'homebase', 'detector_type': 'classifier', 'classifier': 
cv2.CascadeClassifier(str(curdir/'xml/homebase.xml')), 'min_size': 20, 'max_size': 400, 'subsequent_detects_required': 3 }, { 'artefact_name': 'basemarker', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 500, 'mask': [CAMERA_HEIGHT//2, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order, look only in lower half of the screen (avoid solar panels) 'pixel_count_threshold': 100, 'bbox_union_count': 1, 'hue_max_difference': 10, 'hue_match': 100, # from RGB 007DBD 'subsequent_detects_required': 3 # noise will add some of this color, wait for a consistent sequence }, { 'artefact_name': 'homebase', 'detector_type': 'colormatch', 'min_size': 20, 'max_size': 700, 'mask': None, 'pixel_count_threshold': 400, 'bbox_union_count': 5, 'hue_max_difference': 10, 'hue_match': 19, # from RGB FFA616 'subsequent_detects_required': 3 }, { 'artefact_name': 'rover', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 700, 'mask': [180, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order - only look in lower half of screen 'pixel_count_threshold': 150, 'bbox_union_count': 10, 'hue_max_difference': 3, 'hue_match': 27, # from RGB FFA616 'subsequent_detects_required': 1 }, { 'artefact_name': 'excavator_arm', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 200, 'mask': [0, 120, 0, CAMERA_WIDTH], # [Y,X] order 'pixel_count_threshold': 150, 'bbox_union_count': 3, 'hue_max_difference': 3, 'hue_match': 27, # from RGB FFA616 'subsequent_detects_required': 1 } ] self.detect_sequences = {} def
(self, *args, **kwargs): # maybe refactor to Node? output = StringIO() print(*args, file=output, **kwargs) contents = output.getvalue().strip() output.close() # self.publish('stdout', contents) print(contents) def waitForImage(self): self.left_image = self.right_image = None while self.left_image is None or self.right_image is None: self.time, channel, data = self.listen() if channel == "left_image": self.left_image = data elif channel == "right_image": self.right_image = data return self.time def run(self): try: dropped = 0 while True: now = self.publish("dropped", dropped) dropped = -1 timestamp = now while timestamp <= now: # this thread is always running but wait and drop images if simulation is slower timestamp = self.waitForImage() dropped += 1 self.detect_and_publish(self.left_image, self.right_image) except BusShutdownException: pass def detect_and_publish(self, left_image, right_image): results = self.detect(left_image, right_image) for r in results: self.publish('artf', r) def detect(self, left_image, right_image): results = [] limg = cv2.imdecode(np.frombuffer(left_image, dtype=np.uint8), cv2.IMREAD_COLOR) rimg = cv2.imdecode(np.frombuffer(right_image, dtype=np.uint8), cv2.IMREAD_COLOR) if self.width is None: self.stdout('Image resolution', limg.shape) self.width = limg.shape[1] assert self.width == limg.shape[1], (self.width, limg.shape[1]) def box_area(b): return b[2]*b[3] limg_rgb = cv2.cvtColor(limg, cv2.COLOR_BGR2RGB) rimg_rgb = cv2.cvtColor(rimg, cv2.COLOR_BGR2RGB) hsv = cv2.cvtColor(limg, cv2.COLOR_BGR2HSV) hsv_blurred = cv2.medianBlur(hsv,5) # some frames have noise, need to blur otherwise threshold doesn't work objects_detected = [] for c in self.detectors: if c['artefact_name'] not in self.look_for_artefacts: continue if c['artefact_name'] not in self.detect_sequences: self.detect_sequences[c['artefact_name']] = 0 if c['detector_type'] == 'colormatch': lower_hue = np.array([c['hue_match'] - c['hue_max_difference'],50,50]) upper_hue = 
np.array([c['hue_match'] + c['hue_max_difference'],255,255]) # Threshold the HSV image to get only the matching colors mask = cv2.inRange(hsv_blurred, lower_hue, upper_hue) if c['mask'] is not None: m = np.zeros([CAMERA_HEIGHT,CAMERA_WIDTH], dtype=np.uint8) m[c['mask'][0]:c['mask'][1],c['mask'][2]:c['mask'][3]] = 255 mask &= m bboxes = [] contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) contours = contours[0] if len(contours) == 2 else contours[1] for cont in contours: contours_poly = cv2.approxPolyDP(cont, 3, True) x,y,w,h = cv2.boundingRect(contours_poly) if w > 1 or h > 1: # ignore isolated pixels bboxes.append([int(x),int(y),int(w),int(h)]) if len(bboxes) > 0: sb = sorted(bboxes, key = box_area, reverse = True)[:c['bbox_union_count']] bbox = sb[0] for b in sb[1:]: bbox = union(bbox,b) x, y, w, h = bbox match_count = cv2.countNonZero(mask[y:y+h,x:x+w]) if ( match_count > c['pixel_count_threshold'] and w >= c['min_size'] and h >= c['min_size'] and w <= c['max_size'] and h <= c['max_size'] ): # print ("%s match count: %d; [%d %d %d %d]" % (c['artefact_name'], match_count, x, y, w, h)) objects_detected.append(c['artefact_name']) if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row self.detect_sequences[c['artefact_name']] += 1 else: if self.estimate_distance: disp = self.stereo_calc.compute(limg_rgb, rimg_rgb).astype(np.float32) / 16.0 points = cv2.reprojectImageTo3D(disp, self.Q) matching_points = points[mask != 0] distances = matching_points[:,2] # third column are Z coords (distances) mean = np.mean(distances) sd = np.std(distances) distances_clean = [x for x in distances if mean - 2 * sd < x < mean + 2 * sd] #print("Artf distance: min %.1f median: %.1f" % (min(distances), median(distances))) if len(distances_clean) == 0: distances_clean = distances # print("Artf cleaned: min %.1f median: %.1f" % (min(final_list), median(final_list))) dist = max(0.0, 
min(distances_clean)) # subtract about half length of the rover else: dist = 0.0 results.append((c['artefact_name'], int(x), int(y), int(w), int(h), int(match_count), float(dist))) if c['detector_type'] == 'classifier': lfound = c['classifier'].detectMultiScale(limg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size'])) rfound = c['classifier'].detectMultiScale(rimg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size'])) if len(lfound) > 0 and len(rfound) > 0: # only report if both cameras see it objects_detected.append(c['artefact_name']) if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row self.detect_sequences[c['artefact_name']] += 1 else: # TODO: tweak the filtering (blur and threshold), sometimes not all background is filtered out and the bbox looks bigger than it should be x,y,width,height = lfound[0] # print(self.time, "Pre: %d %d %d %d" % (x,y,width,height)) gray = cv2.cvtColor(limg_rgb[y:y+height, x:x+width], cv2.COLOR_BGR2GRAY) blur = cv2.medianBlur(gray,3) # some frames have noise, need to blur otherwise threshold doesn't work th, threshed = cv2.threshold(blur, 30, 255, cv2.THRESH_BINARY) coords = cv2.findNonZero(threshed) nonzerocount = cv2.countNonZero(threshed) nx, ny, nw, nh = cv2.boundingRect(coords) # print(self.time, "Post: %d %d %d %d" % (x+nx,y+ny,nw,nh)) results.append((c['artefact_name'], int(x+nx), int(y+ny), int(nw), int(nh), int(nonzerocount))) for artefact_name in self.detect_sequences.keys(): if artefact_name not in objects_detected: self.detect_sequences[artefact_name] = 0 return results def debug2dir(filename, out_dir): from osgar.logger import LogReader, lookup_stream_names from osgar.lib.serialize import deserialize names = lookup_stream_names(filename) assert 'detector.debug_artf' in names, names assert 'detector.artf' in names, names assert 'rosmsg.sim_time_sec' in names, names image_id = 
names.index('detector.debug_artf') + 1 artf_id = names.index('detector.artf') + 1 sim_sec_id = names.index('rosmsg.sim_time_sec') + 1 sim_time_sec = None image = None artf = None for dt, channel, data in LogReader(filename, only_stream_id=[image_id, artf_id, sim_sec_id]): data = deserialize(data) if channel == sim_sec_id: sim_time_sec = data elif channel == image_id: image = data assert artf is not None time_sec = sim_time_sec if sim_time_sec is not None else int(dt.total_seconds()) name = os.path.basename(filename)[:-4] + '-' + artf[0] + '-' + str(time_sec) + '.jpg' print(name) with open(os.path.join(out_dir, name), 'wb') as f: f.write(image) elif channel == artf_id: artf = data if __name__ == '__main__': from unittest.mock import MagicMock from queue import Queue import argparse import datetime import sys from osgar.bus import Bus parser = argparse.ArgumentParser(description='Run artifact detection and classification for given JPEG image') parser.add_argument('filename', help='JPEG filename') parser.add_argument('--debug2dir', help='dump clasified debug images into directory') parser.add_argument('-v', '--verbose', help='verbose mode', action='store_true') args = parser.parse_args() if args.debug2dir is not None: debug2dir(args.filename, args.debug2dir) sys.exit() with open(args.filename.replace('.npz', '.jpg'), 'rb') as f: jpeg_data = f.read() config = {'virtual_world': True} # for now logger = MagicMock() logger.register = MagicMock(return_value=1) def counter(): start = datetime.datetime.utcnow() while True: dt = datetime.datetime.utcnow() - start yield dt logger.write = MagicMock(side_effect=counter()) bus = Bus(logger) detector = ArtifactDetector(config, bus.handle('detector')) detector.verbose = args.verbose tester = bus.handle('tester') tester.register('scan', 'left_image', 'right_image', 'tick') bus.connect('tester.scan', 'detector.scan') bus.connect('tester.left_image', 'detector.left_image') bus.connect('tester.right_image', 'detector.right_image') 
bus.connect('detector.artf', 'tester.artf') bus.connect('tester.tick', 'tester.tick') bus.connect('detector.dropped', 'tester.dropped') tester.publish('scan', [2000]*270) # pretend that everything is at 2 meters detector.start() for i in range(10 + 1): # workaround for local minima a = tester.listen() # print(i, a) tester.sleep(0.01) tester.publish('left_image', jpeg_data) # TODO right image detector.request_stop() detector.join() tester.publish('tick', None) a = tester.listen() print(a) # vim: expandtab sw=4 ts=4
stdout
identifier_name
artifacts.py
""" Detect Cube Sat and Processing Plant artifacts """ from datetime import timedelta import os from io import StringIO from statistics import median import cv2 import numpy as np from pathlib import Path from osgar.node import Node from osgar.bus import BusShutdownException from moon.moonnode import CAMERA_WIDTH, CAMERA_HEIGHT, CAMERA_FOCAL_LENGTH curdir = Path(__file__).parent def union(a,b): x = min(a[0], b[0]) y = min(a[1], b[1]) w = max(a[0]+a[2], b[0]+b[2]) - x h = max(a[1]+a[3], b[1]+b[3]) - y return (x, y, w, h) class ArtifactDetector(Node): def __init__(self, config, bus): super().__init__(config, bus) bus.register("artf", "dropped") self.verbose = False self.dump_dir = None # optional debug ouput into directory self.scan = None # should laster initialize super() self.depth = None # more precise definiton of depth image self.width = None # detect from incoming images self.look_for_artefacts = config.get('artefacts', []) self.estimate_distance = config.get('estimate_distance', False) window_size = 5 min_disp = 16 num_disp = 192-min_disp blockSize = window_size uniquenessRatio = 7 speckleRange = 3 speckleWindowSize = 75 disp12MaxDiff = 200 P1 = 8*3*window_size**2 P2 = 32 * 3 * window_size ** 2 self.stereo_calc = cv2.StereoSGBM_create( minDisparity = min_disp, numDisparities = num_disp, blockSize = window_size, uniquenessRatio = uniquenessRatio, speckleRange = speckleRange, speckleWindowSize = speckleWindowSize, disp12MaxDiff = disp12MaxDiff, P1 = P1, P2 = P2 ) self.Q = np.float32([[1, 0, 0, -0.5*CAMERA_WIDTH], [0,-1, 0, 0.5*CAMERA_HEIGHT], # turn points 180 deg around x-axis, [0, 0, 0, CAMERA_FOCAL_LENGTH], # so that y-axis looks up [0, 0, 1/0.42, 0]]) self.detectors = [ { 'artefact_name': 'cubesat', 'detector_type': 'classifier', 'classifier': cv2.CascadeClassifier(str(curdir/'xml/cubesat.xml')), 'min_size': 5, 'max_size': 110, 'subsequent_detects_required': 3 }, { 'artefact_name': 'homebase', 'detector_type': 'classifier', 'classifier': 
cv2.CascadeClassifier(str(curdir/'xml/homebase.xml')), 'min_size': 20, 'max_size': 400, 'subsequent_detects_required': 3 }, { 'artefact_name': 'basemarker', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 500, 'mask': [CAMERA_HEIGHT//2, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order, look only in lower half of the screen (avoid solar panels) 'pixel_count_threshold': 100, 'bbox_union_count': 1, 'hue_max_difference': 10, 'hue_match': 100, # from RGB 007DBD 'subsequent_detects_required': 3 # noise will add some of this color, wait for a consistent sequence }, { 'artefact_name': 'homebase', 'detector_type': 'colormatch', 'min_size': 20, 'max_size': 700, 'mask': None, 'pixel_count_threshold': 400, 'bbox_union_count': 5, 'hue_max_difference': 10, 'hue_match': 19, # from RGB FFA616 'subsequent_detects_required': 3 }, { 'artefact_name': 'rover', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 700, 'mask': [180, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order - only look in lower half of screen 'pixel_count_threshold': 150, 'bbox_union_count': 10, 'hue_max_difference': 3, 'hue_match': 27, # from RGB FFA616 'subsequent_detects_required': 1 }, { 'artefact_name': 'excavator_arm', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 200, 'mask': [0, 120, 0, CAMERA_WIDTH], # [Y,X] order 'pixel_count_threshold': 150, 'bbox_union_count': 3, 'hue_max_difference': 3, 'hue_match': 27, # from RGB FFA616 'subsequent_detects_required': 1 } ] self.detect_sequences = {} def stdout(self, *args, **kwargs): # maybe refactor to Node? output = StringIO() print(*args, file=output, **kwargs) contents = output.getvalue().strip() output.close() # self.publish('stdout', contents) print(contents) def waitForImage(self): self.left_image = self.right_image = None while self.left_image is None or self.right_image is None: self.time, channel, data = self.listen() if channel == "left_image": self.left_image = data elif channel == "right_image":
return self.time def run(self): try: dropped = 0 while True: now = self.publish("dropped", dropped) dropped = -1 timestamp = now while timestamp <= now: # this thread is always running but wait and drop images if simulation is slower timestamp = self.waitForImage() dropped += 1 self.detect_and_publish(self.left_image, self.right_image) except BusShutdownException: pass def detect_and_publish(self, left_image, right_image): results = self.detect(left_image, right_image) for r in results: self.publish('artf', r) def detect(self, left_image, right_image): results = [] limg = cv2.imdecode(np.frombuffer(left_image, dtype=np.uint8), cv2.IMREAD_COLOR) rimg = cv2.imdecode(np.frombuffer(right_image, dtype=np.uint8), cv2.IMREAD_COLOR) if self.width is None: self.stdout('Image resolution', limg.shape) self.width = limg.shape[1] assert self.width == limg.shape[1], (self.width, limg.shape[1]) def box_area(b): return b[2]*b[3] limg_rgb = cv2.cvtColor(limg, cv2.COLOR_BGR2RGB) rimg_rgb = cv2.cvtColor(rimg, cv2.COLOR_BGR2RGB) hsv = cv2.cvtColor(limg, cv2.COLOR_BGR2HSV) hsv_blurred = cv2.medianBlur(hsv,5) # some frames have noise, need to blur otherwise threshold doesn't work objects_detected = [] for c in self.detectors: if c['artefact_name'] not in self.look_for_artefacts: continue if c['artefact_name'] not in self.detect_sequences: self.detect_sequences[c['artefact_name']] = 0 if c['detector_type'] == 'colormatch': lower_hue = np.array([c['hue_match'] - c['hue_max_difference'],50,50]) upper_hue = np.array([c['hue_match'] + c['hue_max_difference'],255,255]) # Threshold the HSV image to get only the matching colors mask = cv2.inRange(hsv_blurred, lower_hue, upper_hue) if c['mask'] is not None: m = np.zeros([CAMERA_HEIGHT,CAMERA_WIDTH], dtype=np.uint8) m[c['mask'][0]:c['mask'][1],c['mask'][2]:c['mask'][3]] = 255 mask &= m bboxes = [] contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) contours = contours[0] if len(contours) == 2 else contours[1] for cont in 
contours: contours_poly = cv2.approxPolyDP(cont, 3, True) x,y,w,h = cv2.boundingRect(contours_poly) if w > 1 or h > 1: # ignore isolated pixels bboxes.append([int(x),int(y),int(w),int(h)]) if len(bboxes) > 0: sb = sorted(bboxes, key = box_area, reverse = True)[:c['bbox_union_count']] bbox = sb[0] for b in sb[1:]: bbox = union(bbox,b) x, y, w, h = bbox match_count = cv2.countNonZero(mask[y:y+h,x:x+w]) if ( match_count > c['pixel_count_threshold'] and w >= c['min_size'] and h >= c['min_size'] and w <= c['max_size'] and h <= c['max_size'] ): # print ("%s match count: %d; [%d %d %d %d]" % (c['artefact_name'], match_count, x, y, w, h)) objects_detected.append(c['artefact_name']) if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row self.detect_sequences[c['artefact_name']] += 1 else: if self.estimate_distance: disp = self.stereo_calc.compute(limg_rgb, rimg_rgb).astype(np.float32) / 16.0 points = cv2.reprojectImageTo3D(disp, self.Q) matching_points = points[mask != 0] distances = matching_points[:,2] # third column are Z coords (distances) mean = np.mean(distances) sd = np.std(distances) distances_clean = [x for x in distances if mean - 2 * sd < x < mean + 2 * sd] #print("Artf distance: min %.1f median: %.1f" % (min(distances), median(distances))) if len(distances_clean) == 0: distances_clean = distances # print("Artf cleaned: min %.1f median: %.1f" % (min(final_list), median(final_list))) dist = max(0.0, min(distances_clean)) # subtract about half length of the rover else: dist = 0.0 results.append((c['artefact_name'], int(x), int(y), int(w), int(h), int(match_count), float(dist))) if c['detector_type'] == 'classifier': lfound = c['classifier'].detectMultiScale(limg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size'])) rfound = c['classifier'].detectMultiScale(rimg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size'])) if 
len(lfound) > 0 and len(rfound) > 0: # only report if both cameras see it objects_detected.append(c['artefact_name']) if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row self.detect_sequences[c['artefact_name']] += 1 else: # TODO: tweak the filtering (blur and threshold), sometimes not all background is filtered out and the bbox looks bigger than it should be x,y,width,height = lfound[0] # print(self.time, "Pre: %d %d %d %d" % (x,y,width,height)) gray = cv2.cvtColor(limg_rgb[y:y+height, x:x+width], cv2.COLOR_BGR2GRAY) blur = cv2.medianBlur(gray,3) # some frames have noise, need to blur otherwise threshold doesn't work th, threshed = cv2.threshold(blur, 30, 255, cv2.THRESH_BINARY) coords = cv2.findNonZero(threshed) nonzerocount = cv2.countNonZero(threshed) nx, ny, nw, nh = cv2.boundingRect(coords) # print(self.time, "Post: %d %d %d %d" % (x+nx,y+ny,nw,nh)) results.append((c['artefact_name'], int(x+nx), int(y+ny), int(nw), int(nh), int(nonzerocount))) for artefact_name in self.detect_sequences.keys(): if artefact_name not in objects_detected: self.detect_sequences[artefact_name] = 0 return results def debug2dir(filename, out_dir): from osgar.logger import LogReader, lookup_stream_names from osgar.lib.serialize import deserialize names = lookup_stream_names(filename) assert 'detector.debug_artf' in names, names assert 'detector.artf' in names, names assert 'rosmsg.sim_time_sec' in names, names image_id = names.index('detector.debug_artf') + 1 artf_id = names.index('detector.artf') + 1 sim_sec_id = names.index('rosmsg.sim_time_sec') + 1 sim_time_sec = None image = None artf = None for dt, channel, data in LogReader(filename, only_stream_id=[image_id, artf_id, sim_sec_id]): data = deserialize(data) if channel == sim_sec_id: sim_time_sec = data elif channel == image_id: image = data assert artf is not None time_sec = sim_time_sec if sim_time_sec is not None else int(dt.total_seconds()) name = 
os.path.basename(filename)[:-4] + '-' + artf[0] + '-' + str(time_sec) + '.jpg' print(name) with open(os.path.join(out_dir, name), 'wb') as f: f.write(image) elif channel == artf_id: artf = data if __name__ == '__main__': from unittest.mock import MagicMock from queue import Queue import argparse import datetime import sys from osgar.bus import Bus parser = argparse.ArgumentParser(description='Run artifact detection and classification for given JPEG image') parser.add_argument('filename', help='JPEG filename') parser.add_argument('--debug2dir', help='dump clasified debug images into directory') parser.add_argument('-v', '--verbose', help='verbose mode', action='store_true') args = parser.parse_args() if args.debug2dir is not None: debug2dir(args.filename, args.debug2dir) sys.exit() with open(args.filename.replace('.npz', '.jpg'), 'rb') as f: jpeg_data = f.read() config = {'virtual_world': True} # for now logger = MagicMock() logger.register = MagicMock(return_value=1) def counter(): start = datetime.datetime.utcnow() while True: dt = datetime.datetime.utcnow() - start yield dt logger.write = MagicMock(side_effect=counter()) bus = Bus(logger) detector = ArtifactDetector(config, bus.handle('detector')) detector.verbose = args.verbose tester = bus.handle('tester') tester.register('scan', 'left_image', 'right_image', 'tick') bus.connect('tester.scan', 'detector.scan') bus.connect('tester.left_image', 'detector.left_image') bus.connect('tester.right_image', 'detector.right_image') bus.connect('detector.artf', 'tester.artf') bus.connect('tester.tick', 'tester.tick') bus.connect('detector.dropped', 'tester.dropped') tester.publish('scan', [2000]*270) # pretend that everything is at 2 meters detector.start() for i in range(10 + 1): # workaround for local minima a = tester.listen() # print(i, a) tester.sleep(0.01) tester.publish('left_image', jpeg_data) # TODO right image detector.request_stop() detector.join() tester.publish('tick', None) a = tester.listen() print(a) # vim: 
expandtab sw=4 ts=4
self.right_image = data
random_line_split
artifacts.py
""" Detect Cube Sat and Processing Plant artifacts """ from datetime import timedelta import os from io import StringIO from statistics import median import cv2 import numpy as np from pathlib import Path from osgar.node import Node from osgar.bus import BusShutdownException from moon.moonnode import CAMERA_WIDTH, CAMERA_HEIGHT, CAMERA_FOCAL_LENGTH curdir = Path(__file__).parent def union(a,b): x = min(a[0], b[0]) y = min(a[1], b[1]) w = max(a[0]+a[2], b[0]+b[2]) - x h = max(a[1]+a[3], b[1]+b[3]) - y return (x, y, w, h) class ArtifactDetector(Node): def __init__(self, config, bus): super().__init__(config, bus) bus.register("artf", "dropped") self.verbose = False self.dump_dir = None # optional debug ouput into directory self.scan = None # should laster initialize super() self.depth = None # more precise definiton of depth image self.width = None # detect from incoming images self.look_for_artefacts = config.get('artefacts', []) self.estimate_distance = config.get('estimate_distance', False) window_size = 5 min_disp = 16 num_disp = 192-min_disp blockSize = window_size uniquenessRatio = 7 speckleRange = 3 speckleWindowSize = 75 disp12MaxDiff = 200 P1 = 8*3*window_size**2 P2 = 32 * 3 * window_size ** 2 self.stereo_calc = cv2.StereoSGBM_create( minDisparity = min_disp, numDisparities = num_disp, blockSize = window_size, uniquenessRatio = uniquenessRatio, speckleRange = speckleRange, speckleWindowSize = speckleWindowSize, disp12MaxDiff = disp12MaxDiff, P1 = P1, P2 = P2 ) self.Q = np.float32([[1, 0, 0, -0.5*CAMERA_WIDTH], [0,-1, 0, 0.5*CAMERA_HEIGHT], # turn points 180 deg around x-axis, [0, 0, 0, CAMERA_FOCAL_LENGTH], # so that y-axis looks up [0, 0, 1/0.42, 0]]) self.detectors = [ { 'artefact_name': 'cubesat', 'detector_type': 'classifier', 'classifier': cv2.CascadeClassifier(str(curdir/'xml/cubesat.xml')), 'min_size': 5, 'max_size': 110, 'subsequent_detects_required': 3 }, { 'artefact_name': 'homebase', 'detector_type': 'classifier', 'classifier': 
cv2.CascadeClassifier(str(curdir/'xml/homebase.xml')), 'min_size': 20, 'max_size': 400, 'subsequent_detects_required': 3 }, { 'artefact_name': 'basemarker', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 500, 'mask': [CAMERA_HEIGHT//2, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order, look only in lower half of the screen (avoid solar panels) 'pixel_count_threshold': 100, 'bbox_union_count': 1, 'hue_max_difference': 10, 'hue_match': 100, # from RGB 007DBD 'subsequent_detects_required': 3 # noise will add some of this color, wait for a consistent sequence }, { 'artefact_name': 'homebase', 'detector_type': 'colormatch', 'min_size': 20, 'max_size': 700, 'mask': None, 'pixel_count_threshold': 400, 'bbox_union_count': 5, 'hue_max_difference': 10, 'hue_match': 19, # from RGB FFA616 'subsequent_detects_required': 3 }, { 'artefact_name': 'rover', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 700, 'mask': [180, CAMERA_HEIGHT, 0, CAMERA_WIDTH], # [Y,X] order - only look in lower half of screen 'pixel_count_threshold': 150, 'bbox_union_count': 10, 'hue_max_difference': 3, 'hue_match': 27, # from RGB FFA616 'subsequent_detects_required': 1 }, { 'artefact_name': 'excavator_arm', 'detector_type': 'colormatch', 'min_size': 10, 'max_size': 200, 'mask': [0, 120, 0, CAMERA_WIDTH], # [Y,X] order 'pixel_count_threshold': 150, 'bbox_union_count': 3, 'hue_max_difference': 3, 'hue_match': 27, # from RGB FFA616 'subsequent_detects_required': 1 } ] self.detect_sequences = {} def stdout(self, *args, **kwargs): # maybe refactor to Node? 
output = StringIO() print(*args, file=output, **kwargs) contents = output.getvalue().strip() output.close() # self.publish('stdout', contents) print(contents) def waitForImage(self): self.left_image = self.right_image = None while self.left_image is None or self.right_image is None: self.time, channel, data = self.listen() if channel == "left_image": self.left_image = data elif channel == "right_image": self.right_image = data return self.time def run(self): try: dropped = 0 while True: now = self.publish("dropped", dropped) dropped = -1 timestamp = now while timestamp <= now: # this thread is always running but wait and drop images if simulation is slower timestamp = self.waitForImage() dropped += 1 self.detect_and_publish(self.left_image, self.right_image) except BusShutdownException: pass def detect_and_publish(self, left_image, right_image): results = self.detect(left_image, right_image) for r in results: self.publish('artf', r) def detect(self, left_image, right_image): results = [] limg = cv2.imdecode(np.frombuffer(left_image, dtype=np.uint8), cv2.IMREAD_COLOR) rimg = cv2.imdecode(np.frombuffer(right_image, dtype=np.uint8), cv2.IMREAD_COLOR) if self.width is None: self.stdout('Image resolution', limg.shape) self.width = limg.shape[1] assert self.width == limg.shape[1], (self.width, limg.shape[1]) def box_area(b): return b[2]*b[3] limg_rgb = cv2.cvtColor(limg, cv2.COLOR_BGR2RGB) rimg_rgb = cv2.cvtColor(rimg, cv2.COLOR_BGR2RGB) hsv = cv2.cvtColor(limg, cv2.COLOR_BGR2HSV) hsv_blurred = cv2.medianBlur(hsv,5) # some frames have noise, need to blur otherwise threshold doesn't work objects_detected = [] for c in self.detectors: if c['artefact_name'] not in self.look_for_artefacts: continue if c['artefact_name'] not in self.detect_sequences: self.detect_sequences[c['artefact_name']] = 0 if c['detector_type'] == 'colormatch': lower_hue = np.array([c['hue_match'] - c['hue_max_difference'],50,50]) upper_hue = np.array([c['hue_match'] + c['hue_max_difference'],255,255]) 
# Threshold the HSV image to get only the matching colors mask = cv2.inRange(hsv_blurred, lower_hue, upper_hue) if c['mask'] is not None: m = np.zeros([CAMERA_HEIGHT,CAMERA_WIDTH], dtype=np.uint8) m[c['mask'][0]:c['mask'][1],c['mask'][2]:c['mask'][3]] = 255 mask &= m bboxes = [] contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) contours = contours[0] if len(contours) == 2 else contours[1] for cont in contours: contours_poly = cv2.approxPolyDP(cont, 3, True) x,y,w,h = cv2.boundingRect(contours_poly) if w > 1 or h > 1: # ignore isolated pixels bboxes.append([int(x),int(y),int(w),int(h)]) if len(bboxes) > 0: sb = sorted(bboxes, key = box_area, reverse = True)[:c['bbox_union_count']] bbox = sb[0] for b in sb[1:]: bbox = union(bbox,b) x, y, w, h = bbox match_count = cv2.countNonZero(mask[y:y+h,x:x+w]) if ( match_count > c['pixel_count_threshold'] and w >= c['min_size'] and h >= c['min_size'] and w <= c['max_size'] and h <= c['max_size'] ): # print ("%s match count: %d; [%d %d %d %d]" % (c['artefact_name'], match_count, x, y, w, h)) objects_detected.append(c['artefact_name']) if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row self.detect_sequences[c['artefact_name']] += 1 else: if self.estimate_distance: disp = self.stereo_calc.compute(limg_rgb, rimg_rgb).astype(np.float32) / 16.0 points = cv2.reprojectImageTo3D(disp, self.Q) matching_points = points[mask != 0] distances = matching_points[:,2] # third column are Z coords (distances) mean = np.mean(distances) sd = np.std(distances) distances_clean = [x for x in distances if mean - 2 * sd < x < mean + 2 * sd] #print("Artf distance: min %.1f median: %.1f" % (min(distances), median(distances))) if len(distances_clean) == 0: distances_clean = distances # print("Artf cleaned: min %.1f median: %.1f" % (min(final_list), median(final_list))) dist = max(0.0, min(distances_clean)) # subtract about half length of the rover else: 
dist = 0.0 results.append((c['artefact_name'], int(x), int(y), int(w), int(h), int(match_count), float(dist))) if c['detector_type'] == 'classifier': lfound = c['classifier'].detectMultiScale(limg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size'])) rfound = c['classifier'].detectMultiScale(rimg_rgb, minSize =(c['min_size'], c['min_size']), maxSize =(c['max_size'], c['max_size'])) if len(lfound) > 0 and len(rfound) > 0: # only report if both cameras see it objects_detected.append(c['artefact_name']) if self.detect_sequences[c['artefact_name']] < c['subsequent_detects_required']: # do not act until you have detections in a row self.detect_sequences[c['artefact_name']] += 1 else: # TODO: tweak the filtering (blur and threshold), sometimes not all background is filtered out and the bbox looks bigger than it should be x,y,width,height = lfound[0] # print(self.time, "Pre: %d %d %d %d" % (x,y,width,height)) gray = cv2.cvtColor(limg_rgb[y:y+height, x:x+width], cv2.COLOR_BGR2GRAY) blur = cv2.medianBlur(gray,3) # some frames have noise, need to blur otherwise threshold doesn't work th, threshed = cv2.threshold(blur, 30, 255, cv2.THRESH_BINARY) coords = cv2.findNonZero(threshed) nonzerocount = cv2.countNonZero(threshed) nx, ny, nw, nh = cv2.boundingRect(coords) # print(self.time, "Post: %d %d %d %d" % (x+nx,y+ny,nw,nh)) results.append((c['artefact_name'], int(x+nx), int(y+ny), int(nw), int(nh), int(nonzerocount))) for artefact_name in self.detect_sequences.keys(): if artefact_name not in objects_detected: self.detect_sequences[artefact_name] = 0 return results def debug2dir(filename, out_dir): from osgar.logger import LogReader, lookup_stream_names from osgar.lib.serialize import deserialize names = lookup_stream_names(filename) assert 'detector.debug_artf' in names, names assert 'detector.artf' in names, names assert 'rosmsg.sim_time_sec' in names, names image_id = names.index('detector.debug_artf') + 1 artf_id = 
names.index('detector.artf') + 1 sim_sec_id = names.index('rosmsg.sim_time_sec') + 1 sim_time_sec = None image = None artf = None for dt, channel, data in LogReader(filename, only_stream_id=[image_id, artf_id, sim_sec_id]): data = deserialize(data) if channel == sim_sec_id: sim_time_sec = data elif channel == image_id: image = data assert artf is not None time_sec = sim_time_sec if sim_time_sec is not None else int(dt.total_seconds()) name = os.path.basename(filename)[:-4] + '-' + artf[0] + '-' + str(time_sec) + '.jpg' print(name) with open(os.path.join(out_dir, name), 'wb') as f: f.write(image) elif channel == artf_id: artf = data if __name__ == '__main__':
# vim: expandtab sw=4 ts=4
from unittest.mock import MagicMock from queue import Queue import argparse import datetime import sys from osgar.bus import Bus parser = argparse.ArgumentParser(description='Run artifact detection and classification for given JPEG image') parser.add_argument('filename', help='JPEG filename') parser.add_argument('--debug2dir', help='dump clasified debug images into directory') parser.add_argument('-v', '--verbose', help='verbose mode', action='store_true') args = parser.parse_args() if args.debug2dir is not None: debug2dir(args.filename, args.debug2dir) sys.exit() with open(args.filename.replace('.npz', '.jpg'), 'rb') as f: jpeg_data = f.read() config = {'virtual_world': True} # for now logger = MagicMock() logger.register = MagicMock(return_value=1) def counter(): start = datetime.datetime.utcnow() while True: dt = datetime.datetime.utcnow() - start yield dt logger.write = MagicMock(side_effect=counter()) bus = Bus(logger) detector = ArtifactDetector(config, bus.handle('detector')) detector.verbose = args.verbose tester = bus.handle('tester') tester.register('scan', 'left_image', 'right_image', 'tick') bus.connect('tester.scan', 'detector.scan') bus.connect('tester.left_image', 'detector.left_image') bus.connect('tester.right_image', 'detector.right_image') bus.connect('detector.artf', 'tester.artf') bus.connect('tester.tick', 'tester.tick') bus.connect('detector.dropped', 'tester.dropped') tester.publish('scan', [2000]*270) # pretend that everything is at 2 meters detector.start() for i in range(10 + 1): # workaround for local minima a = tester.listen() # print(i, a) tester.sleep(0.01) tester.publish('left_image', jpeg_data) # TODO right image detector.request_stop() detector.join() tester.publish('tick', None) a = tester.listen() print(a)
conditional_block
main.go
package main // 3840 * 2160 = 8 294 400 import ( "github.com/SynthBrain/synthBrain/app" ) func
() { // Create and run application app.Create().Run() } //package main // // //import ( // "flag" // "github.com/SynthBrain/synthBrain/baseStruct" // "github.com/SynthBrain/synthBrain/myGui" // "github.com/g3n/engine/app" // "github.com/g3n/engine/camera" // "github.com/g3n/engine/core" // "github.com/g3n/engine/geometry" // "github.com/g3n/engine/gls" // "github.com/g3n/engine/graphic" // "github.com/g3n/engine/gui" // "github.com/g3n/engine/light" // "github.com/g3n/engine/material" // "github.com/g3n/engine/math32" // "github.com/g3n/engine/renderer" // // "github.com/g3n/engine/util/helper" // "github.com/g3n/engine/util/logger" // "github.com/g3n/engine/window" // // "time" //) // //var ( // // TODO uncomment and implement usage of the following flags // //oFullScreen = flag.Bool("fullscreen", false, "Starts application with full screen") // //oSwapInterval = flag.Int("swapinterval", -1, "Sets the swap buffers interval to this value") // oHideFPS = flag.Bool("hidefps", false, "Do now show calculated FPS in the GUI") // oUpdateFPS = flag.Uint("updatefps", 1000, "Time interval in milliseconds to update the FPS in the GUI") // oTargetFPS = flag.Uint("targetfps", 60, "Sets the frame rate in frames per second") // oNoglErrors = flag.Bool("noglerrors", false, "Do not check OpenGL errors at each call (may increase FPS)") // oCpuProfile = flag.String("cpuprofile", "", "Activate cpu profiling writing profile to the specified file") // oExecTrace = flag.String("exectrace", "", "Activate execution tracer writing data to the specified file") // oNogui = flag.Bool("nogui", false, "Do not show the GUI, only the specified demo") // oLogs = flag.String("logs", "", "Set log levels for packages. 
Ex: gui:debug,gls:info") // oStats = flag.Bool("stats", false, "Shows statistics control panel in the GUI") // oRenderStats = flag.Bool("renderstats", false, "Shows gui renderer statistics in the console") //) // //var log *logger.Logger // ///* // Рисовать только тех что имеют достаточный уровень активность и окончательно не затухли //*/ //func main() { // // Create application and scene // app := app.App() // scene := core.NewNode() // // // Set the scene to be managed by the gui manager // gui.Manager().Set(scene) // // // Create SynthBrain struct // synB := new(baseStruct.SynthBrain) // //frameRater := synB.FrameRater // // // Create perspective camera // cam := camera.New(1) // cam.SetPosition(0, 0, 3) // scene.Add(cam) // // // Set up orbit control for the camera // camera.NewOrbitControl(cam) // // // Set up callback to update viewport and camera aspect ratio when the window is resized // onResize := func(evname string, ev interface{}) { // // Get framebuffer size and update viewport accordingly // width, height := app.GetSize() // app.Gls().Viewport(0, 0, int32(width), int32(height)) // // Update the camera's aspect ratio // cam.SetAspect(float32(width) / float32(height)) // } // app.Subscribe(window.OnWindowSize, onResize) // onResize("", nil) // // // Create a blue torus and add it to the scene // //geom := geometry.NewTorus(1, .4, 12, 32, math32.Pi*2) // geom := geometry.NewTorus(0, .4, 3, 3, math32.Pi*2) // mat := material.NewStandard(math32.NewColor("DarkBlue")) // mesh := graphic.NewMesh(geom, mat) // scene.Add(mesh) // // // Create and add a button to the scene // btn := gui.NewButton("Make Red") // btn.SetPosition(30, 40) // btn.SetSize(40, 40) // btn.Subscribe(gui.OnClick, func(name string, ev interface{}) { // mat.SetColor(math32.NewColor("DarkRed")) // }) // scene.Add(btn) // // Create and add a button to the scene // btn1 := gui.NewButton("Make Blue") // btn1.SetPosition(30, 90) // btn1.SetSize(40, 40) // btn1.Subscribe(gui.OnClick, func(name 
string, ev interface{}) { // mat.SetColor(math32.NewColor("DarkBlue")) // }) // scene.Add(btn1) // // Create and add a button to the scene // onOff := false // chOnOffFlag := make(chan bool, 1) // exit := myGui.Exit(30, 240, &onOff, app, chOnOffFlag) // scene.Add(exit) // // // Create and add a button to the scene // lbl := gui.NewLabel("FPS: ") // lbl.SetPosition(10, 10) // lbl.SetPaddings(2, 2, 2, 2) // scene.Add(lbl) // // // Create and add lights to the scene // scene.Add(light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.8)) // pointLight := light.NewPoint(&math32.Color{1, 1, 1}, 5.0) // pointLight.SetPosition(1, 0, 2) // scene.Add(pointLight) // // // Create and add an axis helper to the scene // scene.Add(helper.NewAxes(0.5)) // // // Set background color to gray // app.Gls().ClearColor(0.5, 0.5, 0.5, 1.0) // // //synB.InitLevel(0) // // now := time.Now() // newNow := time.Now() // //log.Info("Starting Render Loop") // //Run the application // app.Run(func(renderer *renderer.Renderer, deltaTime time.Duration) { // app.Gls().Clear(gls.DEPTH_BUFFER_BIT | gls.STENCIL_BUFFER_BIT | gls.COLOR_BUFFER_BIT) // newNow = time.Now() // timeDelta := now.Sub(newNow) // now = newNow // //fps, pfps, _ := frameRater.FPS(time.Duration(*oUpdateFPS) * time.Millisecond) // // // //lbl.SetText("FPS: ") // //lbl.SetText("FPS: " + fmt.Sprintf("%3.1f / %3.1f", fps, pfps) ) // synB.Update(timeDelta.Seconds()) // renderer.Render(scene, cam) // }) // // // ABROAD********************************************************************************************** // // //// OpenGL functions must be executed in the same thread where // //// the context was created (by window.New()) // //runtime.LockOSThread() // // // //// Parse command line flags // //showLog := flag.Bool("debug", false, "display the debug log") // //flag.Parse() // // // //// Create logger // //log = logger.New("SynthBrain", nil) // //log.AddWriter(logger.NewConsole(false)) // //log.SetFormat(logger.FTIME | logger.FMICROS) // 
//if *showLog == true { // // log.SetLevel(logger.DEBUG) // //} else { // // log.SetLevel(logger.INFO) // //} // //log.Info("Initializing SynthBrain") // // // //// Create SynthBrain struct // //synB := new(baseStruct.SynthBrain) // // // //// Manually scan the $GOPATH directories to find the data directory // //rawPaths := os.Getenv("GOPATH") // //paths := strings.Split(rawPaths, ":") // //for _, j := range paths { // // // Checks data path // // path := filepath.Join(j, "src", "github.com", "SynthBrain", "synthBrain") // // if _, err := os.Stat(path); err == nil { // // synB.DataDir = path // // } // //} // // // //// Get the window manager // //var err error // //synB.Wmgr, err = window.Manager("glfw") // //if err != nil { // // panic(err) // //} // // // //// Create window and OpenGL context // //synB.Win, err = synB.Wmgr.CreateWindow(900, 640, "SynthBrain", false) // //if err != nil { // // panic(err) // //} // // // //// Create OpenGL state // //synB.Gs, err = gls.New() // //if err != nil { // // panic(err) // //} // // // //// Speed up a bit by not checking OpenGL errors // //synB.Gs.SetCheckErrors(false) // // // //// Sets window background color // //synB.Gs.ClearColor(0, 0.2, 0.4, 1) //(0.1, 0.1, 0.1, 1.0) // // // //// Sets the OpenGL viewport size the same as the window size // //// This normally should be updated if the window is resized. 
// //width, height := synB.Win.Size() // //synB.Gs.Viewport(0, 0, int32(width), int32(height)) // // // //// Creates GUI root panel // //synB.Root = gui.NewRoot(synB.Gs, synB.Win) // //synB.Root.SetSize(float32(width), float32(height)) // // // //// Update window if resize // //synB.Win.Subscribe(window.OnWindowSize, func(evname string, ev interface{}) { // // width, height := synB.Win.Size() // // synB.Gs.Viewport(0, 0, int32(width), int32(height)) // // synB.Root.SetSize(float32(width), float32(height)) // // aspect := float32(width) / float32(height) // // synB.Camera.SetAspect(aspect) // //}) // // // ////add GUI********************************************************* // //// Create and add a label to the root panel // //synB.LabelFps = myGui.LabelFps(10, 10, "240") // //synB.Root.Add(synB.LabelFps) // // // //// Create and add button 1 to the root panel // //onOff := false // //chOnOffFlag := make(chan bool, 1) // //synB.WebCam = myGui.WebCam(10, 40, &onOff, chOnOffFlag) // //synB.Root.Add(synB.WebCam) // // // //// Create and add exit button to the root panel // //synB.Exit = myGui.Exit(10, 70, &onOff, synB.Win, chOnOffFlag) // //synB.Root.Add(synB.Exit) // ////**************************************************************** // // // //// Creates a renderer and adds default shaders // //synB.Renderer = renderer.NewRenderer(synB.Gs) // ////g.renderer.SetSortObjects(false) // //err = synB.Renderer.AddDefaultShaders() // //if err != nil { // // panic(err) // //} // //synB.Renderer.SetGui(synB.Root) // // // //// Adds a perspective camera to the scene // //// The camera aspect ratio should be updated if the window is resized. 
// //aspect := float32(width) / float32(height) // //synB.Camera = camera.NewPerspective(65, aspect, 0.01, 1000) // //synB.Camera.SetPosition(10, 10, 10) // //synB.Camera.LookAt(&math32.Vector3{0, 0, 0}) // // // //// Create orbit control and set limits // //synB.OrbitControl = control.NewOrbitControl(synB.Camera, synB.Win) // //synB.OrbitControl.Enabled = true //false // //synB.OrbitControl.EnablePan = false // //synB.OrbitControl.MaxPolarAngle = 2 * math32.Pi / 3 // //synB.OrbitControl.MinDistance = 0.1 // //synB.OrbitControl.MaxDistance = 10000 // // // //// Create main scene and child levelScene // //synB.Scene = core.NewNode() // //synB.LevelScene = core.NewNode() // //synB.Scene.Add(synB.Camera) // //synB.Scene.Add(synB.LevelScene) // //synB.StepDelta = math32.NewVector2(0, 0) // //synB.Renderer.SetScene(synB.Scene) // // // //// Add white ambient light to the scene // //ambLight := light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.4) // //synB.Scene.Add(ambLight) // // // //synB.LevelStyle = drawing3D.NewBaseStyle() // // // ////synB.SetupGui(width, height) // //synB.RenderFrame() // // // ////synB.LoadSkyBox() // //synB.LoadLevels() // // // //size := 10 // //gridHelp := graphic.NewGridHelper(float32(size), 1, math32.NewColor("LightGrey")) // //gridHelp.SetPosition(float32(size/2), -0.2, float32(size/2)) // //synB.Scene.Add(gridHelp) // // // //// Done Loading - hide the loading label, show the menu, and initialize the level // ////synB.LoadingLabel.SetVisible(false) // // // //synB.InitLevel(0) // // // //now := time.Now() // //newNow := time.Now() // //log.Info("Starting Render Loop") // //// Start the render loop // //for !synB.Win.ShouldClose() { // // newNow = time.Now() // // timeDelta := now.Sub(newNow) // // now = newNow // // //fmt.Println(now) // // // // //vision.ReadImg(synB.DataDir, "/assets/0.jpg") // // synB.Update(timeDelta.Seconds()) // // synB.RenderFrame() // //} // // //fmt.Printf("app was running for %f \n", 
application.Get().RunSeconds()) //} // //// go func() { //// for { //// if a, b, c := app.FrameRater().FPS(60); a > 0 && b > 0 && c == true { //// fmt.Println("FPS ", int(b)) //// } //// } //// }() ////fps := float32(app.FrameCount()) / application.Get().RunSeconds() ////go myGui.LabelFpsTest(10, 10, strconv.Itoa(int(app.FrameCount()) / int(application.Get().RunSeconds())), app) // ////IndCh := make(chan int) // //// fmt.Println("Start NeuroMatrix") //// app, err := application.Create(application.Options{ //// Title: "NeuroMatrix", //// Width: 1280, //// Height: 600, //// }) //// if err != nil { //// panic(err) //// } // //// // add GUI********************************************************* //// // Create and add a label to the root panel //// l1 := myGui.LabelFps(10, 10, "240") //// app.Gui().Root().Add(l1) // //// // Create and add button 1 to the root panel //// onOff := false //// b1 := myGui.WebCam(10, 40, &onOff, app) //// app.Gui().Root().Add(b1) // //// // Create and add exit button to the root panel //// b2 := myGui.Exit(10, 70, &onOff, app) //// app.Gui().Root().Add(b2) ////****************************************************************** // //// // Создать и протестировать линии - синапсы // //// go func() { //// myDots := 0 //// maxD := 700 //// dotlist := make(map[int]*drawing3D.Neuron3DBody) // //// for { //// if myDots < maxD { //// dotlist[myDots] = drawing3D.NewBody(app, math32.NewColor("White")) //// dotlist[myDots].CreateBody() //// //dotlist[myDots].SetPosition(float32(rand.Int31n(20)), float32(rand.Int31n(20)), float32(rand.Int31n(20))) //// myDots++ //// //fmt.Println(len(dotlist), myDots) //// } //// if myDots == maxD { //// for _, v := range dotlist { //// v.SetPosition(float32(rand.Int31n(20)), float32(rand.Int31n(20)), float32(rand.Int31n(20))) //// time.Sleep(time.Millisecond * 10) // //// } //// } //// } //// }() // //// //Add lights to the scene //// helpers.LightsScene(app) // //// // Add an axis helper to the scene //// 
helpers.AxisHelper(0.5, app) // //// // Add an grid helper to the scene //// helpers.GridHelper(10, app) // //// // Add camera to the scene //// app.CameraPersp().SetPosition(15, 15, 15) //// //app.Gl().ClearColor(0, 0.5, 0.7, 1) //// app.Gl().ClearColor(0, 0.2, 0.4, 1) // //// // Start application //// err = app.Run() //// if err != nil { //// panic(err) //// }
main
identifier_name
main.go
package main // 3840 * 2160 = 8 294 400 import ( "github.com/SynthBrain/synthBrain/app" ) func main() { // Create and run application app.Create().Run() } //package main // // //import ( // "flag" // "github.com/SynthBrain/synthBrain/baseStruct" // "github.com/SynthBrain/synthBrain/myGui" // "github.com/g3n/engine/app" // "github.com/g3n/engine/camera" // "github.com/g3n/engine/core" // "github.com/g3n/engine/geometry" // "github.com/g3n/engine/gls" // "github.com/g3n/engine/graphic" // "github.com/g3n/engine/gui" // "github.com/g3n/engine/light" // "github.com/g3n/engine/material" // "github.com/g3n/engine/math32" // "github.com/g3n/engine/renderer" // // "github.com/g3n/engine/util/helper" // "github.com/g3n/engine/util/logger" // "github.com/g3n/engine/window" // // "time" //) // //var ( // // TODO uncomment and implement usage of the following flags // //oFullScreen = flag.Bool("fullscreen", false, "Starts application with full screen") // //oSwapInterval = flag.Int("swapinterval", -1, "Sets the swap buffers interval to this value") // oHideFPS = flag.Bool("hidefps", false, "Do now show calculated FPS in the GUI") // oUpdateFPS = flag.Uint("updatefps", 1000, "Time interval in milliseconds to update the FPS in the GUI") // oTargetFPS = flag.Uint("targetfps", 60, "Sets the frame rate in frames per second") // oNoglErrors = flag.Bool("noglerrors", false, "Do not check OpenGL errors at each call (may increase FPS)") // oCpuProfile = flag.String("cpuprofile", "", "Activate cpu profiling writing profile to the specified file") // oExecTrace = flag.String("exectrace", "", "Activate execution tracer writing data to the specified file") // oNogui = flag.Bool("nogui", false, "Do not show the GUI, only the specified demo") // oLogs = flag.String("logs", "", "Set log levels for packages. 
Ex: gui:debug,gls:info") // oStats = flag.Bool("stats", false, "Shows statistics control panel in the GUI") // oRenderStats = flag.Bool("renderstats", false, "Shows gui renderer statistics in the console") //) // //var log *logger.Logger // ///* // Рисовать только тех что имеют достаточный уровень активность и окончательно не затухли //*/ //func main() {
// app := app.App() // scene := core.NewNode() // // // Set the scene to be managed by the gui manager // gui.Manager().Set(scene) // // // Create SynthBrain struct // synB := new(baseStruct.SynthBrain) // //frameRater := synB.FrameRater // // // Create perspective camera // cam := camera.New(1) // cam.SetPosition(0, 0, 3) // scene.Add(cam) // // // Set up orbit control for the camera // camera.NewOrbitControl(cam) // // // Set up callback to update viewport and camera aspect ratio when the window is resized // onResize := func(evname string, ev interface{}) { // // Get framebuffer size and update viewport accordingly // width, height := app.GetSize() // app.Gls().Viewport(0, 0, int32(width), int32(height)) // // Update the camera's aspect ratio // cam.SetAspect(float32(width) / float32(height)) // } // app.Subscribe(window.OnWindowSize, onResize) // onResize("", nil) // // // Create a blue torus and add it to the scene // //geom := geometry.NewTorus(1, .4, 12, 32, math32.Pi*2) // geom := geometry.NewTorus(0, .4, 3, 3, math32.Pi*2) // mat := material.NewStandard(math32.NewColor("DarkBlue")) // mesh := graphic.NewMesh(geom, mat) // scene.Add(mesh) // // // Create and add a button to the scene // btn := gui.NewButton("Make Red") // btn.SetPosition(30, 40) // btn.SetSize(40, 40) // btn.Subscribe(gui.OnClick, func(name string, ev interface{}) { // mat.SetColor(math32.NewColor("DarkRed")) // }) // scene.Add(btn) // // Create and add a button to the scene // btn1 := gui.NewButton("Make Blue") // btn1.SetPosition(30, 90) // btn1.SetSize(40, 40) // btn1.Subscribe(gui.OnClick, func(name string, ev interface{}) { // mat.SetColor(math32.NewColor("DarkBlue")) // }) // scene.Add(btn1) // // Create and add a button to the scene // onOff := false // chOnOffFlag := make(chan bool, 1) // exit := myGui.Exit(30, 240, &onOff, app, chOnOffFlag) // scene.Add(exit) // // // Create and add a button to the scene // lbl := gui.NewLabel("FPS: ") // lbl.SetPosition(10, 10) // 
lbl.SetPaddings(2, 2, 2, 2) // scene.Add(lbl) // // // Create and add lights to the scene // scene.Add(light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.8)) // pointLight := light.NewPoint(&math32.Color{1, 1, 1}, 5.0) // pointLight.SetPosition(1, 0, 2) // scene.Add(pointLight) // // // Create and add an axis helper to the scene // scene.Add(helper.NewAxes(0.5)) // // // Set background color to gray // app.Gls().ClearColor(0.5, 0.5, 0.5, 1.0) // // //synB.InitLevel(0) // // now := time.Now() // newNow := time.Now() // //log.Info("Starting Render Loop") // //Run the application // app.Run(func(renderer *renderer.Renderer, deltaTime time.Duration) { // app.Gls().Clear(gls.DEPTH_BUFFER_BIT | gls.STENCIL_BUFFER_BIT | gls.COLOR_BUFFER_BIT) // newNow = time.Now() // timeDelta := now.Sub(newNow) // now = newNow // //fps, pfps, _ := frameRater.FPS(time.Duration(*oUpdateFPS) * time.Millisecond) // // // //lbl.SetText("FPS: ") // //lbl.SetText("FPS: " + fmt.Sprintf("%3.1f / %3.1f", fps, pfps) ) // synB.Update(timeDelta.Seconds()) // renderer.Render(scene, cam) // }) // // // ABROAD********************************************************************************************** // // //// OpenGL functions must be executed in the same thread where // //// the context was created (by window.New()) // //runtime.LockOSThread() // // // //// Parse command line flags // //showLog := flag.Bool("debug", false, "display the debug log") // //flag.Parse() // // // //// Create logger // //log = logger.New("SynthBrain", nil) // //log.AddWriter(logger.NewConsole(false)) // //log.SetFormat(logger.FTIME | logger.FMICROS) // //if *showLog == true { // // log.SetLevel(logger.DEBUG) // //} else { // // log.SetLevel(logger.INFO) // //} // //log.Info("Initializing SynthBrain") // // // //// Create SynthBrain struct // //synB := new(baseStruct.SynthBrain) // // // //// Manually scan the $GOPATH directories to find the data directory // //rawPaths := os.Getenv("GOPATH") // //paths := 
strings.Split(rawPaths, ":") // //for _, j := range paths { // // // Checks data path // // path := filepath.Join(j, "src", "github.com", "SynthBrain", "synthBrain") // // if _, err := os.Stat(path); err == nil { // // synB.DataDir = path // // } // //} // // // //// Get the window manager // //var err error // //synB.Wmgr, err = window.Manager("glfw") // //if err != nil { // // panic(err) // //} // // // //// Create window and OpenGL context // //synB.Win, err = synB.Wmgr.CreateWindow(900, 640, "SynthBrain", false) // //if err != nil { // // panic(err) // //} // // // //// Create OpenGL state // //synB.Gs, err = gls.New() // //if err != nil { // // panic(err) // //} // // // //// Speed up a bit by not checking OpenGL errors // //synB.Gs.SetCheckErrors(false) // // // //// Sets window background color // //synB.Gs.ClearColor(0, 0.2, 0.4, 1) //(0.1, 0.1, 0.1, 1.0) // // // //// Sets the OpenGL viewport size the same as the window size // //// This normally should be updated if the window is resized. 
// //width, height := synB.Win.Size() // //synB.Gs.Viewport(0, 0, int32(width), int32(height)) // // // //// Creates GUI root panel // //synB.Root = gui.NewRoot(synB.Gs, synB.Win) // //synB.Root.SetSize(float32(width), float32(height)) // // // //// Update window if resize // //synB.Win.Subscribe(window.OnWindowSize, func(evname string, ev interface{}) { // // width, height := synB.Win.Size() // // synB.Gs.Viewport(0, 0, int32(width), int32(height)) // // synB.Root.SetSize(float32(width), float32(height)) // // aspect := float32(width) / float32(height) // // synB.Camera.SetAspect(aspect) // //}) // // // ////add GUI********************************************************* // //// Create and add a label to the root panel // //synB.LabelFps = myGui.LabelFps(10, 10, "240") // //synB.Root.Add(synB.LabelFps) // // // //// Create and add button 1 to the root panel // //onOff := false // //chOnOffFlag := make(chan bool, 1) // //synB.WebCam = myGui.WebCam(10, 40, &onOff, chOnOffFlag) // //synB.Root.Add(synB.WebCam) // // // //// Create and add exit button to the root panel // //synB.Exit = myGui.Exit(10, 70, &onOff, synB.Win, chOnOffFlag) // //synB.Root.Add(synB.Exit) // ////**************************************************************** // // // //// Creates a renderer and adds default shaders // //synB.Renderer = renderer.NewRenderer(synB.Gs) // ////g.renderer.SetSortObjects(false) // //err = synB.Renderer.AddDefaultShaders() // //if err != nil { // // panic(err) // //} // //synB.Renderer.SetGui(synB.Root) // // // //// Adds a perspective camera to the scene // //// The camera aspect ratio should be updated if the window is resized. 
// //aspect := float32(width) / float32(height) // //synB.Camera = camera.NewPerspective(65, aspect, 0.01, 1000) // //synB.Camera.SetPosition(10, 10, 10) // //synB.Camera.LookAt(&math32.Vector3{0, 0, 0}) // // // //// Create orbit control and set limits // //synB.OrbitControl = control.NewOrbitControl(synB.Camera, synB.Win) // //synB.OrbitControl.Enabled = true //false // //synB.OrbitControl.EnablePan = false // //synB.OrbitControl.MaxPolarAngle = 2 * math32.Pi / 3 // //synB.OrbitControl.MinDistance = 0.1 // //synB.OrbitControl.MaxDistance = 10000 // // // //// Create main scene and child levelScene // //synB.Scene = core.NewNode() // //synB.LevelScene = core.NewNode() // //synB.Scene.Add(synB.Camera) // //synB.Scene.Add(synB.LevelScene) // //synB.StepDelta = math32.NewVector2(0, 0) // //synB.Renderer.SetScene(synB.Scene) // // // //// Add white ambient light to the scene // //ambLight := light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.4) // //synB.Scene.Add(ambLight) // // // //synB.LevelStyle = drawing3D.NewBaseStyle() // // // ////synB.SetupGui(width, height) // //synB.RenderFrame() // // // ////synB.LoadSkyBox() // //synB.LoadLevels() // // // //size := 10 // //gridHelp := graphic.NewGridHelper(float32(size), 1, math32.NewColor("LightGrey")) // //gridHelp.SetPosition(float32(size/2), -0.2, float32(size/2)) // //synB.Scene.Add(gridHelp) // // // //// Done Loading - hide the loading label, show the menu, and initialize the level // ////synB.LoadingLabel.SetVisible(false) // // // //synB.InitLevel(0) // // // //now := time.Now() // //newNow := time.Now() // //log.Info("Starting Render Loop") // //// Start the render loop // //for !synB.Win.ShouldClose() { // // newNow = time.Now() // // timeDelta := now.Sub(newNow) // // now = newNow // // //fmt.Println(now) // // // // //vision.ReadImg(synB.DataDir, "/assets/0.jpg") // // synB.Update(timeDelta.Seconds()) // // synB.RenderFrame() // //} // // //fmt.Printf("app was running for %f \n", 
application.Get().RunSeconds()) //} // //// go func() { //// for { //// if a, b, c := app.FrameRater().FPS(60); a > 0 && b > 0 && c == true { //// fmt.Println("FPS ", int(b)) //// } //// } //// }() ////fps := float32(app.FrameCount()) / application.Get().RunSeconds() ////go myGui.LabelFpsTest(10, 10, strconv.Itoa(int(app.FrameCount()) / int(application.Get().RunSeconds())), app) // ////IndCh := make(chan int) // //// fmt.Println("Start NeuroMatrix") //// app, err := application.Create(application.Options{ //// Title: "NeuroMatrix", //// Width: 1280, //// Height: 600, //// }) //// if err != nil { //// panic(err) //// } // //// // add GUI********************************************************* //// // Create and add a label to the root panel //// l1 := myGui.LabelFps(10, 10, "240") //// app.Gui().Root().Add(l1) // //// // Create and add button 1 to the root panel //// onOff := false //// b1 := myGui.WebCam(10, 40, &onOff, app) //// app.Gui().Root().Add(b1) // //// // Create and add exit button to the root panel //// b2 := myGui.Exit(10, 70, &onOff, app) //// app.Gui().Root().Add(b2) ////****************************************************************** // //// // Создать и протестировать линии - синапсы // //// go func() { //// myDots := 0 //// maxD := 700 //// dotlist := make(map[int]*drawing3D.Neuron3DBody) // //// for { //// if myDots < maxD { //// dotlist[myDots] = drawing3D.NewBody(app, math32.NewColor("White")) //// dotlist[myDots].CreateBody() //// //dotlist[myDots].SetPosition(float32(rand.Int31n(20)), float32(rand.Int31n(20)), float32(rand.Int31n(20))) //// myDots++ //// //fmt.Println(len(dotlist), myDots) //// } //// if myDots == maxD { //// for _, v := range dotlist { //// v.SetPosition(float32(rand.Int31n(20)), float32(rand.Int31n(20)), float32(rand.Int31n(20))) //// time.Sleep(time.Millisecond * 10) // //// } //// } //// } //// }() // //// //Add lights to the scene //// helpers.LightsScene(app) // //// // Add an axis helper to the scene //// 
helpers.AxisHelper(0.5, app) // //// // Add an grid helper to the scene //// helpers.GridHelper(10, app) // //// // Add camera to the scene //// app.CameraPersp().SetPosition(15, 15, 15) //// //app.Gl().ClearColor(0, 0.5, 0.7, 1) //// app.Gl().ClearColor(0, 0.2, 0.4, 1) // //// // Start application //// err = app.Run() //// if err != nil { //// panic(err) //// }
// // Create application and scene
random_line_split
main.go
package main // 3840 * 2160 = 8 294 400 import ( "github.com/SynthBrain/synthBrain/app" ) func main()
//package main // // //import ( // "flag" // "github.com/SynthBrain/synthBrain/baseStruct" // "github.com/SynthBrain/synthBrain/myGui" // "github.com/g3n/engine/app" // "github.com/g3n/engine/camera" // "github.com/g3n/engine/core" // "github.com/g3n/engine/geometry" // "github.com/g3n/engine/gls" // "github.com/g3n/engine/graphic" // "github.com/g3n/engine/gui" // "github.com/g3n/engine/light" // "github.com/g3n/engine/material" // "github.com/g3n/engine/math32" // "github.com/g3n/engine/renderer" // // "github.com/g3n/engine/util/helper" // "github.com/g3n/engine/util/logger" // "github.com/g3n/engine/window" // // "time" //) // //var ( // // TODO uncomment and implement usage of the following flags // //oFullScreen = flag.Bool("fullscreen", false, "Starts application with full screen") // //oSwapInterval = flag.Int("swapinterval", -1, "Sets the swap buffers interval to this value") // oHideFPS = flag.Bool("hidefps", false, "Do now show calculated FPS in the GUI") // oUpdateFPS = flag.Uint("updatefps", 1000, "Time interval in milliseconds to update the FPS in the GUI") // oTargetFPS = flag.Uint("targetfps", 60, "Sets the frame rate in frames per second") // oNoglErrors = flag.Bool("noglerrors", false, "Do not check OpenGL errors at each call (may increase FPS)") // oCpuProfile = flag.String("cpuprofile", "", "Activate cpu profiling writing profile to the specified file") // oExecTrace = flag.String("exectrace", "", "Activate execution tracer writing data to the specified file") // oNogui = flag.Bool("nogui", false, "Do not show the GUI, only the specified demo") // oLogs = flag.String("logs", "", "Set log levels for packages. 
Ex: gui:debug,gls:info") // oStats = flag.Bool("stats", false, "Shows statistics control panel in the GUI") // oRenderStats = flag.Bool("renderstats", false, "Shows gui renderer statistics in the console") //) // //var log *logger.Logger // ///* // Рисовать только тех что имеют достаточный уровень активность и окончательно не затухли //*/ //func main() { // // Create application and scene // app := app.App() // scene := core.NewNode() // // // Set the scene to be managed by the gui manager // gui.Manager().Set(scene) // // // Create SynthBrain struct // synB := new(baseStruct.SynthBrain) // //frameRater := synB.FrameRater // // // Create perspective camera // cam := camera.New(1) // cam.SetPosition(0, 0, 3) // scene.Add(cam) // // // Set up orbit control for the camera // camera.NewOrbitControl(cam) // // // Set up callback to update viewport and camera aspect ratio when the window is resized // onResize := func(evname string, ev interface{}) { // // Get framebuffer size and update viewport accordingly // width, height := app.GetSize() // app.Gls().Viewport(0, 0, int32(width), int32(height)) // // Update the camera's aspect ratio // cam.SetAspect(float32(width) / float32(height)) // } // app.Subscribe(window.OnWindowSize, onResize) // onResize("", nil) // // // Create a blue torus and add it to the scene // //geom := geometry.NewTorus(1, .4, 12, 32, math32.Pi*2) // geom := geometry.NewTorus(0, .4, 3, 3, math32.Pi*2) // mat := material.NewStandard(math32.NewColor("DarkBlue")) // mesh := graphic.NewMesh(geom, mat) // scene.Add(mesh) // // // Create and add a button to the scene // btn := gui.NewButton("Make Red") // btn.SetPosition(30, 40) // btn.SetSize(40, 40) // btn.Subscribe(gui.OnClick, func(name string, ev interface{}) { // mat.SetColor(math32.NewColor("DarkRed")) // }) // scene.Add(btn) // // Create and add a button to the scene // btn1 := gui.NewButton("Make Blue") // btn1.SetPosition(30, 90) // btn1.SetSize(40, 40) // btn1.Subscribe(gui.OnClick, func(name 
string, ev interface{}) { // mat.SetColor(math32.NewColor("DarkBlue")) // }) // scene.Add(btn1) // // Create and add a button to the scene // onOff := false // chOnOffFlag := make(chan bool, 1) // exit := myGui.Exit(30, 240, &onOff, app, chOnOffFlag) // scene.Add(exit) // // // Create and add a button to the scene // lbl := gui.NewLabel("FPS: ") // lbl.SetPosition(10, 10) // lbl.SetPaddings(2, 2, 2, 2) // scene.Add(lbl) // // // Create and add lights to the scene // scene.Add(light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.8)) // pointLight := light.NewPoint(&math32.Color{1, 1, 1}, 5.0) // pointLight.SetPosition(1, 0, 2) // scene.Add(pointLight) // // // Create and add an axis helper to the scene // scene.Add(helper.NewAxes(0.5)) // // // Set background color to gray // app.Gls().ClearColor(0.5, 0.5, 0.5, 1.0) // // //synB.InitLevel(0) // // now := time.Now() // newNow := time.Now() // //log.Info("Starting Render Loop") // //Run the application // app.Run(func(renderer *renderer.Renderer, deltaTime time.Duration) { // app.Gls().Clear(gls.DEPTH_BUFFER_BIT | gls.STENCIL_BUFFER_BIT | gls.COLOR_BUFFER_BIT) // newNow = time.Now() // timeDelta := now.Sub(newNow) // now = newNow // //fps, pfps, _ := frameRater.FPS(time.Duration(*oUpdateFPS) * time.Millisecond) // // // //lbl.SetText("FPS: ") // //lbl.SetText("FPS: " + fmt.Sprintf("%3.1f / %3.1f", fps, pfps) ) // synB.Update(timeDelta.Seconds()) // renderer.Render(scene, cam) // }) // // // ABROAD********************************************************************************************** // // //// OpenGL functions must be executed in the same thread where // //// the context was created (by window.New()) // //runtime.LockOSThread() // // // //// Parse command line flags // //showLog := flag.Bool("debug", false, "display the debug log") // //flag.Parse() // // // //// Create logger // //log = logger.New("SynthBrain", nil) // //log.AddWriter(logger.NewConsole(false)) // //log.SetFormat(logger.FTIME | logger.FMICROS) // 
//if *showLog == true { // // log.SetLevel(logger.DEBUG) // //} else { // // log.SetLevel(logger.INFO) // //} // //log.Info("Initializing SynthBrain") // // // //// Create SynthBrain struct // //synB := new(baseStruct.SynthBrain) // // // //// Manually scan the $GOPATH directories to find the data directory // //rawPaths := os.Getenv("GOPATH") // //paths := strings.Split(rawPaths, ":") // //for _, j := range paths { // // // Checks data path // // path := filepath.Join(j, "src", "github.com", "SynthBrain", "synthBrain") // // if _, err := os.Stat(path); err == nil { // // synB.DataDir = path // // } // //} // // // //// Get the window manager // //var err error // //synB.Wmgr, err = window.Manager("glfw") // //if err != nil { // // panic(err) // //} // // // //// Create window and OpenGL context // //synB.Win, err = synB.Wmgr.CreateWindow(900, 640, "SynthBrain", false) // //if err != nil { // // panic(err) // //} // // // //// Create OpenGL state // //synB.Gs, err = gls.New() // //if err != nil { // // panic(err) // //} // // // //// Speed up a bit by not checking OpenGL errors // //synB.Gs.SetCheckErrors(false) // // // //// Sets window background color // //synB.Gs.ClearColor(0, 0.2, 0.4, 1) //(0.1, 0.1, 0.1, 1.0) // // // //// Sets the OpenGL viewport size the same as the window size // //// This normally should be updated if the window is resized. 
// //width, height := synB.Win.Size() // //synB.Gs.Viewport(0, 0, int32(width), int32(height)) // // // //// Creates GUI root panel // //synB.Root = gui.NewRoot(synB.Gs, synB.Win) // //synB.Root.SetSize(float32(width), float32(height)) // // // //// Update window if resize // //synB.Win.Subscribe(window.OnWindowSize, func(evname string, ev interface{}) { // // width, height := synB.Win.Size() // // synB.Gs.Viewport(0, 0, int32(width), int32(height)) // // synB.Root.SetSize(float32(width), float32(height)) // // aspect := float32(width) / float32(height) // // synB.Camera.SetAspect(aspect) // //}) // // // ////add GUI********************************************************* // //// Create and add a label to the root panel // //synB.LabelFps = myGui.LabelFps(10, 10, "240") // //synB.Root.Add(synB.LabelFps) // // // //// Create and add button 1 to the root panel // //onOff := false // //chOnOffFlag := make(chan bool, 1) // //synB.WebCam = myGui.WebCam(10, 40, &onOff, chOnOffFlag) // //synB.Root.Add(synB.WebCam) // // // //// Create and add exit button to the root panel // //synB.Exit = myGui.Exit(10, 70, &onOff, synB.Win, chOnOffFlag) // //synB.Root.Add(synB.Exit) // ////**************************************************************** // // // //// Creates a renderer and adds default shaders // //synB.Renderer = renderer.NewRenderer(synB.Gs) // ////g.renderer.SetSortObjects(false) // //err = synB.Renderer.AddDefaultShaders() // //if err != nil { // // panic(err) // //} // //synB.Renderer.SetGui(synB.Root) // // // //// Adds a perspective camera to the scene // //// The camera aspect ratio should be updated if the window is resized. 
// //aspect := float32(width) / float32(height) // //synB.Camera = camera.NewPerspective(65, aspect, 0.01, 1000) // //synB.Camera.SetPosition(10, 10, 10) // //synB.Camera.LookAt(&math32.Vector3{0, 0, 0}) // // // //// Create orbit control and set limits // //synB.OrbitControl = control.NewOrbitControl(synB.Camera, synB.Win) // //synB.OrbitControl.Enabled = true //false // //synB.OrbitControl.EnablePan = false // //synB.OrbitControl.MaxPolarAngle = 2 * math32.Pi / 3 // //synB.OrbitControl.MinDistance = 0.1 // //synB.OrbitControl.MaxDistance = 10000 // // // //// Create main scene and child levelScene // //synB.Scene = core.NewNode() // //synB.LevelScene = core.NewNode() // //synB.Scene.Add(synB.Camera) // //synB.Scene.Add(synB.LevelScene) // //synB.StepDelta = math32.NewVector2(0, 0) // //synB.Renderer.SetScene(synB.Scene) // // // //// Add white ambient light to the scene // //ambLight := light.NewAmbient(&math32.Color{1.0, 1.0, 1.0}, 0.4) // //synB.Scene.Add(ambLight) // // // //synB.LevelStyle = drawing3D.NewBaseStyle() // // // ////synB.SetupGui(width, height) // //synB.RenderFrame() // // // ////synB.LoadSkyBox() // //synB.LoadLevels() // // // //size := 10 // //gridHelp := graphic.NewGridHelper(float32(size), 1, math32.NewColor("LightGrey")) // //gridHelp.SetPosition(float32(size/2), -0.2, float32(size/2)) // //synB.Scene.Add(gridHelp) // // // //// Done Loading - hide the loading label, show the menu, and initialize the level // ////synB.LoadingLabel.SetVisible(false) // // // //synB.InitLevel(0) // // // //now := time.Now() // //newNow := time.Now() // //log.Info("Starting Render Loop") // //// Start the render loop // //for !synB.Win.ShouldClose() { // // newNow = time.Now() // // timeDelta := now.Sub(newNow) // // now = newNow // // //fmt.Println(now) // // // // //vision.ReadImg(synB.DataDir, "/assets/0.jpg") // // synB.Update(timeDelta.Seconds()) // // synB.RenderFrame() // //} // // //fmt.Printf("app was running for %f \n", 
application.Get().RunSeconds()) //} // //// go func() { //// for { //// if a, b, c := app.FrameRater().FPS(60); a > 0 && b > 0 && c == true { //// fmt.Println("FPS ", int(b)) //// } //// } //// }() ////fps := float32(app.FrameCount()) / application.Get().RunSeconds() ////go myGui.LabelFpsTest(10, 10, strconv.Itoa(int(app.FrameCount()) / int(application.Get().RunSeconds())), app) // ////IndCh := make(chan int) // //// fmt.Println("Start NeuroMatrix") //// app, err := application.Create(application.Options{ //// Title: "NeuroMatrix", //// Width: 1280, //// Height: 600, //// }) //// if err != nil { //// panic(err) //// } // //// // add GUI********************************************************* //// // Create and add a label to the root panel //// l1 := myGui.LabelFps(10, 10, "240") //// app.Gui().Root().Add(l1) // //// // Create and add button 1 to the root panel //// onOff := false //// b1 := myGui.WebCam(10, 40, &onOff, app) //// app.Gui().Root().Add(b1) // //// // Create and add exit button to the root panel //// b2 := myGui.Exit(10, 70, &onOff, app) //// app.Gui().Root().Add(b2) ////****************************************************************** // //// // Создать и протестировать линии - синапсы // //// go func() { //// myDots := 0 //// maxD := 700 //// dotlist := make(map[int]*drawing3D.Neuron3DBody) // //// for { //// if myDots < maxD { //// dotlist[myDots] = drawing3D.NewBody(app, math32.NewColor("White")) //// dotlist[myDots].CreateBody() //// //dotlist[myDots].SetPosition(float32(rand.Int31n(20)), float32(rand.Int31n(20)), float32(rand.Int31n(20))) //// myDots++ //// //fmt.Println(len(dotlist), myDots) //// } //// if myDots == maxD { //// for _, v := range dotlist { //// v.SetPosition(float32(rand.Int31n(20)), float32(rand.Int31n(20)), float32(rand.Int31n(20))) //// time.Sleep(time.Millisecond * 10) // //// } //// } //// } //// }() // //// //Add lights to the scene //// helpers.LightsScene(app) // //// // Add an axis helper to the scene //// 
helpers.AxisHelper(0.5, app) // //// // Add an grid helper to the scene //// helpers.GridHelper(10, app) // //// // Add camera to the scene //// app.CameraPersp().SetPosition(15, 15, 15) //// //app.Gl().ClearColor(0, 0.5, 0.7, 1) //// app.Gl().ClearColor(0, 0.2, 0.4, 1) // //// // Start application //// err = app.Run() //// if err != nil { //// panic(err) //// }
{ // Create and run application app.Create().Run() }
identifier_body
thm.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """This module contains the class TemporalHierarchicalModel class. """ import logging from math import gcd from typing import Dict, List, Optional, Type import numpy as np import pandas as pd from kats.consts import TimeSeriesData from kats.metrics import metrics from kats.models import ( arima, holtwinters, linear_model, prophet, quadratic_model, sarima, theta, ) from kats.models.model import Model from kats.models.reconciliation.base_models import BaseTHModel, GetAggregateTS from sklearn.covariance import MinCovDet # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. BASE_MODELS: Dict[str, Type[Model]] = { "arima": arima.ARIMAModel, "holtwinters": holtwinters.HoltWintersModel, "sarima": sarima.SARIMAModel, "prophet": prophet.ProphetModel, "linear": linear_model.LinearModel, "quadratic": quadratic_model.QuadraticModel, "theta": theta.ThetaModel, } def _log_error(msg: str) -> ValueError: logging.error(msg) return ValueError(msg) class TemporalHierarchicalModel: """Temporal hierarchical model class. This framework combines the base models of different temporal aggregation levels to generate reconciled forecasts. This class provides fit, get_S, get_W, predict and median_validation. Attributes: data: A TimeSeriesData object storing the time series data for level 1 (i.e., the most disaggregate level). baseModels: A list BaseTHModel objects representing the base models for different levels. """ # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. models: Optional[Dict[str, Model]] = None residuals: Optional[Dict[int, np.ndarray]] = None res_matrix: Optional[np.ndarray] = None def __init__(self, data: TimeSeriesData, baseModels: List[BaseTHModel]) -> None: if not data.is_univariate(): msg = f"Only univariate time series supported, but got {type(data.value)}." 
raise _log_error(msg) self.data = data for basemodel in baseModels: if not isinstance(basemodel, BaseTHModel): msg = ( "Base model should be a BaseTHModel object but is " f"{type(basemodel)}." ) raise _log_error(msg) levels = [bm.level for bm in baseModels] if 1 not in levels: raise _log_error("Model of level 1 is missing.") if len(levels) != len(set(levels)): raise _log_error("One level cannot receive multiple models.") # pyre-fixme[4]: Attribute must be annotated. self.levels = sorted(levels, reverse=True) m = self._get_m(levels) # pyre-fixme[4]: Attribute must be annotated. self.m = m # pyre-fixme[4]: Attribute must be annotated. self.freq = {k: int(m / k) for k in self.levels} self.baseModels = baseModels # pyre-fixme[4]: Attribute must be annotated. self.info_fcsts = {} # pyre-fixme[4]: Attribute must be annotated. self.info_residuals = {} def _get_m(self, ks: List[int]) -> int: """Calculate m. m is the minimum common multiple of all levels. Args: ks: the list of integers representing all the levels. Returns: An integer representing the minimum common multiple. """ base = 1 for c in ks: base = base * c // gcd(base, c) return base def fit(self) -> None: """Fit all base models. If base model only has residuals and forecasts, store the information. """ levels = self.levels TSs = GetAggregateTS(self.data).aggregate(levels) models = {} residuals = {} fcsts = {} for bm in self.baseModels: model_name = bm.model_name if model_name is None: # only residuals and fcsts are provided models[bm.level] = None residuals[bm.level] = bm.residuals fcsts[bm.level] = bm.fcsts else: m = BASE_MODELS[model_name]( data=TSs[bm.level], params=bm.model_params, ) m.fit() models[bm.level] = m self.models = models self.info_fcsts = fcsts self.info_residuals = residuals def get_S(self) -> np.ndarray: """Calculate S matrix. Returns: A np.array representing the S matrix. 
""" ans = [] levels = self.levels m = self.m for k in levels: for i in range(self.freq[k]): tem = np.zeros(m) tem[(i * k) : (i * k + k)] = 1.0 ans.append(tem) return np.row_stack(ans) def _aggregate_data(self, data: np.ndarray, k: int) -> np.ndarray: """Aggregate data according to level k.""" if k == 1: return data n = len(data) h = n // k return (data[: int(h * k)]).reshape(-1, k).sum(axis=1) # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. def _get_residuals(self, model: Model) -> np.ndarray: """Calculate residuals of each base model. Args: model: a callable model object representing the trained base model. Returns: A np.ndarray of residuals. """ try: # pyre-fixme[16]: `Model` has no attribute `model`. return model.model.resid.values except Exception: fcst = model.predict(steps=1, freq="D", include_history=True) # pyre-fixme[16]: `None` has no attribute `merge`. # pyre-fixme[16]: `Optional` has no attribute `to_dataframe`. merge = fcst.merge(model.data.to_dataframe(), on="time") for col in merge.columns: if col != "time" and ("fcst" not in col): return merge[col].values - merge["fcst"].values raise ValueError("Couldn't find residual or forecast values in model") def _get_all_residuals(self) -> Dict[int, np.ndarray]: """ Calculate residuals for all base models. Returns: Dictionary for residuals, whose key is level and value is residual array. """ residuals = self.residuals # if residuals have not been calculated yet if residuals is None: levels = self.levels models = self.models residuals = {} for k in levels: # assert models is not None # pyre-fixme[16]: `Optional` has no attribute `__getitem__`. if models[k] is not None: try: vals = self._get_residuals(models[k]) except Exception as e: msg = ( f"Failed to get residuals for level {k} with error " f"message {e}." 
) raise _log_error(msg) residuals[k] = vals else: residuals[k] = self.info_residuals[k] self.residuals = residuals return residuals def _get_residual_matrix(self) -> np.ndarray: """ Reshape residuals into matrix format. Returns: Residual matrix. """ res_matrix = self.res_matrix if res_matrix is None: residuals = self._get_all_residuals() ks = self.levels freq = self.freq h = np.min([len(residuals[k]) // freq[k] for k in ks]) res_matrix = [] for k in ks: n = h * freq[k] res_matrix.append(residuals[k][-n:].reshape(h, -1).T) res_matrix = np.row_stack(res_matrix) self.res_matrix = res_matrix return res_matrix def get_W(self, method: str = "struc", eps: float = 1e-5) -> np.ndarray:
# pyre-fixme[2]: Parameter must be annotated. def _predict_origin(self, steps: int, method="struc") -> Dict[int, np.ndarray]: """ Generate original forecasts from each base model (without time index). Args: steps: Number of forecasts for level 1. methd: Reconciliation method. Returns: Dictionary of forecasts of each level, whose key is level and value is forecast array. """ m = self.m levels = self.levels freq = self.freq h = int(np.ceil(steps / m)) hf = steps // m orig_fcst = {} models = self.models # generate forecasts for each level for k in levels: num = int(freq[k] * h) # assert models is not None # pyre-fixme[16]: `Optional` has no attribute `__getitem__`. if models[k] is not None: orig_fcst[k] = models[k].predict(steps=num, freq="D")["fcst"].values else: fcst_num = len(self.info_fcsts[k]) if fcst_num < num: if fcst_num >= hf * freq[k]: # since the final output only needs hf*freq[k] forecasts # for level k, we pad the forecast array to desirable # length. (note that the padding values would be ignored # in the final output.) orig_fcst[k] = np.concatenate( [ self.info_fcsts[k], [self.info_fcsts[k][-1]] * (num - fcst_num), ] ) elif method == "bu" and k != 1: # for 'bu' only level 1 is needed. orig_fcst[k] = self.info_fcsts[k] else: msg = ( f"{hf*freq[k]} steps of forecasts for level {k} are" f" needed, but only receive {fcst_num} steps (and " "forecast model is None)." ) raise _log_error(msg) else: orig_fcst[k] = self.info_fcsts[k][:num] return orig_fcst def _predict( self, steps: int, # pyre-fixme[2]: Parameter must be annotated. method="struc", origin_fcst: bool = False, fcst_levels: Optional[List[int]] = None, ) -> Dict[str, Dict[int, np.ndarray]]: """Generate forecasts for each level (without time index). Args: steps: Number of forecasts for level 1. methd: Reconciliation method. origin_fcst: Whether to return the forecasts of base models. fcst_levels: Levels that one wants to generate forecasts for. 
If None, then all forecasts for all levels of the base models are generated. Returns: Dictionary of forecasts, whose key is level and value is forecast array. """ if self.models is None: raise _log_error("Please fit base models via .fit() first.") m = self.m levels = self.levels freq = self.freq h = int(np.ceil(steps / m)) if fcst_levels is None: fcst_levels = list(levels) fcst = {} orig_fcst = self._predict_origin(steps, method) if method == "bu": # bottom_up method yhat = orig_fcst[1] elif method == "median": # median method tem = [] for k in levels: tem.append(np.repeat(orig_fcst[k] / k, k)) tem = np.row_stack(tem) yhat = np.median(tem, axis=0) elif method in {"struc", "svar", "hvar", "mint_shrink", "mint_sample"}: # transform fcsts into matrix yh = [] for k in levels: yh.append(orig_fcst[k].reshape(h, -1).T) yh = np.row_stack(yh) S = self.get_S() W = self.get_W(method) # when W is a vector, i.e., a simpler represent for a diagnoal matrix if len(W.shape) == 1: T = (S.T) / W else: T = np.linalg.solve(W, S).T yhat = np.dot(S, np.linalg.solve(T.dot(S), T)).dot(yh) # extract forecasts for level 1 yhat = (yhat[(-freq[1]) :, :].T).flatten()[:steps] else: raise _log_error(f"Reconciliation method {method} is invalid.") # aggregate fcsts for k in fcst_levels: fcst[k] = self._aggregate_data(yhat, k)[: (steps // k)] ans = {"fcst": fcst} if origin_fcst: for elm in orig_fcst: orig_fcst[elm] = orig_fcst[elm][: (steps // elm)] ans["origin_fcst"] = orig_fcst return ans def predict( self, steps: int, # pyre-fixme[2]: Parameter must be annotated. method="struc", freq: Optional[str] = None, origin_fcst: bool = False, fcst_levels: Optional[List[int]] = None, last_timestamp: Optional[pd.Timestamp] = None, ) -> Dict[str, Dict[int, pd.DataFrame]]: """Generate reconciled forecasts (with time index). Args: steps: The number of forecasts needed for level 1. methd: The name of the reconciliation method. 
Can be 'bu' (bottom-up), 'median', 'struc' (structure-variance), 'svar', 'hvar', 'mint_shrink' or 'mint_sample'. freq: The frequency of the time series at level 1. If None, then we infer the frequency via ts.infer_freq_robust(). origin_fcst: Whether or not to return the forecasts of base models. fcst_levels: The levels to generate forecasts for. Default is None, which generates forecasts for all the levels of the base models. Returns: A dictionary of forecasts, whose key is the level and the corresponding value is a np.array storing the forecasts. """ if freq is None: freq = self.data.infer_freq_robust() last_timestamp = self.data.time.max() fcsts = self._predict( steps, method=method, origin_fcst=origin_fcst, fcst_levels=fcst_levels ) ans = {} for elm in fcsts: tmp = {} for k in fcsts[elm]: fcst_num = len(fcsts[elm][k]) time = pd.date_range( last_timestamp + freq * k, last_timestamp + freq * k * fcst_num, periods=fcst_num, ) tmp[k] = pd.DataFrame({"time": time, "fcst": fcsts[elm][k]}, copy=False) ans[elm] = tmp return ans def median_validation( self, # pyre-fixme[2]: Parameter must be annotated. steps, dist_metric: str = "mae", threshold: float = 5.0, ) -> List[int]: """Filtering out bad fcsts based on median forecasts. This function detects the levels whose forecasts are greatly deviate from median forecasts, which is a strong indication of bad forecasts. Args: steps: The number of forecasts needed for level 1 for validation. dist_metric: The distance metric used to measure the distance between the base forecasts and the median forecasts. threshold: The threshold for deviance. The forecast whose distance from the median forecast is greater than threshold*std is taken as bad forecasts. Default is 3. Returns: A list of integers representing the levels whose forecasts are bad. 
""" diffs = {} ks = self.levels try: func = metrics.core_metric(dist_metric) except ValueError: raise _log_error(f"Invalid dist_metric {dist_metric}") median_fcst = self._predict(steps, method="median", origin_fcst=True) for k in ks: diffs[k] = func(median_fcst["fcst"][k], median_fcst["origin_fcst"][k]) if dist_metric == "mae": diffs[k] /= k vals = np.array(list(diffs.values())) try: cov = MinCovDet().fit(vals[:, None]) lqr = np.sqrt(cov.covariance_.flatten()[0]) except Exception: low, up = np.percentile(vals, [25, 75]) lqr = up - low up = np.median(vals) + lqr * threshold return [k for k in diffs if diffs[k] >= up]
""" Calculate W matrix. Args: method: Reconciliation method for temporal hierarchical model. Valid methods include 'struc', 'svar', 'hvar', 'mint_sample', and 'mint_shrink'. eps: Epsilons added to W for numerical stability. Returns: W matrix. (If W is a diagnoal matrix, only returns its diagnoal elements). """ levels = self.levels freq = self.freq if method == "struc": ans = [] for k in levels: ans.extend([k] * freq[k]) return np.array(ans) elif method == "svar": residuals = self._get_all_residuals() ans = [] for k in levels: ans.extend([np.nanmean(np.square(residuals[k]))] * freq[k]) return np.array(ans) + eps elif method == "hvar": res_matrix = self._get_residual_matrix() return np.nanvar(res_matrix, axis=1) + eps elif method == "mint_shrink": cov = np.cov(self._get_residual_matrix()) # get correlation matrix sqrt = np.sqrt(np.diag(cov)) cor = ( (cov / sqrt).T ) / sqrt # due to symmetry, no need to transpose the matrix again. mask = ~np.eye(cor.shape[0], dtype=bool) cor = cor[mask] lam = np.var(cor) / np.sum(cor**2) lam = np.max([0, lam]) cov = np.diag(np.diag(cov)) * lam + (1.0 - lam) * cov cov += np.eye(len(cov)) * eps return cov elif method == "mint_sample": cov = np.cov(self._get_residual_matrix()) cov += np.eye(len(cov)) * eps return cov else: raise _log_error(f"{method} is invalid for get_W() method.")
identifier_body
thm.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """This module contains the class TemporalHierarchicalModel class. """ import logging from math import gcd from typing import Dict, List, Optional, Type import numpy as np import pandas as pd from kats.consts import TimeSeriesData from kats.metrics import metrics from kats.models import ( arima, holtwinters, linear_model, prophet, quadratic_model, sarima, theta, ) from kats.models.model import Model from kats.models.reconciliation.base_models import BaseTHModel, GetAggregateTS from sklearn.covariance import MinCovDet # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. BASE_MODELS: Dict[str, Type[Model]] = { "arima": arima.ARIMAModel, "holtwinters": holtwinters.HoltWintersModel, "sarima": sarima.SARIMAModel, "prophet": prophet.ProphetModel, "linear": linear_model.LinearModel, "quadratic": quadratic_model.QuadraticModel, "theta": theta.ThetaModel, } def _log_error(msg: str) -> ValueError: logging.error(msg) return ValueError(msg) class TemporalHierarchicalModel: """Temporal hierarchical model class. This framework combines the base models of different temporal aggregation levels to generate reconciled forecasts. This class provides fit, get_S, get_W, predict and median_validation. Attributes: data: A TimeSeriesData object storing the time series data for level 1 (i.e., the most disaggregate level). baseModels: A list BaseTHModel objects representing the base models for different levels. """ # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. models: Optional[Dict[str, Model]] = None residuals: Optional[Dict[int, np.ndarray]] = None res_matrix: Optional[np.ndarray] = None def __init__(self, data: TimeSeriesData, baseModels: List[BaseTHModel]) -> None: if not data.is_univariate(): msg = f"Only univariate time series supported, but got {type(data.value)}." 
raise _log_error(msg) self.data = data for basemodel in baseModels: if not isinstance(basemodel, BaseTHModel): msg = (
"Base model should be a BaseTHModel object but is " f"{type(basemodel)}." ) raise _log_error(msg) levels = [bm.level for bm in baseModels] if 1 not in levels: raise _log_error("Model of level 1 is missing.") if len(levels) != len(set(levels)): raise _log_error("One level cannot receive multiple models.") # pyre-fixme[4]: Attribute must be annotated. self.levels = sorted(levels, reverse=True) m = self._get_m(levels) # pyre-fixme[4]: Attribute must be annotated. self.m = m # pyre-fixme[4]: Attribute must be annotated. self.freq = {k: int(m / k) for k in self.levels} self.baseModels = baseModels # pyre-fixme[4]: Attribute must be annotated. self.info_fcsts = {} # pyre-fixme[4]: Attribute must be annotated. self.info_residuals = {} def _get_m(self, ks: List[int]) -> int: """Calculate m. m is the minimum common multiple of all levels. Args: ks: the list of integers representing all the levels. Returns: An integer representing the minimum common multiple. """ base = 1 for c in ks: base = base * c // gcd(base, c) return base def fit(self) -> None: """Fit all base models. If base model only has residuals and forecasts, store the information. """ levels = self.levels TSs = GetAggregateTS(self.data).aggregate(levels) models = {} residuals = {} fcsts = {} for bm in self.baseModels: model_name = bm.model_name if model_name is None: # only residuals and fcsts are provided models[bm.level] = None residuals[bm.level] = bm.residuals fcsts[bm.level] = bm.fcsts else: m = BASE_MODELS[model_name]( data=TSs[bm.level], params=bm.model_params, ) m.fit() models[bm.level] = m self.models = models self.info_fcsts = fcsts self.info_residuals = residuals def get_S(self) -> np.ndarray: """Calculate S matrix. Returns: A np.array representing the S matrix. 
""" ans = [] levels = self.levels m = self.m for k in levels: for i in range(self.freq[k]): tem = np.zeros(m) tem[(i * k) : (i * k + k)] = 1.0 ans.append(tem) return np.row_stack(ans) def _aggregate_data(self, data: np.ndarray, k: int) -> np.ndarray: """Aggregate data according to level k.""" if k == 1: return data n = len(data) h = n // k return (data[: int(h * k)]).reshape(-1, k).sum(axis=1) # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. def _get_residuals(self, model: Model) -> np.ndarray: """Calculate residuals of each base model. Args: model: a callable model object representing the trained base model. Returns: A np.ndarray of residuals. """ try: # pyre-fixme[16]: `Model` has no attribute `model`. return model.model.resid.values except Exception: fcst = model.predict(steps=1, freq="D", include_history=True) # pyre-fixme[16]: `None` has no attribute `merge`. # pyre-fixme[16]: `Optional` has no attribute `to_dataframe`. merge = fcst.merge(model.data.to_dataframe(), on="time") for col in merge.columns: if col != "time" and ("fcst" not in col): return merge[col].values - merge["fcst"].values raise ValueError("Couldn't find residual or forecast values in model") def _get_all_residuals(self) -> Dict[int, np.ndarray]: """ Calculate residuals for all base models. Returns: Dictionary for residuals, whose key is level and value is residual array. """ residuals = self.residuals # if residuals have not been calculated yet if residuals is None: levels = self.levels models = self.models residuals = {} for k in levels: # assert models is not None # pyre-fixme[16]: `Optional` has no attribute `__getitem__`. if models[k] is not None: try: vals = self._get_residuals(models[k]) except Exception as e: msg = ( f"Failed to get residuals for level {k} with error " f"message {e}." 
) raise _log_error(msg) residuals[k] = vals else: residuals[k] = self.info_residuals[k] self.residuals = residuals return residuals def _get_residual_matrix(self) -> np.ndarray: """ Reshape residuals into matrix format. Returns: Residual matrix. """ res_matrix = self.res_matrix if res_matrix is None: residuals = self._get_all_residuals() ks = self.levels freq = self.freq h = np.min([len(residuals[k]) // freq[k] for k in ks]) res_matrix = [] for k in ks: n = h * freq[k] res_matrix.append(residuals[k][-n:].reshape(h, -1).T) res_matrix = np.row_stack(res_matrix) self.res_matrix = res_matrix return res_matrix def get_W(self, method: str = "struc", eps: float = 1e-5) -> np.ndarray: """ Calculate W matrix. Args: method: Reconciliation method for temporal hierarchical model. Valid methods include 'struc', 'svar', 'hvar', 'mint_sample', and 'mint_shrink'. eps: Epsilons added to W for numerical stability. Returns: W matrix. (If W is a diagnoal matrix, only returns its diagnoal elements). """ levels = self.levels freq = self.freq if method == "struc": ans = [] for k in levels: ans.extend([k] * freq[k]) return np.array(ans) elif method == "svar": residuals = self._get_all_residuals() ans = [] for k in levels: ans.extend([np.nanmean(np.square(residuals[k]))] * freq[k]) return np.array(ans) + eps elif method == "hvar": res_matrix = self._get_residual_matrix() return np.nanvar(res_matrix, axis=1) + eps elif method == "mint_shrink": cov = np.cov(self._get_residual_matrix()) # get correlation matrix sqrt = np.sqrt(np.diag(cov)) cor = ( (cov / sqrt).T ) / sqrt # due to symmetry, no need to transpose the matrix again. 
mask = ~np.eye(cor.shape[0], dtype=bool) cor = cor[mask] lam = np.var(cor) / np.sum(cor**2) lam = np.max([0, lam]) cov = np.diag(np.diag(cov)) * lam + (1.0 - lam) * cov cov += np.eye(len(cov)) * eps return cov elif method == "mint_sample": cov = np.cov(self._get_residual_matrix()) cov += np.eye(len(cov)) * eps return cov else: raise _log_error(f"{method} is invalid for get_W() method.") # pyre-fixme[2]: Parameter must be annotated. def _predict_origin(self, steps: int, method="struc") -> Dict[int, np.ndarray]: """ Generate original forecasts from each base model (without time index). Args: steps: Number of forecasts for level 1. methd: Reconciliation method. Returns: Dictionary of forecasts of each level, whose key is level and value is forecast array. """ m = self.m levels = self.levels freq = self.freq h = int(np.ceil(steps / m)) hf = steps // m orig_fcst = {} models = self.models # generate forecasts for each level for k in levels: num = int(freq[k] * h) # assert models is not None # pyre-fixme[16]: `Optional` has no attribute `__getitem__`. if models[k] is not None: orig_fcst[k] = models[k].predict(steps=num, freq="D")["fcst"].values else: fcst_num = len(self.info_fcsts[k]) if fcst_num < num: if fcst_num >= hf * freq[k]: # since the final output only needs hf*freq[k] forecasts # for level k, we pad the forecast array to desirable # length. (note that the padding values would be ignored # in the final output.) orig_fcst[k] = np.concatenate( [ self.info_fcsts[k], [self.info_fcsts[k][-1]] * (num - fcst_num), ] ) elif method == "bu" and k != 1: # for 'bu' only level 1 is needed. orig_fcst[k] = self.info_fcsts[k] else: msg = ( f"{hf*freq[k]} steps of forecasts for level {k} are" f" needed, but only receive {fcst_num} steps (and " "forecast model is None)." ) raise _log_error(msg) else: orig_fcst[k] = self.info_fcsts[k][:num] return orig_fcst def _predict( self, steps: int, # pyre-fixme[2]: Parameter must be annotated. 
method="struc", origin_fcst: bool = False, fcst_levels: Optional[List[int]] = None, ) -> Dict[str, Dict[int, np.ndarray]]: """Generate forecasts for each level (without time index). Args: steps: Number of forecasts for level 1. methd: Reconciliation method. origin_fcst: Whether to return the forecasts of base models. fcst_levels: Levels that one wants to generate forecasts for. If None, then all forecasts for all levels of the base models are generated. Returns: Dictionary of forecasts, whose key is level and value is forecast array. """ if self.models is None: raise _log_error("Please fit base models via .fit() first.") m = self.m levels = self.levels freq = self.freq h = int(np.ceil(steps / m)) if fcst_levels is None: fcst_levels = list(levels) fcst = {} orig_fcst = self._predict_origin(steps, method) if method == "bu": # bottom_up method yhat = orig_fcst[1] elif method == "median": # median method tem = [] for k in levels: tem.append(np.repeat(orig_fcst[k] / k, k)) tem = np.row_stack(tem) yhat = np.median(tem, axis=0) elif method in {"struc", "svar", "hvar", "mint_shrink", "mint_sample"}: # transform fcsts into matrix yh = [] for k in levels: yh.append(orig_fcst[k].reshape(h, -1).T) yh = np.row_stack(yh) S = self.get_S() W = self.get_W(method) # when W is a vector, i.e., a simpler represent for a diagnoal matrix if len(W.shape) == 1: T = (S.T) / W else: T = np.linalg.solve(W, S).T yhat = np.dot(S, np.linalg.solve(T.dot(S), T)).dot(yh) # extract forecasts for level 1 yhat = (yhat[(-freq[1]) :, :].T).flatten()[:steps] else: raise _log_error(f"Reconciliation method {method} is invalid.") # aggregate fcsts for k in fcst_levels: fcst[k] = self._aggregate_data(yhat, k)[: (steps // k)] ans = {"fcst": fcst} if origin_fcst: for elm in orig_fcst: orig_fcst[elm] = orig_fcst[elm][: (steps // elm)] ans["origin_fcst"] = orig_fcst return ans def predict( self, steps: int, # pyre-fixme[2]: Parameter must be annotated. 
method="struc", freq: Optional[str] = None, origin_fcst: bool = False, fcst_levels: Optional[List[int]] = None, last_timestamp: Optional[pd.Timestamp] = None, ) -> Dict[str, Dict[int, pd.DataFrame]]: """Generate reconciled forecasts (with time index). Args: steps: The number of forecasts needed for level 1. methd: The name of the reconciliation method. Can be 'bu' (bottom-up), 'median', 'struc' (structure-variance), 'svar', 'hvar', 'mint_shrink' or 'mint_sample'. freq: The frequency of the time series at level 1. If None, then we infer the frequency via ts.infer_freq_robust(). origin_fcst: Whether or not to return the forecasts of base models. fcst_levels: The levels to generate forecasts for. Default is None, which generates forecasts for all the levels of the base models. Returns: A dictionary of forecasts, whose key is the level and the corresponding value is a np.array storing the forecasts. """ if freq is None: freq = self.data.infer_freq_robust() last_timestamp = self.data.time.max() fcsts = self._predict( steps, method=method, origin_fcst=origin_fcst, fcst_levels=fcst_levels ) ans = {} for elm in fcsts: tmp = {} for k in fcsts[elm]: fcst_num = len(fcsts[elm][k]) time = pd.date_range( last_timestamp + freq * k, last_timestamp + freq * k * fcst_num, periods=fcst_num, ) tmp[k] = pd.DataFrame({"time": time, "fcst": fcsts[elm][k]}, copy=False) ans[elm] = tmp return ans def median_validation( self, # pyre-fixme[2]: Parameter must be annotated. steps, dist_metric: str = "mae", threshold: float = 5.0, ) -> List[int]: """Filtering out bad fcsts based on median forecasts. This function detects the levels whose forecasts are greatly deviate from median forecasts, which is a strong indication of bad forecasts. Args: steps: The number of forecasts needed for level 1 for validation. dist_metric: The distance metric used to measure the distance between the base forecasts and the median forecasts. threshold: The threshold for deviance. 
The forecast whose distance from the median forecast is greater than threshold*std is taken as bad forecasts. Default is 3. Returns: A list of integers representing the levels whose forecasts are bad. """ diffs = {} ks = self.levels try: func = metrics.core_metric(dist_metric) except ValueError: raise _log_error(f"Invalid dist_metric {dist_metric}") median_fcst = self._predict(steps, method="median", origin_fcst=True) for k in ks: diffs[k] = func(median_fcst["fcst"][k], median_fcst["origin_fcst"][k]) if dist_metric == "mae": diffs[k] /= k vals = np.array(list(diffs.values())) try: cov = MinCovDet().fit(vals[:, None]) lqr = np.sqrt(cov.covariance_.flatten()[0]) except Exception: low, up = np.percentile(vals, [25, 75]) lqr = up - low up = np.median(vals) + lqr * threshold return [k for k in diffs if diffs[k] >= up]
random_line_split
thm.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """This module contains the class TemporalHierarchicalModel class. """ import logging from math import gcd from typing import Dict, List, Optional, Type import numpy as np import pandas as pd from kats.consts import TimeSeriesData from kats.metrics import metrics from kats.models import ( arima, holtwinters, linear_model, prophet, quadratic_model, sarima, theta, ) from kats.models.model import Model from kats.models.reconciliation.base_models import BaseTHModel, GetAggregateTS from sklearn.covariance import MinCovDet # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. BASE_MODELS: Dict[str, Type[Model]] = { "arima": arima.ARIMAModel, "holtwinters": holtwinters.HoltWintersModel, "sarima": sarima.SARIMAModel, "prophet": prophet.ProphetModel, "linear": linear_model.LinearModel, "quadratic": quadratic_model.QuadraticModel, "theta": theta.ThetaModel, } def _log_error(msg: str) -> ValueError: logging.error(msg) return ValueError(msg) class TemporalHierarchicalModel: """Temporal hierarchical model class. This framework combines the base models of different temporal aggregation levels to generate reconciled forecasts. This class provides fit, get_S, get_W, predict and median_validation. Attributes: data: A TimeSeriesData object storing the time series data for level 1 (i.e., the most disaggregate level). baseModels: A list BaseTHModel objects representing the base models for different levels. """ # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. models: Optional[Dict[str, Model]] = None residuals: Optional[Dict[int, np.ndarray]] = None res_matrix: Optional[np.ndarray] = None def __init__(self, data: TimeSeriesData, baseModels: List[BaseTHModel]) -> None: if not data.is_univariate(): msg = f"Only univariate time series supported, but got {type(data.value)}." 
raise _log_error(msg) self.data = data for basemodel in baseModels: if not isinstance(basemodel, BaseTHModel): msg = ( "Base model should be a BaseTHModel object but is " f"{type(basemodel)}." ) raise _log_error(msg) levels = [bm.level for bm in baseModels] if 1 not in levels: raise _log_error("Model of level 1 is missing.") if len(levels) != len(set(levels)): raise _log_error("One level cannot receive multiple models.") # pyre-fixme[4]: Attribute must be annotated. self.levels = sorted(levels, reverse=True) m = self._get_m(levels) # pyre-fixme[4]: Attribute must be annotated. self.m = m # pyre-fixme[4]: Attribute must be annotated. self.freq = {k: int(m / k) for k in self.levels} self.baseModels = baseModels # pyre-fixme[4]: Attribute must be annotated. self.info_fcsts = {} # pyre-fixme[4]: Attribute must be annotated. self.info_residuals = {} def _get_m(self, ks: List[int]) -> int: """Calculate m. m is the minimum common multiple of all levels. Args: ks: the list of integers representing all the levels. Returns: An integer representing the minimum common multiple. """ base = 1 for c in ks: base = base * c // gcd(base, c) return base def fit(self) -> None: """Fit all base models. If base model only has residuals and forecasts, store the information. """ levels = self.levels TSs = GetAggregateTS(self.data).aggregate(levels) models = {} residuals = {} fcsts = {} for bm in self.baseModels: model_name = bm.model_name if model_name is None: # only residuals and fcsts are provided models[bm.level] = None residuals[bm.level] = bm.residuals fcsts[bm.level] = bm.fcsts else: m = BASE_MODELS[model_name]( data=TSs[bm.level], params=bm.model_params, ) m.fit() models[bm.level] = m self.models = models self.info_fcsts = fcsts self.info_residuals = residuals def get_S(self) -> np.ndarray: """Calculate S matrix. Returns: A np.array representing the S matrix. 
""" ans = [] levels = self.levels m = self.m for k in levels: for i in range(self.freq[k]): tem = np.zeros(m) tem[(i * k) : (i * k + k)] = 1.0 ans.append(tem) return np.row_stack(ans) def _aggregate_data(self, data: np.ndarray, k: int) -> np.ndarray: """Aggregate data according to level k.""" if k == 1: return data n = len(data) h = n // k return (data[: int(h * k)]).reshape(-1, k).sum(axis=1) # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. def _get_residuals(self, model: Model) -> np.ndarray: """Calculate residuals of each base model. Args: model: a callable model object representing the trained base model. Returns: A np.ndarray of residuals. """ try: # pyre-fixme[16]: `Model` has no attribute `model`. return model.model.resid.values except Exception: fcst = model.predict(steps=1, freq="D", include_history=True) # pyre-fixme[16]: `None` has no attribute `merge`. # pyre-fixme[16]: `Optional` has no attribute `to_dataframe`. merge = fcst.merge(model.data.to_dataframe(), on="time") for col in merge.columns: if col != "time" and ("fcst" not in col): return merge[col].values - merge["fcst"].values raise ValueError("Couldn't find residual or forecast values in model") def _get_all_residuals(self) -> Dict[int, np.ndarray]: """ Calculate residuals for all base models. Returns: Dictionary for residuals, whose key is level and value is residual array. """ residuals = self.residuals # if residuals have not been calculated yet if residuals is None: levels = self.levels models = self.models residuals = {} for k in levels: # assert models is not None # pyre-fixme[16]: `Optional` has no attribute `__getitem__`. if models[k] is not None: try: vals = self._get_residuals(models[k]) except Exception as e: msg = ( f"Failed to get residuals for level {k} with error " f"message {e}." 
) raise _log_error(msg) residuals[k] = vals else: residuals[k] = self.info_residuals[k] self.residuals = residuals return residuals def _get_residual_matrix(self) -> np.ndarray: """ Reshape residuals into matrix format. Returns: Residual matrix. """ res_matrix = self.res_matrix if res_matrix is None: residuals = self._get_all_residuals() ks = self.levels freq = self.freq h = np.min([len(residuals[k]) // freq[k] for k in ks]) res_matrix = [] for k in ks: n = h * freq[k] res_matrix.append(residuals[k][-n:].reshape(h, -1).T) res_matrix = np.row_stack(res_matrix) self.res_matrix = res_matrix return res_matrix def get_W(self, method: str = "struc", eps: float = 1e-5) -> np.ndarray: """ Calculate W matrix. Args: method: Reconciliation method for temporal hierarchical model. Valid methods include 'struc', 'svar', 'hvar', 'mint_sample', and 'mint_shrink'. eps: Epsilons added to W for numerical stability. Returns: W matrix. (If W is a diagnoal matrix, only returns its diagnoal elements). """ levels = self.levels freq = self.freq if method == "struc": ans = [] for k in levels: ans.extend([k] * freq[k]) return np.array(ans) elif method == "svar": residuals = self._get_all_residuals() ans = [] for k in levels: ans.extend([np.nanmean(np.square(residuals[k]))] * freq[k]) return np.array(ans) + eps elif method == "hvar": res_matrix = self._get_residual_matrix() return np.nanvar(res_matrix, axis=1) + eps elif method == "mint_shrink": cov = np.cov(self._get_residual_matrix()) # get correlation matrix sqrt = np.sqrt(np.diag(cov)) cor = ( (cov / sqrt).T ) / sqrt # due to symmetry, no need to transpose the matrix again. 
mask = ~np.eye(cor.shape[0], dtype=bool) cor = cor[mask] lam = np.var(cor) / np.sum(cor**2) lam = np.max([0, lam]) cov = np.diag(np.diag(cov)) * lam + (1.0 - lam) * cov cov += np.eye(len(cov)) * eps return cov elif method == "mint_sample": cov = np.cov(self._get_residual_matrix()) cov += np.eye(len(cov)) * eps return cov else: raise _log_error(f"{method} is invalid for get_W() method.") # pyre-fixme[2]: Parameter must be annotated. def
(self, steps: int, method="struc") -> Dict[int, np.ndarray]: """ Generate original forecasts from each base model (without time index). Args: steps: Number of forecasts for level 1. methd: Reconciliation method. Returns: Dictionary of forecasts of each level, whose key is level and value is forecast array. """ m = self.m levels = self.levels freq = self.freq h = int(np.ceil(steps / m)) hf = steps // m orig_fcst = {} models = self.models # generate forecasts for each level for k in levels: num = int(freq[k] * h) # assert models is not None # pyre-fixme[16]: `Optional` has no attribute `__getitem__`. if models[k] is not None: orig_fcst[k] = models[k].predict(steps=num, freq="D")["fcst"].values else: fcst_num = len(self.info_fcsts[k]) if fcst_num < num: if fcst_num >= hf * freq[k]: # since the final output only needs hf*freq[k] forecasts # for level k, we pad the forecast array to desirable # length. (note that the padding values would be ignored # in the final output.) orig_fcst[k] = np.concatenate( [ self.info_fcsts[k], [self.info_fcsts[k][-1]] * (num - fcst_num), ] ) elif method == "bu" and k != 1: # for 'bu' only level 1 is needed. orig_fcst[k] = self.info_fcsts[k] else: msg = ( f"{hf*freq[k]} steps of forecasts for level {k} are" f" needed, but only receive {fcst_num} steps (and " "forecast model is None)." ) raise _log_error(msg) else: orig_fcst[k] = self.info_fcsts[k][:num] return orig_fcst def _predict( self, steps: int, # pyre-fixme[2]: Parameter must be annotated. method="struc", origin_fcst: bool = False, fcst_levels: Optional[List[int]] = None, ) -> Dict[str, Dict[int, np.ndarray]]: """Generate forecasts for each level (without time index). Args: steps: Number of forecasts for level 1. methd: Reconciliation method. origin_fcst: Whether to return the forecasts of base models. fcst_levels: Levels that one wants to generate forecasts for. If None, then all forecasts for all levels of the base models are generated. 
Returns: Dictionary of forecasts, whose key is level and value is forecast array. """ if self.models is None: raise _log_error("Please fit base models via .fit() first.") m = self.m levels = self.levels freq = self.freq h = int(np.ceil(steps / m)) if fcst_levels is None: fcst_levels = list(levels) fcst = {} orig_fcst = self._predict_origin(steps, method) if method == "bu": # bottom_up method yhat = orig_fcst[1] elif method == "median": # median method tem = [] for k in levels: tem.append(np.repeat(orig_fcst[k] / k, k)) tem = np.row_stack(tem) yhat = np.median(tem, axis=0) elif method in {"struc", "svar", "hvar", "mint_shrink", "mint_sample"}: # transform fcsts into matrix yh = [] for k in levels: yh.append(orig_fcst[k].reshape(h, -1).T) yh = np.row_stack(yh) S = self.get_S() W = self.get_W(method) # when W is a vector, i.e., a simpler represent for a diagnoal matrix if len(W.shape) == 1: T = (S.T) / W else: T = np.linalg.solve(W, S).T yhat = np.dot(S, np.linalg.solve(T.dot(S), T)).dot(yh) # extract forecasts for level 1 yhat = (yhat[(-freq[1]) :, :].T).flatten()[:steps] else: raise _log_error(f"Reconciliation method {method} is invalid.") # aggregate fcsts for k in fcst_levels: fcst[k] = self._aggregate_data(yhat, k)[: (steps // k)] ans = {"fcst": fcst} if origin_fcst: for elm in orig_fcst: orig_fcst[elm] = orig_fcst[elm][: (steps // elm)] ans["origin_fcst"] = orig_fcst return ans def predict( self, steps: int, # pyre-fixme[2]: Parameter must be annotated. method="struc", freq: Optional[str] = None, origin_fcst: bool = False, fcst_levels: Optional[List[int]] = None, last_timestamp: Optional[pd.Timestamp] = None, ) -> Dict[str, Dict[int, pd.DataFrame]]: """Generate reconciled forecasts (with time index). Args: steps: The number of forecasts needed for level 1. methd: The name of the reconciliation method. Can be 'bu' (bottom-up), 'median', 'struc' (structure-variance), 'svar', 'hvar', 'mint_shrink' or 'mint_sample'. freq: The frequency of the time series at level 1. 
If None, then we infer the frequency via ts.infer_freq_robust(). origin_fcst: Whether or not to return the forecasts of base models. fcst_levels: The levels to generate forecasts for. Default is None, which generates forecasts for all the levels of the base models. Returns: A dictionary of forecasts, whose key is the level and the corresponding value is a np.array storing the forecasts. """ if freq is None: freq = self.data.infer_freq_robust() last_timestamp = self.data.time.max() fcsts = self._predict( steps, method=method, origin_fcst=origin_fcst, fcst_levels=fcst_levels ) ans = {} for elm in fcsts: tmp = {} for k in fcsts[elm]: fcst_num = len(fcsts[elm][k]) time = pd.date_range( last_timestamp + freq * k, last_timestamp + freq * k * fcst_num, periods=fcst_num, ) tmp[k] = pd.DataFrame({"time": time, "fcst": fcsts[elm][k]}, copy=False) ans[elm] = tmp return ans def median_validation( self, # pyre-fixme[2]: Parameter must be annotated. steps, dist_metric: str = "mae", threshold: float = 5.0, ) -> List[int]: """Filtering out bad fcsts based on median forecasts. This function detects the levels whose forecasts are greatly deviate from median forecasts, which is a strong indication of bad forecasts. Args: steps: The number of forecasts needed for level 1 for validation. dist_metric: The distance metric used to measure the distance between the base forecasts and the median forecasts. threshold: The threshold for deviance. The forecast whose distance from the median forecast is greater than threshold*std is taken as bad forecasts. Default is 3. Returns: A list of integers representing the levels whose forecasts are bad. 
""" diffs = {} ks = self.levels try: func = metrics.core_metric(dist_metric) except ValueError: raise _log_error(f"Invalid dist_metric {dist_metric}") median_fcst = self._predict(steps, method="median", origin_fcst=True) for k in ks: diffs[k] = func(median_fcst["fcst"][k], median_fcst["origin_fcst"][k]) if dist_metric == "mae": diffs[k] /= k vals = np.array(list(diffs.values())) try: cov = MinCovDet().fit(vals[:, None]) lqr = np.sqrt(cov.covariance_.flatten()[0]) except Exception: low, up = np.percentile(vals, [25, 75]) lqr = up - low up = np.median(vals) + lqr * threshold return [k for k in diffs if diffs[k] >= up]
_predict_origin
identifier_name
thm.py
# Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """This module contains the class TemporalHierarchicalModel class. """ import logging from math import gcd from typing import Dict, List, Optional, Type import numpy as np import pandas as pd from kats.consts import TimeSeriesData from kats.metrics import metrics from kats.models import ( arima, holtwinters, linear_model, prophet, quadratic_model, sarima, theta, ) from kats.models.model import Model from kats.models.reconciliation.base_models import BaseTHModel, GetAggregateTS from sklearn.covariance import MinCovDet # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. BASE_MODELS: Dict[str, Type[Model]] = { "arima": arima.ARIMAModel, "holtwinters": holtwinters.HoltWintersModel, "sarima": sarima.SARIMAModel, "prophet": prophet.ProphetModel, "linear": linear_model.LinearModel, "quadratic": quadratic_model.QuadraticModel, "theta": theta.ThetaModel, } def _log_error(msg: str) -> ValueError: logging.error(msg) return ValueError(msg) class TemporalHierarchicalModel: """Temporal hierarchical model class. This framework combines the base models of different temporal aggregation levels to generate reconciled forecasts. This class provides fit, get_S, get_W, predict and median_validation. Attributes: data: A TimeSeriesData object storing the time series data for level 1 (i.e., the most disaggregate level). baseModels: A list BaseTHModel objects representing the base models for different levels. """ # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. models: Optional[Dict[str, Model]] = None residuals: Optional[Dict[int, np.ndarray]] = None res_matrix: Optional[np.ndarray] = None def __init__(self, data: TimeSeriesData, baseModels: List[BaseTHModel]) -> None: if not data.is_univariate(): msg = f"Only univariate time series supported, but got {type(data.value)}." 
raise _log_error(msg) self.data = data for basemodel in baseModels: if not isinstance(basemodel, BaseTHModel): msg = ( "Base model should be a BaseTHModel object but is " f"{type(basemodel)}." ) raise _log_error(msg) levels = [bm.level for bm in baseModels] if 1 not in levels: raise _log_error("Model of level 1 is missing.") if len(levels) != len(set(levels)): raise _log_error("One level cannot receive multiple models.") # pyre-fixme[4]: Attribute must be annotated. self.levels = sorted(levels, reverse=True) m = self._get_m(levels) # pyre-fixme[4]: Attribute must be annotated. self.m = m # pyre-fixme[4]: Attribute must be annotated. self.freq = {k: int(m / k) for k in self.levels} self.baseModels = baseModels # pyre-fixme[4]: Attribute must be annotated. self.info_fcsts = {} # pyre-fixme[4]: Attribute must be annotated. self.info_residuals = {} def _get_m(self, ks: List[int]) -> int: """Calculate m. m is the minimum common multiple of all levels. Args: ks: the list of integers representing all the levels. Returns: An integer representing the minimum common multiple. """ base = 1 for c in ks: base = base * c // gcd(base, c) return base def fit(self) -> None: """Fit all base models. If base model only has residuals and forecasts, store the information. """ levels = self.levels TSs = GetAggregateTS(self.data).aggregate(levels) models = {} residuals = {} fcsts = {} for bm in self.baseModels: model_name = bm.model_name if model_name is None: # only residuals and fcsts are provided models[bm.level] = None residuals[bm.level] = bm.residuals fcsts[bm.level] = bm.fcsts else: m = BASE_MODELS[model_name]( data=TSs[bm.level], params=bm.model_params, ) m.fit() models[bm.level] = m self.models = models self.info_fcsts = fcsts self.info_residuals = residuals def get_S(self) -> np.ndarray: """Calculate S matrix. Returns: A np.array representing the S matrix. 
""" ans = [] levels = self.levels m = self.m for k in levels: for i in range(self.freq[k]): tem = np.zeros(m) tem[(i * k) : (i * k + k)] = 1.0 ans.append(tem) return np.row_stack(ans) def _aggregate_data(self, data: np.ndarray, k: int) -> np.ndarray: """Aggregate data according to level k.""" if k == 1: return data n = len(data) h = n // k return (data[: int(h * k)]).reshape(-1, k).sum(axis=1) # pyre-fixme[24]: Generic type `Model` expects 1 type parameter. def _get_residuals(self, model: Model) -> np.ndarray: """Calculate residuals of each base model. Args: model: a callable model object representing the trained base model. Returns: A np.ndarray of residuals. """ try: # pyre-fixme[16]: `Model` has no attribute `model`. return model.model.resid.values except Exception: fcst = model.predict(steps=1, freq="D", include_history=True) # pyre-fixme[16]: `None` has no attribute `merge`. # pyre-fixme[16]: `Optional` has no attribute `to_dataframe`. merge = fcst.merge(model.data.to_dataframe(), on="time") for col in merge.columns: if col != "time" and ("fcst" not in col): return merge[col].values - merge["fcst"].values raise ValueError("Couldn't find residual or forecast values in model") def _get_all_residuals(self) -> Dict[int, np.ndarray]: """ Calculate residuals for all base models. Returns: Dictionary for residuals, whose key is level and value is residual array. """ residuals = self.residuals # if residuals have not been calculated yet if residuals is None: levels = self.levels models = self.models residuals = {} for k in levels: # assert models is not None # pyre-fixme[16]: `Optional` has no attribute `__getitem__`. if models[k] is not None: try: vals = self._get_residuals(models[k]) except Exception as e: msg = ( f"Failed to get residuals for level {k} with error " f"message {e}." 
) raise _log_error(msg) residuals[k] = vals else: residuals[k] = self.info_residuals[k] self.residuals = residuals return residuals def _get_residual_matrix(self) -> np.ndarray: """ Reshape residuals into matrix format. Returns: Residual matrix. """ res_matrix = self.res_matrix if res_matrix is None: residuals = self._get_all_residuals() ks = self.levels freq = self.freq h = np.min([len(residuals[k]) // freq[k] for k in ks]) res_matrix = [] for k in ks: n = h * freq[k] res_matrix.append(residuals[k][-n:].reshape(h, -1).T) res_matrix = np.row_stack(res_matrix) self.res_matrix = res_matrix return res_matrix def get_W(self, method: str = "struc", eps: float = 1e-5) -> np.ndarray: """ Calculate W matrix. Args: method: Reconciliation method for temporal hierarchical model. Valid methods include 'struc', 'svar', 'hvar', 'mint_sample', and 'mint_shrink'. eps: Epsilons added to W for numerical stability. Returns: W matrix. (If W is a diagnoal matrix, only returns its diagnoal elements). """ levels = self.levels freq = self.freq if method == "struc": ans = [] for k in levels: ans.extend([k] * freq[k]) return np.array(ans) elif method == "svar": residuals = self._get_all_residuals() ans = [] for k in levels: ans.extend([np.nanmean(np.square(residuals[k]))] * freq[k]) return np.array(ans) + eps elif method == "hvar":
elif method == "mint_shrink": cov = np.cov(self._get_residual_matrix()) # get correlation matrix sqrt = np.sqrt(np.diag(cov)) cor = ( (cov / sqrt).T ) / sqrt # due to symmetry, no need to transpose the matrix again. mask = ~np.eye(cor.shape[0], dtype=bool) cor = cor[mask] lam = np.var(cor) / np.sum(cor**2) lam = np.max([0, lam]) cov = np.diag(np.diag(cov)) * lam + (1.0 - lam) * cov cov += np.eye(len(cov)) * eps return cov elif method == "mint_sample": cov = np.cov(self._get_residual_matrix()) cov += np.eye(len(cov)) * eps return cov else: raise _log_error(f"{method} is invalid for get_W() method.") # pyre-fixme[2]: Parameter must be annotated. def _predict_origin(self, steps: int, method="struc") -> Dict[int, np.ndarray]: """ Generate original forecasts from each base model (without time index). Args: steps: Number of forecasts for level 1. methd: Reconciliation method. Returns: Dictionary of forecasts of each level, whose key is level and value is forecast array. """ m = self.m levels = self.levels freq = self.freq h = int(np.ceil(steps / m)) hf = steps // m orig_fcst = {} models = self.models # generate forecasts for each level for k in levels: num = int(freq[k] * h) # assert models is not None # pyre-fixme[16]: `Optional` has no attribute `__getitem__`. if models[k] is not None: orig_fcst[k] = models[k].predict(steps=num, freq="D")["fcst"].values else: fcst_num = len(self.info_fcsts[k]) if fcst_num < num: if fcst_num >= hf * freq[k]: # since the final output only needs hf*freq[k] forecasts # for level k, we pad the forecast array to desirable # length. (note that the padding values would be ignored # in the final output.) orig_fcst[k] = np.concatenate( [ self.info_fcsts[k], [self.info_fcsts[k][-1]] * (num - fcst_num), ] ) elif method == "bu" and k != 1: # for 'bu' only level 1 is needed. 
orig_fcst[k] = self.info_fcsts[k] else: msg = ( f"{hf*freq[k]} steps of forecasts for level {k} are" f" needed, but only receive {fcst_num} steps (and " "forecast model is None)." ) raise _log_error(msg) else: orig_fcst[k] = self.info_fcsts[k][:num] return orig_fcst def _predict( self, steps: int, # pyre-fixme[2]: Parameter must be annotated. method="struc", origin_fcst: bool = False, fcst_levels: Optional[List[int]] = None, ) -> Dict[str, Dict[int, np.ndarray]]: """Generate forecasts for each level (without time index). Args: steps: Number of forecasts for level 1. methd: Reconciliation method. origin_fcst: Whether to return the forecasts of base models. fcst_levels: Levels that one wants to generate forecasts for. If None, then all forecasts for all levels of the base models are generated. Returns: Dictionary of forecasts, whose key is level and value is forecast array. """ if self.models is None: raise _log_error("Please fit base models via .fit() first.") m = self.m levels = self.levels freq = self.freq h = int(np.ceil(steps / m)) if fcst_levels is None: fcst_levels = list(levels) fcst = {} orig_fcst = self._predict_origin(steps, method) if method == "bu": # bottom_up method yhat = orig_fcst[1] elif method == "median": # median method tem = [] for k in levels: tem.append(np.repeat(orig_fcst[k] / k, k)) tem = np.row_stack(tem) yhat = np.median(tem, axis=0) elif method in {"struc", "svar", "hvar", "mint_shrink", "mint_sample"}: # transform fcsts into matrix yh = [] for k in levels: yh.append(orig_fcst[k].reshape(h, -1).T) yh = np.row_stack(yh) S = self.get_S() W = self.get_W(method) # when W is a vector, i.e., a simpler represent for a diagnoal matrix if len(W.shape) == 1: T = (S.T) / W else: T = np.linalg.solve(W, S).T yhat = np.dot(S, np.linalg.solve(T.dot(S), T)).dot(yh) # extract forecasts for level 1 yhat = (yhat[(-freq[1]) :, :].T).flatten()[:steps] else: raise _log_error(f"Reconciliation method {method} is invalid.") # aggregate fcsts for k in fcst_levels: 
fcst[k] = self._aggregate_data(yhat, k)[: (steps // k)] ans = {"fcst": fcst} if origin_fcst: for elm in orig_fcst: orig_fcst[elm] = orig_fcst[elm][: (steps // elm)] ans["origin_fcst"] = orig_fcst return ans def predict( self, steps: int, # pyre-fixme[2]: Parameter must be annotated. method="struc", freq: Optional[str] = None, origin_fcst: bool = False, fcst_levels: Optional[List[int]] = None, last_timestamp: Optional[pd.Timestamp] = None, ) -> Dict[str, Dict[int, pd.DataFrame]]: """Generate reconciled forecasts (with time index). Args: steps: The number of forecasts needed for level 1. methd: The name of the reconciliation method. Can be 'bu' (bottom-up), 'median', 'struc' (structure-variance), 'svar', 'hvar', 'mint_shrink' or 'mint_sample'. freq: The frequency of the time series at level 1. If None, then we infer the frequency via ts.infer_freq_robust(). origin_fcst: Whether or not to return the forecasts of base models. fcst_levels: The levels to generate forecasts for. Default is None, which generates forecasts for all the levels of the base models. Returns: A dictionary of forecasts, whose key is the level and the corresponding value is a np.array storing the forecasts. """ if freq is None: freq = self.data.infer_freq_robust() last_timestamp = self.data.time.max() fcsts = self._predict( steps, method=method, origin_fcst=origin_fcst, fcst_levels=fcst_levels ) ans = {} for elm in fcsts: tmp = {} for k in fcsts[elm]: fcst_num = len(fcsts[elm][k]) time = pd.date_range( last_timestamp + freq * k, last_timestamp + freq * k * fcst_num, periods=fcst_num, ) tmp[k] = pd.DataFrame({"time": time, "fcst": fcsts[elm][k]}, copy=False) ans[elm] = tmp return ans def median_validation( self, # pyre-fixme[2]: Parameter must be annotated. steps, dist_metric: str = "mae", threshold: float = 5.0, ) -> List[int]: """Filtering out bad fcsts based on median forecasts. 
This function detects the levels whose forecasts are greatly deviate from median forecasts, which is a strong indication of bad forecasts. Args: steps: The number of forecasts needed for level 1 for validation. dist_metric: The distance metric used to measure the distance between the base forecasts and the median forecasts. threshold: The threshold for deviance. The forecast whose distance from the median forecast is greater than threshold*std is taken as bad forecasts. Default is 3. Returns: A list of integers representing the levels whose forecasts are bad. """ diffs = {} ks = self.levels try: func = metrics.core_metric(dist_metric) except ValueError: raise _log_error(f"Invalid dist_metric {dist_metric}") median_fcst = self._predict(steps, method="median", origin_fcst=True) for k in ks: diffs[k] = func(median_fcst["fcst"][k], median_fcst["origin_fcst"][k]) if dist_metric == "mae": diffs[k] /= k vals = np.array(list(diffs.values())) try: cov = MinCovDet().fit(vals[:, None]) lqr = np.sqrt(cov.covariance_.flatten()[0]) except Exception: low, up = np.percentile(vals, [25, 75]) lqr = up - low up = np.median(vals) + lqr * threshold return [k for k in diffs if diffs[k] >= up]
res_matrix = self._get_residual_matrix() return np.nanvar(res_matrix, axis=1) + eps
conditional_block
Thyroid annotator.py
### Hacked together by Johnson Thomas ### Annotation UI created in Streamlit ### Can be used for binary or multilabel annotation ### Importing libraries from streamlit.hashing import _CodeHasher from streamlit.report_thread import get_report_ctx from streamlit.server.server import Server import streamlit as st from PIL import Image import os import pandas as pd import re ### Creating a 3 column layout in streamlit col1, col2, col3= st.beta_columns([3, 1,1]) ### Folder where the image files are kept. This path is in windows format. ### If you are runnin it in Linux, chnage the path appropriately. #source_dir = r'C:\Users\JOHNY\CV_recepies\cv\images' source_dir = None csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv' proj_file = r'C:\Users\JOHNY\CV_recepies\St_annot.csv' ### Function to create a python list cotaning paths to image files in a specific folder ### This function is decorated with @st.cache to avoid rerunning extensions = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG'] @st.cache(allow_output_mutation=True) def get_file_list(root_dir): file_list = [] counter = 1 for root, directories, filenames in os.walk(root_dir): for filename in filenames: if any(ext in filename for ext in extensions): file_list.append(os.path.join(root, filename)) counter += 1 return sorted(file_list) ### Creating the side bar add_proj_text = st.sidebar.write('Start new project') add_textbox = st.sidebar.text_input('Project name') add_foldbox = st.sidebar.text_input('Folder name' ) add_newproj_btn = st.sidebar.button('Create new project') st.sidebar.write(' ') add_proj_load = st.sidebar.write('Load project') #proj_list =new_installation(proj_file) add_csvbox = st.sidebar.selectbox('Pick your project',"exp1") add_loadproj_btn = st.sidebar.button('Load project') ### store file names to a list and find the number of files in the list #file_to_anot = get_file_list(source_dir) #file_to_anot = get_file_list(source_dir) #max_ind= len(file_to_anot) -1 ### Creating a list to store the 
annotations ### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list @st.cache(allow_output_mutation=True) def init_anot(file_to_anot): anot = [None]*(len(file_to_anot)) comp_list = [None]*(len(file_to_anot)) echo_list = [None]*(len(file_to_anot)) shape_list =[None]*(len(file_to_anot)) marg_list = [None]*(len(file_to_anot)) foci_list = [None]*(len(file_to_anot)) return anot,comp_list,echo_list,shape_list,marg_list,foci_list ### Creating a list to store just the file names @st.cache(allow_output_mutation=True) def init_base_f(file_to_anot): base_file = [None]*(len(file_to_anot)) return base_file #anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot) #base_f = init_base_f(file_to_anot) ### Given an index this function converts path in the index to windows readable path ### then load the imaeg and returns the loaded image def get_image(ind_no,file_to_anot): file_name = file_to_anot[ind_no] im_file =re.sub("\\\\","\\\\\\\\", file_name) loaded_image = Image.open(im_file) return loaded_image ### Get just the image file name from the complete path string def extract_basename(path): basename = re.search(r'[^\\/]+(?=[\\/]?$)', path) if basename: return basename.group(0) def get_index(dta_ar, out_string): for i in range(len(dta_ar)): if dta_ar[i] == out_string: in_dex = i return in_dex def main(): state = _get_state() def set_index_in(in_num): state.comp_list[in_num] = get_index(comp_options, composition) state.echo_list[in_num] = get_index(echo_options, echo) state.shape_list[in_num]= get_index(shape_options, shape) state.marg_list[in_num] = get_index(margin_options, margin) state.foci_list[in_num]= get_index(foci_options, echogenic_foci) def update_choices(ind_num): ''' This function collects the values of lables/tags for the next or previous image, then displays it in the user interface. This function is called each time Next or Previous button is pressed. 
''' if state.comp_list[ind_num] != None: state.comp = state.comp_list[ind_num] else: state.comp = 0 if state.echo_list[ind_num] != None: state.echo = state.echo_list[ind_num] else: state.echo = 0 if state.shape_list[ind_num] !=None: state.shape = state.shape_list[ind_num] else: state.shape = 0 if state.marg_list[ind_num] != None: state.margin = state.marg_list[ind_num] else: state.margin = 0 if state.foci_list[ind_num] != None: state.foci = state.foci_list[ind_num] else: state.foci = 0 #print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci) # Initializing a state variable input if state.input == None: state.input = 0 state.last_anot =0 state.comp = 0 state.echo = 0 state.shape = 0 state.margin = 0 state.foci = 0 # Creating the UI comp_options = ['cystic','spongiform', 'mixed cystic','solid'] echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic'] shape_options =['wider than tall','taller than wide'] margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete'] foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci'] with col2: prev_button = st.button('Previous') if state.active_project == True: composition = st.radio('Composition',comp_options, state.comp) echo = st.radio('Echogenicity',echo_options, state.echo) shape = st.radio('Shape',shape_options, state.shape) state.started = True with col3: next_button = st.button('Next') if state.active_project == True: margin = st.radio('Margin',margin_options, state.margin) echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci) with col1: #if state.input ==0: if next_button and state.active_project == True: if state.input == state.max_ind:
else: set_index_in(state.input) #update_choices(state.input,comp_list) state.input = state.input + 1 update_choices(state.input) if state.input > state.last_anot: state.last_anot = state.input if prev_button and state.active_project == True: if state.input == 0: e =RuntimeError('Reached the first image in the folder') st.exception(e) else: set_index_in(state.input) #update_choices(state.input,state.comp_list) state.input = state.input -1 update_choices(state.input) if add_newproj_btn and add_foldbox != "": state.file_to_anot = get_file_list(add_foldbox) state.max_ind= len(state.file_to_anot) -1 ### initializing variables state.active_project = True state.input = 0 state.last_anot =0 state.comp = 0 state.echo = 0 state.shape = 0 state.margin = 0 state.foci = 0 state.started = False state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot) state.base_f = init_base_f(state.file_to_anot) if add_foldbox != "" and state.started == True: st.image(get_image(state.input,state.file_to_anot),use_column_width=True) desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot ) #print("anot list",state.anot_list) state.anot_list[state.input] = lbl state.base_f[state.input] = fln col1.write( desc_nod) ### Save button ######################################################## save_button = st.button('Save') if save_button: set_index_in(state.input) df = pd.DataFrame(list(zip(state.base_f, state.anot_list)), columns =["IM_FILENAME", "LABELS"]) cwd = os.getcwd() csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv' #print("printing curr file name") #print(csv_to_log) df.to_csv(csv_to_log) #proj = pd.read_csv(proj_file) #ind_pr= proj.index[proj['Project_name'] == curr_proj_name].tolist() print(ind_pr) state.sync() def gen_desc_save(composition, echo, shape, margin, echogenic_foci, ind_no,file_to_anot): comp = composition.capitalize() if echogenic_foci =="none": 
echo_foc = "no calcification or comet tail artiacts" else: echo_foc = echogenic_foci desc = comp + " " + echo + " " + shape + " thyroid nodule with " + margin + " margin" + " and " + echo_foc + "." file_name2 = file_to_anot[ind_no] file_only = extract_basename(file_name2) label_to_log = composition + "," + echo + "," + shape + "," + margin + "," + echogenic_foci #anotf[ind_no] = label_to_log return desc,label_to_log, file_only class _SessionState: def __init__(self, session, hash_funcs): """Initialize SessionState instance.""" self.__dict__["_state"] = { "data": {}, "hash": None, "hasher": _CodeHasher(hash_funcs), "is_rerun": False, "session": session, } def __call__(self, **kwargs): """Initialize state data once.""" for item, value in kwargs.items(): if item not in self._state["data"]: self._state["data"][item] = value def __getitem__(self, item): """Return a saved state value, None if item is undefined.""" return self._state["data"].get(item, None) def __getattr__(self, item): """Return a saved state value, None if item is undefined.""" return self._state["data"].get(item, None) def __setitem__(self, item, value): """Set state value.""" self._state["data"][item] = value def __setattr__(self, item, value): """Set state value.""" self._state["data"][item] = value def clear(self): """Clear session state and request a rerun.""" self._state["data"].clear() self._state["session"].request_rerun() def sync(self): """Rerun the app with all state values up to date from the beginning to fix rollbacks.""" # Ensure to rerun only once to avoid infinite loops # caused by a constantly changing state value at each run. 
# # Example: state.value += 1 if self._state["is_rerun"]: self._state["is_rerun"] = False elif self._state["hash"] is not None: if self._state["hash"] != self._state["hasher"].to_bytes(self._state["data"], None): self._state["is_rerun"] = True self._state["session"].request_rerun() self._state["hash"] = self._state["hasher"].to_bytes(self._state["data"], None) def _get_session(): session_id = get_report_ctx().session_id session_info = Server.get_current()._get_session_info(session_id) if session_info is None: raise RuntimeError("Couldn't get your Streamlit Session object.") return session_info.session def _get_state(hash_funcs=None): session = _get_session() if not hasattr(session, "_custom_session_state"): session._custom_session_state = _SessionState(session, hash_funcs) return session._custom_session_state if __name__ == "__main__": main() #main(file_to_anot, anotf, base_f,comp_list,echo_list,shape_list,marg_list,foci_list)
e =RuntimeError('Reached end of images in the folder') st.exception(e)
conditional_block
Thyroid annotator.py
### Hacked together by Johnson Thomas ### Annotation UI created in Streamlit ### Can be used for binary or multilabel annotation ### Importing libraries from streamlit.hashing import _CodeHasher from streamlit.report_thread import get_report_ctx from streamlit.server.server import Server import streamlit as st from PIL import Image import os import pandas as pd import re ### Creating a 3 column layout in streamlit col1, col2, col3= st.beta_columns([3, 1,1]) ### Folder where the image files are kept. This path is in windows format. ### If you are runnin it in Linux, chnage the path appropriately. #source_dir = r'C:\Users\JOHNY\CV_recepies\cv\images' source_dir = None csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv' proj_file = r'C:\Users\JOHNY\CV_recepies\St_annot.csv' ### Function to create a python list cotaning paths to image files in a specific folder ### This function is decorated with @st.cache to avoid rerunning extensions = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG'] @st.cache(allow_output_mutation=True) def get_file_list(root_dir): file_list = [] counter = 1 for root, directories, filenames in os.walk(root_dir): for filename in filenames: if any(ext in filename for ext in extensions): file_list.append(os.path.join(root, filename)) counter += 1 return sorted(file_list) ### Creating the side bar add_proj_text = st.sidebar.write('Start new project') add_textbox = st.sidebar.text_input('Project name') add_foldbox = st.sidebar.text_input('Folder name' ) add_newproj_btn = st.sidebar.button('Create new project') st.sidebar.write(' ') add_proj_load = st.sidebar.write('Load project') #proj_list =new_installation(proj_file) add_csvbox = st.sidebar.selectbox('Pick your project',"exp1") add_loadproj_btn = st.sidebar.button('Load project') ### store file names to a list and find the number of files in the list #file_to_anot = get_file_list(source_dir) #file_to_anot = get_file_list(source_dir) #max_ind= len(file_to_anot) -1 ### Creating a list to store the 
annotations ### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list @st.cache(allow_output_mutation=True) def init_anot(file_to_anot): anot = [None]*(len(file_to_anot)) comp_list = [None]*(len(file_to_anot)) echo_list = [None]*(len(file_to_anot)) shape_list =[None]*(len(file_to_anot)) marg_list = [None]*(len(file_to_anot)) foci_list = [None]*(len(file_to_anot)) return anot,comp_list,echo_list,shape_list,marg_list,foci_list ### Creating a list to store just the file names @st.cache(allow_output_mutation=True) def init_base_f(file_to_anot): base_file = [None]*(len(file_to_anot)) return base_file #anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot) #base_f = init_base_f(file_to_anot) ### Given an index this function converts path in the index to windows readable path ### then load the imaeg and returns the loaded image def
(ind_no,file_to_anot): file_name = file_to_anot[ind_no] im_file =re.sub("\\\\","\\\\\\\\", file_name) loaded_image = Image.open(im_file) return loaded_image ### Get just the image file name from the complete path string def extract_basename(path): basename = re.search(r'[^\\/]+(?=[\\/]?$)', path) if basename: return basename.group(0) def get_index(dta_ar, out_string): for i in range(len(dta_ar)): if dta_ar[i] == out_string: in_dex = i return in_dex def main(): state = _get_state() def set_index_in(in_num): state.comp_list[in_num] = get_index(comp_options, composition) state.echo_list[in_num] = get_index(echo_options, echo) state.shape_list[in_num]= get_index(shape_options, shape) state.marg_list[in_num] = get_index(margin_options, margin) state.foci_list[in_num]= get_index(foci_options, echogenic_foci) def update_choices(ind_num): ''' This function collects the values of lables/tags for the next or previous image, then displays it in the user interface. This function is called each time Next or Previous button is pressed. 
''' if state.comp_list[ind_num] != None: state.comp = state.comp_list[ind_num] else: state.comp = 0 if state.echo_list[ind_num] != None: state.echo = state.echo_list[ind_num] else: state.echo = 0 if state.shape_list[ind_num] !=None: state.shape = state.shape_list[ind_num] else: state.shape = 0 if state.marg_list[ind_num] != None: state.margin = state.marg_list[ind_num] else: state.margin = 0 if state.foci_list[ind_num] != None: state.foci = state.foci_list[ind_num] else: state.foci = 0 #print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci) # Initializing a state variable input if state.input == None: state.input = 0 state.last_anot =0 state.comp = 0 state.echo = 0 state.shape = 0 state.margin = 0 state.foci = 0 # Creating the UI comp_options = ['cystic','spongiform', 'mixed cystic','solid'] echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic'] shape_options =['wider than tall','taller than wide'] margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete'] foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci'] with col2: prev_button = st.button('Previous') if state.active_project == True: composition = st.radio('Composition',comp_options, state.comp) echo = st.radio('Echogenicity',echo_options, state.echo) shape = st.radio('Shape',shape_options, state.shape) state.started = True with col3: next_button = st.button('Next') if state.active_project == True: margin = st.radio('Margin',margin_options, state.margin) echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci) with col1: #if state.input ==0: if next_button and state.active_project == True: if state.input == state.max_ind: e =RuntimeError('Reached end of images in the folder') st.exception(e) else: set_index_in(state.input) #update_choices(state.input,comp_list) state.input = state.input + 1 update_choices(state.input) if state.input > 
state.last_anot: state.last_anot = state.input if prev_button and state.active_project == True: if state.input == 0: e =RuntimeError('Reached the first image in the folder') st.exception(e) else: set_index_in(state.input) #update_choices(state.input,state.comp_list) state.input = state.input -1 update_choices(state.input) if add_newproj_btn and add_foldbox != "": state.file_to_anot = get_file_list(add_foldbox) state.max_ind= len(state.file_to_anot) -1 ### initializing variables state.active_project = True state.input = 0 state.last_anot =0 state.comp = 0 state.echo = 0 state.shape = 0 state.margin = 0 state.foci = 0 state.started = False state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot) state.base_f = init_base_f(state.file_to_anot) if add_foldbox != "" and state.started == True: st.image(get_image(state.input,state.file_to_anot),use_column_width=True) desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot ) #print("anot list",state.anot_list) state.anot_list[state.input] = lbl state.base_f[state.input] = fln col1.write( desc_nod) ### Save button ######################################################## save_button = st.button('Save') if save_button: set_index_in(state.input) df = pd.DataFrame(list(zip(state.base_f, state.anot_list)), columns =["IM_FILENAME", "LABELS"]) cwd = os.getcwd() csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv' #print("printing curr file name") #print(csv_to_log) df.to_csv(csv_to_log) #proj = pd.read_csv(proj_file) #ind_pr= proj.index[proj['Project_name'] == curr_proj_name].tolist() print(ind_pr) state.sync() def gen_desc_save(composition, echo, shape, margin, echogenic_foci, ind_no,file_to_anot): comp = composition.capitalize() if echogenic_foci =="none": echo_foc = "no calcification or comet tail artiacts" else: echo_foc = echogenic_foci desc = comp + " " + echo + " " + shape + " thyroid nodule with 
" + margin + " margin" + " and " + echo_foc + "." file_name2 = file_to_anot[ind_no] file_only = extract_basename(file_name2) label_to_log = composition + "," + echo + "," + shape + "," + margin + "," + echogenic_foci #anotf[ind_no] = label_to_log return desc,label_to_log, file_only class _SessionState: def __init__(self, session, hash_funcs): """Initialize SessionState instance.""" self.__dict__["_state"] = { "data": {}, "hash": None, "hasher": _CodeHasher(hash_funcs), "is_rerun": False, "session": session, } def __call__(self, **kwargs): """Initialize state data once.""" for item, value in kwargs.items(): if item not in self._state["data"]: self._state["data"][item] = value def __getitem__(self, item): """Return a saved state value, None if item is undefined.""" return self._state["data"].get(item, None) def __getattr__(self, item): """Return a saved state value, None if item is undefined.""" return self._state["data"].get(item, None) def __setitem__(self, item, value): """Set state value.""" self._state["data"][item] = value def __setattr__(self, item, value): """Set state value.""" self._state["data"][item] = value def clear(self): """Clear session state and request a rerun.""" self._state["data"].clear() self._state["session"].request_rerun() def sync(self): """Rerun the app with all state values up to date from the beginning to fix rollbacks.""" # Ensure to rerun only once to avoid infinite loops # caused by a constantly changing state value at each run. 
# # Example: state.value += 1 if self._state["is_rerun"]: self._state["is_rerun"] = False elif self._state["hash"] is not None: if self._state["hash"] != self._state["hasher"].to_bytes(self._state["data"], None): self._state["is_rerun"] = True self._state["session"].request_rerun() self._state["hash"] = self._state["hasher"].to_bytes(self._state["data"], None) def _get_session(): session_id = get_report_ctx().session_id session_info = Server.get_current()._get_session_info(session_id) if session_info is None: raise RuntimeError("Couldn't get your Streamlit Session object.") return session_info.session def _get_state(hash_funcs=None): session = _get_session() if not hasattr(session, "_custom_session_state"): session._custom_session_state = _SessionState(session, hash_funcs) return session._custom_session_state if __name__ == "__main__": main() #main(file_to_anot, anotf, base_f,comp_list,echo_list,shape_list,marg_list,foci_list)
get_image
identifier_name
Thyroid annotator.py
### Hacked together by Johnson Thomas ### Annotation UI created in Streamlit ### Can be used for binary or multilabel annotation ### Importing libraries from streamlit.hashing import _CodeHasher from streamlit.report_thread import get_report_ctx from streamlit.server.server import Server import streamlit as st from PIL import Image import os import pandas as pd import re ### Creating a 3 column layout in streamlit col1, col2, col3= st.beta_columns([3, 1,1]) ### Folder where the image files are kept. This path is in windows format. ### If you are runnin it in Linux, chnage the path appropriately. #source_dir = r'C:\Users\JOHNY\CV_recepies\cv\images' source_dir = None csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv' proj_file = r'C:\Users\JOHNY\CV_recepies\St_annot.csv' ### Function to create a python list cotaning paths to image files in a specific folder ### This function is decorated with @st.cache to avoid rerunning extensions = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG'] @st.cache(allow_output_mutation=True) def get_file_list(root_dir): file_list = [] counter = 1 for root, directories, filenames in os.walk(root_dir): for filename in filenames: if any(ext in filename for ext in extensions): file_list.append(os.path.join(root, filename)) counter += 1 return sorted(file_list) ### Creating the side bar add_proj_text = st.sidebar.write('Start new project') add_textbox = st.sidebar.text_input('Project name') add_foldbox = st.sidebar.text_input('Folder name' ) add_newproj_btn = st.sidebar.button('Create new project') st.sidebar.write(' ') add_proj_load = st.sidebar.write('Load project') #proj_list =new_installation(proj_file) add_csvbox = st.sidebar.selectbox('Pick your project',"exp1") add_loadproj_btn = st.sidebar.button('Load project') ### store file names to a list and find the number of files in the list #file_to_anot = get_file_list(source_dir) #file_to_anot = get_file_list(source_dir) #max_ind= len(file_to_anot) -1 ### Creating a list to store the 
annotations ### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list @st.cache(allow_output_mutation=True) def init_anot(file_to_anot): anot = [None]*(len(file_to_anot)) comp_list = [None]*(len(file_to_anot)) echo_list = [None]*(len(file_to_anot)) shape_list =[None]*(len(file_to_anot)) marg_list = [None]*(len(file_to_anot)) foci_list = [None]*(len(file_to_anot)) return anot,comp_list,echo_list,shape_list,marg_list,foci_list ### Creating a list to store just the file names @st.cache(allow_output_mutation=True) def init_base_f(file_to_anot): base_file = [None]*(len(file_to_anot)) return base_file #anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot) #base_f = init_base_f(file_to_anot) ### Given an index this function converts path in the index to windows readable path ### then load the imaeg and returns the loaded image def get_image(ind_no,file_to_anot): file_name = file_to_anot[ind_no] im_file =re.sub("\\\\","\\\\\\\\", file_name) loaded_image = Image.open(im_file) return loaded_image ### Get just the image file name from the complete path string def extract_basename(path): basename = re.search(r'[^\\/]+(?=[\\/]?$)', path) if basename: return basename.group(0) def get_index(dta_ar, out_string): for i in range(len(dta_ar)): if dta_ar[i] == out_string: in_dex = i return in_dex def main():
def gen_desc_save(composition, echo, shape, margin, echogenic_foci, ind_no,file_to_anot): comp = composition.capitalize() if echogenic_foci =="none": echo_foc = "no calcification or comet tail artiacts" else: echo_foc = echogenic_foci desc = comp + " " + echo + " " + shape + " thyroid nodule with " + margin + " margin" + " and " + echo_foc + "." file_name2 = file_to_anot[ind_no] file_only = extract_basename(file_name2) label_to_log = composition + "," + echo + "," + shape + "," + margin + "," + echogenic_foci #anotf[ind_no] = label_to_log return desc,label_to_log, file_only class _SessionState: def __init__(self, session, hash_funcs): """Initialize SessionState instance.""" self.__dict__["_state"] = { "data": {}, "hash": None, "hasher": _CodeHasher(hash_funcs), "is_rerun": False, "session": session, } def __call__(self, **kwargs): """Initialize state data once.""" for item, value in kwargs.items(): if item not in self._state["data"]: self._state["data"][item] = value def __getitem__(self, item): """Return a saved state value, None if item is undefined.""" return self._state["data"].get(item, None) def __getattr__(self, item): """Return a saved state value, None if item is undefined.""" return self._state["data"].get(item, None) def __setitem__(self, item, value): """Set state value.""" self._state["data"][item] = value def __setattr__(self, item, value): """Set state value.""" self._state["data"][item] = value def clear(self): """Clear session state and request a rerun.""" self._state["data"].clear() self._state["session"].request_rerun() def sync(self): """Rerun the app with all state values up to date from the beginning to fix rollbacks.""" # Ensure to rerun only once to avoid infinite loops # caused by a constantly changing state value at each run. 
# # Example: state.value += 1 if self._state["is_rerun"]: self._state["is_rerun"] = False elif self._state["hash"] is not None: if self._state["hash"] != self._state["hasher"].to_bytes(self._state["data"], None): self._state["is_rerun"] = True self._state["session"].request_rerun() self._state["hash"] = self._state["hasher"].to_bytes(self._state["data"], None) def _get_session(): session_id = get_report_ctx().session_id session_info = Server.get_current()._get_session_info(session_id) if session_info is None: raise RuntimeError("Couldn't get your Streamlit Session object.") return session_info.session def _get_state(hash_funcs=None): session = _get_session() if not hasattr(session, "_custom_session_state"): session._custom_session_state = _SessionState(session, hash_funcs) return session._custom_session_state if __name__ == "__main__": main() #main(file_to_anot, anotf, base_f,comp_list,echo_list,shape_list,marg_list,foci_list)
state = _get_state() def set_index_in(in_num): state.comp_list[in_num] = get_index(comp_options, composition) state.echo_list[in_num] = get_index(echo_options, echo) state.shape_list[in_num]= get_index(shape_options, shape) state.marg_list[in_num] = get_index(margin_options, margin) state.foci_list[in_num]= get_index(foci_options, echogenic_foci) def update_choices(ind_num): ''' This function collects the values of lables/tags for the next or previous image, then displays it in the user interface. This function is called each time Next or Previous button is pressed. ''' if state.comp_list[ind_num] != None: state.comp = state.comp_list[ind_num] else: state.comp = 0 if state.echo_list[ind_num] != None: state.echo = state.echo_list[ind_num] else: state.echo = 0 if state.shape_list[ind_num] !=None: state.shape = state.shape_list[ind_num] else: state.shape = 0 if state.marg_list[ind_num] != None: state.margin = state.marg_list[ind_num] else: state.margin = 0 if state.foci_list[ind_num] != None: state.foci = state.foci_list[ind_num] else: state.foci = 0 #print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci) # Initializing a state variable input if state.input == None: state.input = 0 state.last_anot =0 state.comp = 0 state.echo = 0 state.shape = 0 state.margin = 0 state.foci = 0 # Creating the UI comp_options = ['cystic','spongiform', 'mixed cystic','solid'] echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic'] shape_options =['wider than tall','taller than wide'] margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete'] foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci'] with col2: prev_button = st.button('Previous') if state.active_project == True: composition = st.radio('Composition',comp_options, state.comp) echo = st.radio('Echogenicity',echo_options, state.echo) shape = st.radio('Shape',shape_options, 
state.shape) state.started = True with col3: next_button = st.button('Next') if state.active_project == True: margin = st.radio('Margin',margin_options, state.margin) echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci) with col1: #if state.input ==0: if next_button and state.active_project == True: if state.input == state.max_ind: e =RuntimeError('Reached end of images in the folder') st.exception(e) else: set_index_in(state.input) #update_choices(state.input,comp_list) state.input = state.input + 1 update_choices(state.input) if state.input > state.last_anot: state.last_anot = state.input if prev_button and state.active_project == True: if state.input == 0: e =RuntimeError('Reached the first image in the folder') st.exception(e) else: set_index_in(state.input) #update_choices(state.input,state.comp_list) state.input = state.input -1 update_choices(state.input) if add_newproj_btn and add_foldbox != "": state.file_to_anot = get_file_list(add_foldbox) state.max_ind= len(state.file_to_anot) -1 ### initializing variables state.active_project = True state.input = 0 state.last_anot =0 state.comp = 0 state.echo = 0 state.shape = 0 state.margin = 0 state.foci = 0 state.started = False state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot) state.base_f = init_base_f(state.file_to_anot) if add_foldbox != "" and state.started == True: st.image(get_image(state.input,state.file_to_anot),use_column_width=True) desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot ) #print("anot list",state.anot_list) state.anot_list[state.input] = lbl state.base_f[state.input] = fln col1.write( desc_nod) ### Save button ######################################################## save_button = st.button('Save') if save_button: set_index_in(state.input) df = pd.DataFrame(list(zip(state.base_f, state.anot_list)), columns =["IM_FILENAME", "LABELS"]) cwd 
= os.getcwd() csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv' #print("printing curr file name") #print(csv_to_log) df.to_csv(csv_to_log) #proj = pd.read_csv(proj_file) #ind_pr= proj.index[proj['Project_name'] == curr_proj_name].tolist() print(ind_pr) state.sync()
identifier_body
Thyroid annotator.py
### Hacked together by Johnson Thomas ### Annotation UI created in Streamlit ### Can be used for binary or multilabel annotation
from streamlit.hashing import _CodeHasher from streamlit.report_thread import get_report_ctx from streamlit.server.server import Server import streamlit as st from PIL import Image import os import pandas as pd import re ### Creating a 3 column layout in streamlit col1, col2, col3= st.beta_columns([3, 1,1]) ### Folder where the image files are kept. This path is in windows format. ### If you are runnin it in Linux, chnage the path appropriately. #source_dir = r'C:\Users\JOHNY\CV_recepies\cv\images' source_dir = None csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv' proj_file = r'C:\Users\JOHNY\CV_recepies\St_annot.csv' ### Function to create a python list cotaning paths to image files in a specific folder ### This function is decorated with @st.cache to avoid rerunning extensions = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG'] @st.cache(allow_output_mutation=True) def get_file_list(root_dir): file_list = [] counter = 1 for root, directories, filenames in os.walk(root_dir): for filename in filenames: if any(ext in filename for ext in extensions): file_list.append(os.path.join(root, filename)) counter += 1 return sorted(file_list) ### Creating the side bar add_proj_text = st.sidebar.write('Start new project') add_textbox = st.sidebar.text_input('Project name') add_foldbox = st.sidebar.text_input('Folder name' ) add_newproj_btn = st.sidebar.button('Create new project') st.sidebar.write(' ') add_proj_load = st.sidebar.write('Load project') #proj_list =new_installation(proj_file) add_csvbox = st.sidebar.selectbox('Pick your project',"exp1") add_loadproj_btn = st.sidebar.button('Load project') ### store file names to a list and find the number of files in the list #file_to_anot = get_file_list(source_dir) #file_to_anot = get_file_list(source_dir) #max_ind= len(file_to_anot) -1 ### Creating a list to store the annotations ### @st.cache(allow_output_mutation=True) - is used to preserve the current state and to allow modification of the list 
@st.cache(allow_output_mutation=True) def init_anot(file_to_anot): anot = [None]*(len(file_to_anot)) comp_list = [None]*(len(file_to_anot)) echo_list = [None]*(len(file_to_anot)) shape_list =[None]*(len(file_to_anot)) marg_list = [None]*(len(file_to_anot)) foci_list = [None]*(len(file_to_anot)) return anot,comp_list,echo_list,shape_list,marg_list,foci_list ### Creating a list to store just the file names @st.cache(allow_output_mutation=True) def init_base_f(file_to_anot): base_file = [None]*(len(file_to_anot)) return base_file #anotf,comp_list,echo_list,shape_list,marg_list,foci_list = init_anot(file_to_anot) #base_f = init_base_f(file_to_anot) ### Given an index this function converts path in the index to windows readable path ### then load the imaeg and returns the loaded image def get_image(ind_no,file_to_anot): file_name = file_to_anot[ind_no] im_file =re.sub("\\\\","\\\\\\\\", file_name) loaded_image = Image.open(im_file) return loaded_image ### Get just the image file name from the complete path string def extract_basename(path): basename = re.search(r'[^\\/]+(?=[\\/]?$)', path) if basename: return basename.group(0) def get_index(dta_ar, out_string): for i in range(len(dta_ar)): if dta_ar[i] == out_string: in_dex = i return in_dex def main(): state = _get_state() def set_index_in(in_num): state.comp_list[in_num] = get_index(comp_options, composition) state.echo_list[in_num] = get_index(echo_options, echo) state.shape_list[in_num]= get_index(shape_options, shape) state.marg_list[in_num] = get_index(margin_options, margin) state.foci_list[in_num]= get_index(foci_options, echogenic_foci) def update_choices(ind_num): ''' This function collects the values of lables/tags for the next or previous image, then displays it in the user interface. This function is called each time Next or Previous button is pressed. 
''' if state.comp_list[ind_num] != None: state.comp = state.comp_list[ind_num] else: state.comp = 0 if state.echo_list[ind_num] != None: state.echo = state.echo_list[ind_num] else: state.echo = 0 if state.shape_list[ind_num] !=None: state.shape = state.shape_list[ind_num] else: state.shape = 0 if state.marg_list[ind_num] != None: state.margin = state.marg_list[ind_num] else: state.margin = 0 if state.foci_list[ind_num] != None: state.foci = state.foci_list[ind_num] else: state.foci = 0 #print("This is from update", state.comp, state.echo, state.shape, state.margin, state.foci) # Initializing a state variable input if state.input == None: state.input = 0 state.last_anot =0 state.comp = 0 state.echo = 0 state.shape = 0 state.margin = 0 state.foci = 0 # Creating the UI comp_options = ['cystic','spongiform', 'mixed cystic','solid'] echo_options = ['anechoic','hyperechoic','isoechoic','hypoechoic','very hypoechoic'] shape_options =['wider than tall','taller than wide'] margin_options = ['smooth','ill defined','lobulated', 'irregular', 'ete'] foci_options = ['none','comet tail artifacts','macrocalcifications','peripheral calcifications','punctate echogenic foci'] with col2: prev_button = st.button('Previous') if state.active_project == True: composition = st.radio('Composition',comp_options, state.comp) echo = st.radio('Echogenicity',echo_options, state.echo) shape = st.radio('Shape',shape_options, state.shape) state.started = True with col3: next_button = st.button('Next') if state.active_project == True: margin = st.radio('Margin',margin_options, state.margin) echogenic_foci = st.radio('Echogenic Foci', foci_options, state.foci) with col1: #if state.input ==0: if next_button and state.active_project == True: if state.input == state.max_ind: e =RuntimeError('Reached end of images in the folder') st.exception(e) else: set_index_in(state.input) #update_choices(state.input,comp_list) state.input = state.input + 1 update_choices(state.input) if state.input > 
state.last_anot: state.last_anot = state.input if prev_button and state.active_project == True: if state.input == 0: e =RuntimeError('Reached the first image in the folder') st.exception(e) else: set_index_in(state.input) #update_choices(state.input,state.comp_list) state.input = state.input -1 update_choices(state.input) if add_newproj_btn and add_foldbox != "": state.file_to_anot = get_file_list(add_foldbox) state.max_ind= len(state.file_to_anot) -1 ### initializing variables state.active_project = True state.input = 0 state.last_anot =0 state.comp = 0 state.echo = 0 state.shape = 0 state.margin = 0 state.foci = 0 state.started = False state.anot_list,state.comp_list,state.echo_list,state.shape_list,state.marg_list,state.foci_list = init_anot(state.file_to_anot) state.base_f = init_base_f(state.file_to_anot) if add_foldbox != "" and state.started == True: st.image(get_image(state.input,state.file_to_anot),use_column_width=True) desc_nod, lbl, fln= gen_desc_save(composition, echo, shape, margin, echogenic_foci,state.input,state.file_to_anot ) #print("anot list",state.anot_list) state.anot_list[state.input] = lbl state.base_f[state.input] = fln col1.write( desc_nod) ### Save button ######################################################## save_button = st.button('Save') if save_button: set_index_in(state.input) df = pd.DataFrame(list(zip(state.base_f, state.anot_list)), columns =["IM_FILENAME", "LABELS"]) cwd = os.getcwd() csv_to_log = r'C:\Users\JOHNY\CV_recepies\annot1.csv' #print("printing curr file name") #print(csv_to_log) df.to_csv(csv_to_log) #proj = pd.read_csv(proj_file) #ind_pr= proj.index[proj['Project_name'] == curr_proj_name].tolist() print(ind_pr) state.sync() def gen_desc_save(composition, echo, shape, margin, echogenic_foci, ind_no,file_to_anot): comp = composition.capitalize() if echogenic_foci =="none": echo_foc = "no calcification or comet tail artiacts" else: echo_foc = echogenic_foci desc = comp + " " + echo + " " + shape + " thyroid nodule with 
" + margin + " margin" + " and " + echo_foc + "." file_name2 = file_to_anot[ind_no] file_only = extract_basename(file_name2) label_to_log = composition + "," + echo + "," + shape + "," + margin + "," + echogenic_foci #anotf[ind_no] = label_to_log return desc,label_to_log, file_only class _SessionState: def __init__(self, session, hash_funcs): """Initialize SessionState instance.""" self.__dict__["_state"] = { "data": {}, "hash": None, "hasher": _CodeHasher(hash_funcs), "is_rerun": False, "session": session, } def __call__(self, **kwargs): """Initialize state data once.""" for item, value in kwargs.items(): if item not in self._state["data"]: self._state["data"][item] = value def __getitem__(self, item): """Return a saved state value, None if item is undefined.""" return self._state["data"].get(item, None) def __getattr__(self, item): """Return a saved state value, None if item is undefined.""" return self._state["data"].get(item, None) def __setitem__(self, item, value): """Set state value.""" self._state["data"][item] = value def __setattr__(self, item, value): """Set state value.""" self._state["data"][item] = value def clear(self): """Clear session state and request a rerun.""" self._state["data"].clear() self._state["session"].request_rerun() def sync(self): """Rerun the app with all state values up to date from the beginning to fix rollbacks.""" # Ensure to rerun only once to avoid infinite loops # caused by a constantly changing state value at each run. 
# # Example: state.value += 1 if self._state["is_rerun"]: self._state["is_rerun"] = False elif self._state["hash"] is not None: if self._state["hash"] != self._state["hasher"].to_bytes(self._state["data"], None): self._state["is_rerun"] = True self._state["session"].request_rerun() self._state["hash"] = self._state["hasher"].to_bytes(self._state["data"], None) def _get_session(): session_id = get_report_ctx().session_id session_info = Server.get_current()._get_session_info(session_id) if session_info is None: raise RuntimeError("Couldn't get your Streamlit Session object.") return session_info.session def _get_state(hash_funcs=None): session = _get_session() if not hasattr(session, "_custom_session_state"): session._custom_session_state = _SessionState(session, hash_funcs) return session._custom_session_state if __name__ == "__main__": main() #main(file_to_anot, anotf, base_f,comp_list,echo_list,shape_list,marg_list,foci_list)
### Importing libraries
random_line_split
gardenView.js
import {fabric} from 'fabric'; import * as actions from './actions'; import * as cst from '../constants'; import {PlantView} from './plantView'; import {ScoreInput} from './scoring/input'; const DEFAULT_IMAGE_WIDTH = 28; const DEFAULT_IMAGE_HEIGHT = 28; const SCROLLBAR_WIDTH = 26; const DEFAULT_USER_WIDTH = 6; const DEFAULT_USER_LENGTH = 4; /* * Encapsulates a grid representing a garden */ export class GardenView { constructor(containerSelector, plantFactory, actionDispatcher) { this.containerSelector = containerSelector; this.actionDispatcher = actionDispatcher; this.idImageSelected = null; this.imagesMapping = {}; this.plantFactory = plantFactory; this.monthSelected = 0; this.idGardenCounter = 0; this.grid = null; this.actionDispatcher.register('generateGarden', actions.GENERATE_GARDEN, (data) => this.generate(data.width, data.length)); this.actionDispatcher.register('loadGarden', actions.LOAD_GARDEN, (data) => this.load(data)); const that = this; $(`#${this.containerSelector}`).append(` <div class="text-center"><button type="button" class="btn btn-default btn-lg">Générer votre potager</button> `); $(`#${this.containerSelector}`).find('button').on('click', (event) => { that.actionDispatcher.dispatch({type: actions.MODAL_GARDEN_CREATOR}); }); } ge
idth, length) { // From user dimensions, we calculate grid features, like the size in pixels of a meter this.grid = { userDimensions: { width: width, length: length }, sizeMeter: ($(`#${this.containerSelector}`).width() - SCROLLBAR_WIDTH) / width, horizontalLines: [], verticalLines: [] }; $(`#${this.containerSelector}`).empty().append(` <div class="row"> <div class="col-md-12"> <div style="height:400px; overflow: auto;"> <canvas id="canvas-garden" width=${this.grid.sizeMeter * width} height=${this.grid.sizeMeter * length}> </canvas> </div> </div> </div> `); let self = this; this.canvas = new fabric.Canvas('canvas-garden'); let canvasContainer = $(`#${this.containerSelector}`).parent()[0]; // drag and drop events dont work right with JQuery on container... // so for canvas container, use native JS methods // On drag over canvasContainer.addEventListener('dragover', (event) => { if (event.preventDefault) { event.preventDefault(); } event.dataTransfer.dropEffect = 'copy'; return false; }, false); // On drop canvasContainer.addEventListener('drop', (event) => { event.preventDefault(); if (event.stopPropagation) { event.stopPropagation(); } const idPlant = $('#plant-selectize').val(); const position = { x: event.layerX, y: event.layerY }; self.putPlant($('#image-selected img.img-dragging')[0], idPlant, position); return false; }, false); // On selection of an object this.canvas.on('object:selected', (event) => { this.selectPlant(event.target); }); // On click on grid, but not on a object this.canvas.on('before:selection:cleared', (event) => { this.unselectPlant(); }); // On image moving this.canvas.on('object:moving', (event) => { var obj = event.target; if (typeof(obj) === 'undefined' || obj === null || typeof(obj.canvas) === 'undefined') { return; } // Below is code to be sure we can't drag a plant outside of the visible grid // if object is too big ignore if(obj.currentHeight > obj.canvas.height || obj.currentWidth > obj.canvas.width){ return; } obj.setCoords(); 
const imagePlant = obj._objects.filter(o => o.isType('image'))[0]; const boundingRect = { left: obj.left + obj.width / 2 - imagePlant.width / 2, top: obj.top + obj.height / 2 - imagePlant.height / 2, width: imagePlant.width, height: imagePlant.height }; // top-left corner if(boundingRect.top < 0 || boundingRect.left < 0){ obj.top = Math.max(obj.top, obj.top-boundingRect.top); obj.left = Math.max(obj.left, obj.left-boundingRect.left); } // bot-right corner if(boundingRect.top+boundingRect.height > obj.canvas.height || boundingRect.left+boundingRect.width > obj.canvas.width){ obj.top = Math.min(obj.top, obj.canvas.height-boundingRect.height+obj.top-boundingRect.top); obj.left = Math.min(obj.left, obj.canvas.width-boundingRect.width+obj.left-boundingRect.left); } // On moving, notify state panel that we made a change this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE}); }); this.refreshGrid(); // Register listeners on some actions this.actionDispatcher.register('unselectPlant', actions.UNSELECT_PLANT, () => this.canvas.trigger('before:selection:cleared')); this.actionDispatcher.register('removePlant', actions.REMOVE_PLANT, () => this.removePlant()); this.actionDispatcher.register('showAreas', actions.SHOW_AREAS, (areaType) => this.showAreas(areaType)); this.actionDispatcher.register('hideAreas', actions.HIDE_AREAS, (areaType) => this.hideAreas(areaType)); /*this.actionDispatcher.register('showAreaSeeding', actions.SHOW_AREA_SEEDING, () => this.showAreas('seeding')); this.actionDispatcher.register('showAreaSize', actions.SHOW_AREA_SIZE, () => this.showAreas('size')); this.actionDispatcher.register('showAreaHeight', actions.SHOW_AREA_HEIGHT, () => this.showAreas('height')); this.actionDispatcher.register('showAreaSun', actions.SHOW_AREA_SUN, () => this.showAreas('sun')); this.actionDispatcher.register('hideArea', actions.HIDE_AREAS, () => this.hideAreas());*/ this.actionDispatcher.register('showMonthlyTask', actions.SHOW_TASK_MONTH, (data) => 
this.showMonthlyTask(data)); this.actionDispatcher.register('prepareSave', actions.PREPARE_SAVE, (data) => this.prepareSave(data)); this.actionDispatcher.register('prepareScore', actions.PREPARE_SCORE, (data) => this.prepareScoring(data)); this.actionDispatcher.register('showScorePlants', actions.SHOW_SCORE_PLANTS, (data) => this.showScoreSelection(data)); this.actionDispatcher.register('hideScorePlants', actions.HIDE_SCORE_PLANTS, (data) => this.hideScoreSelection(data)); // Unregister listeners on garden creation / loading this.actionDispatcher.unregister('generateGarden'); this.actionDispatcher.unregister('loadGarden'); } /* Get some datas about plants in garden, for saving */ prepareSave(data) { let plants = []; for (const id in this.imagesMapping) { plants.push(this.imagesMapping[id].toJSON()); } // Call save process by dispatching save event with plants data this.actionDispatcher.dispatch({type: actions.SAVE, data: { id: data.id, garden: { plants: plants, userDimensions: this.grid.userDimensions } }}); } /* Get some datas about plants in garden, to run scoring */ prepareScoring() { let plants = [], plantModels = {}; for (const id in this.imagesMapping) { const plantView = this.imagesMapping[id]; const plant = plantView.getPlant(); plants.push(plantView.toJSON()); if (!(plant.id in plantModels)) { plantModels[plant.id] = plant; } } const scoreInput = new ScoreInput(plants, plantModels, { sizeMeter: this.grid.sizeMeter }); // Call score process by dispatching save event with plants data this.actionDispatcher.dispatch({type: actions.SCORE, data: { input: scoreInput, }}); } /* Add a plant on grid, by putting image in a fabricjs group and instanciating a plantView object */ addPlantOnGrid(img, idPlant, width, height, position) { img.set({ width: width, height: height, left: position.x, top: position.y, hasRotatingPoint: false, lockRotation: true, lockScalingFlip : true, lockScalingX: true, lockScalingY: true }); const plant = this.plantFactory.buildPlant(idPlant); 
let plantView = new PlantView(img, plant); this.imagesMapping[img.id] = plantView; this.canvas.add(plantView.getGroup()); } /* Populate garden with plants from imported data */ load(data) { // By default, if no user dimensions saved, we generate a 6mx4m garden const {width, length} = (typeof(data.garden.userDimensions) !== 'undefined') ? data.garden.userDimensions : {width: DEFAULT_USER_WIDTH, length: DEFAULT_USER_LENGTH}; this.generate(width, length); data.garden.plants.map(jsonPlant => { const idImage = this.idGardenCounter; this.idGardenCounter = this.idGardenCounter + 1; const img = cst.PLANTS_IMAGES[jsonPlant.idPlant] || cst.DEFAULT_PLANT_IMAGE; fabric.Image.fromURL(`${cst.URL_IMAGES}/${img}`, oImg => { oImg.set({ id: idImage }); this.addPlantOnGrid(oImg, jsonPlant.idPlant, DEFAULT_IMAGE_WIDTH, DEFAULT_IMAGE_HEIGHT, jsonPlant.position); }); }); } /* Put a plant into the garden, from dragged image */ putPlant(img, idPlant, position) { const idImage = this.idGardenCounter; this.idGardenCounter = this.idGardenCounter + 1; img = new fabric.Image(img, { id: idImage }); this.addPlantOnGrid(img, idPlant, img.width, img.height, position); this.showMonthlyTask(this.monthSelected); this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE}); } /* Remove selected plant */ removePlant() { if (this.idImageSelected === null) { return; } // We keep id in another variable to keep a reference for deleting from imagesMapping const id = this.idImageSelected; let imageGroupToRemove = this.imagesMapping[this.idImageSelected].getGroup(); this.canvas.remove(imageGroupToRemove); delete this.imagesMapping[id]; this.actionDispatcher.dispatch({type: actions.HIDE_CARD}); this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE}); } /* Clear all plants from garden */ clear() { this.canvas.clear(); for (const id in this.imagesMapping) { delete this.imagesMapping[id]; } this.imagesMapping = {}; } /* Select a plant, and display some informations about it */ selectPlant(imageGroup) { if 
(this.idImageSelected !== null) { this.unselectPlant(); } const imagePlant = imageGroup._objects.filter(o => o.isType('image'))[0]; const plantView = this.imagesMapping[imagePlant.id]; this.idImageSelected = imagePlant.id; this.actionDispatcher.dispatch({type: actions.SHOW_CARD, data: plantView.getPlant()}); } /* Unselect plant, and hide some informations about it */ unselectPlant() { if (this.idImageSelected === null) { return; } const plantView = this.imagesMapping[this.idImageSelected]; this.canvas.selection = false; this.idImageSelected = null; this.actionDispatcher.dispatch({type: actions.HIDE_CARD}); } showAreas(type) { for (let idImage in this.imagesMapping) { this.imagesMapping[idImage].showArea(type, this.grid.sizeMeter); } this.canvas.renderAll(); } hideAreas(type) { for (let idImage in this.imagesMapping) { this.imagesMapping[idImage].hideArea(type); } this.canvas.renderAll(); } /* Show for each plant in garden which task is associated to selected month, if any */ showMonthlyTask(month) { this.monthSelected = month; if (this.monthSelected === 0) { this.hideMonthlyTasks(); return; } for (let idImage in this.imagesMapping) { // Because of asynchronous image loading, the render if executed in a callback this.imagesMapping[idImage].showMonthlyTasks(this.monthSelected, () => { this.canvas.renderAll(); }); } } /* Hide all tasks associated to selected monh */ hideMonthlyTasks() { for (let idImage in this.imagesMapping) { this.imagesMapping[idImage].hideMonthlyTasks(() => { this.canvas.renderAll(); }); } } showScoreSelection(selection) { selection.plants.map(plant => { this.imagesMapping[plant.idImage].showScoreSelection(selection.circlePlantColor); }); this.canvas.renderAll(); } hideScoreSelection(selection) { selection.plants.map(plant => { this.imagesMapping[plant.idImage].hideScoreSelection(); }); this.canvas.renderAll(); } /* Refresh garden grid by redrawing lines, depending of the size in pixels of a meter */ refreshGrid() { const canvasWidth = 
this.canvas.getWidth(); const canvasHeight = this.canvas.getHeight(); this.grid.horizontalLines.map(line => this.canvas.remove(line)); this.grid.verticalLines.map(line => this.canvas.remove(line)); this.grid.horizontalLines = []; this.grid.verticalLines = []; for (let xStart=this.grid.sizeMeter; xStart < canvasWidth; xStart=xStart+this.grid.sizeMeter) { const line = new fabric.Line([xStart, 0, xStart, canvasHeight], { stroke: '#222', strokeDashArray: [5, 5], selectable: false }); this.canvas.add(line); this.grid.verticalLines.push(line); } for (let yStart=this.grid.sizeMeter; yStart < canvasHeight ; yStart=yStart+this.grid.sizeMeter) { const line = new fabric.Line([0, yStart, canvasWidth, yStart], { stroke: '#222', strokeDashArray: [5, 5], selectable: false }); this.canvas.add(line); this.grid.horizontalLines.push(line); } } }
nerate(w
identifier_name
gardenView.js
import {fabric} from 'fabric';
import * as actions from './actions';
import * as cst from '../constants';
import {PlantView} from './plantView';
import {ScoreInput} from './scoring/input';

// Default pixel size used for plant images re-created from a saved garden.
const DEFAULT_IMAGE_WIDTH = 28;
const DEFAULT_IMAGE_HEIGHT = 28;
// Pixels reserved for the container scrollbar when computing the size of a meter.
const SCROLLBAR_WIDTH = 26;
// Fallback garden dimensions (in meters) when a save contains no user dimensions.
const DEFAULT_USER_WIDTH = 6;
const DEFAULT_USER_LENGTH = 4;

/*
 * Encapsulates a grid representing a garden
 */
export class GardenView {
    /**
     * @param {string} containerSelector - id (without '#') of the DOM element hosting the garden
     * @param {Object} plantFactory - factory building a plant model from a plant id
     * @param {Object} actionDispatcher - pub/sub dispatcher linking the view to the rest of the UI
     */
    constructor(containerSelector, plantFactory, actionDispatcher) {
        this.containerSelector = containerSelector;
        this.actionDispatcher = actionDispatcher;
        this.idImageSelected = null;   // id of the currently selected plant image, or null
        this.imagesMapping = {};       // image id -> PlantView
        this.plantFactory = plantFactory;
        this.monthSelected = 0;        // 0 means "no month selected"
        this.idGardenCounter = 0;      // monotonically increasing id assigned to dropped images
        this.grid = null;              // grid metadata, built by generate()

        this.actionDispatcher.register('generateGarden', actions.GENERATE_GARDEN,
            (data) => this.generate(data.width, data.length));
        this.actionDispatcher.register('loadGarden', actions.LOAD_GARDEN,
            (data) => this.load(data));

        // Initial placeholder: a single button opening the garden-creation modal.
        $(`#${this.containerSelector}`).append(`
            <div class="text-center"><button type="button" class="btn btn-default btn-lg">Générer votre potager</button>
        `);
        $(`#${this.containerSelector}`).find('button').on('click', () => {
            this.actionDispatcher.dispatch({type: actions.MODAL_GARDEN_CREATOR});
        });
    }

    /*
     * Build the garden canvas from user dimensions (in meters), draw the grid
     * and wire every canvas / drag-and-drop / dispatcher event.
     */
    generate(width, length) {
        // From user dimensions, we calculate grid features, like the size in pixels of a meter
        this.grid = {
            userDimensions: {
                width: width,
                length: length
            },
            sizeMeter: ($(`#${this.containerSelector}`).width() - SCROLLBAR_WIDTH) / width,
            horizontalLines: [],
            verticalLines: []
        };

        $(`#${this.containerSelector}`).empty().append(`
            <div class="row">
                <div class="col-md-12">
                    <div style="height:400px; overflow: auto;">
                        <canvas id="canvas-garden" width=${this.grid.sizeMeter * width} height=${this.grid.sizeMeter * length}>
                        </canvas>
                    </div>
                </div>
            </div>
        `);

        this.canvas = new fabric.Canvas('canvas-garden');
        // drag and drop events dont work right with JQuery on container...
        // so for canvas container, use native JS methods
        const canvasContainer = $(`#${this.containerSelector}`).parent()[0];

        // On drag over
        canvasContainer.addEventListener('dragover', (event) => {
            if (event.preventDefault) {
                event.preventDefault();
            }
            event.dataTransfer.dropEffect = 'copy';
            return false;
        }, false);

        // On drop: put the currently dragged plant image at the drop position
        canvasContainer.addEventListener('drop', (event) => {
            event.preventDefault();
            if (event.stopPropagation) {
                event.stopPropagation();
            }
            const idPlant = $('#plant-selectize').val();
            const position = {
                x: event.layerX,
                y: event.layerY
            };
            this.putPlant($('#image-selected img.img-dragging')[0], idPlant, position);
            return false;
        }, false);

        // On selection of an object
        this.canvas.on('object:selected', (event) => {
            this.selectPlant(event.target);
        });

        // On click on grid, but not on a object
        this.canvas.on('before:selection:cleared', () => {
            this.unselectPlant();
        });

        // On image moving: keep the plant image inside the visible grid
        this.canvas.on('object:moving', (event) => {
            const obj = event.target;
            if (typeof obj === 'undefined' || obj === null || typeof obj.canvas === 'undefined') {
                return;
            }
            // if object is too big ignore
            if (obj.currentHeight > obj.canvas.height || obj.currentWidth > obj.canvas.width) {
                return;
            }
            obj.setCoords();
            // The draggable group also holds overlays; the image defines the bounding box.
            const imagePlant = obj._objects.filter(o => o.isType('image'))[0];
            const boundingRect = {
                left: obj.left + obj.width / 2 - imagePlant.width / 2,
                top: obj.top + obj.height / 2 - imagePlant.height / 2,
                width: imagePlant.width,
                height: imagePlant.height
            };
            // top-left corner
            if (boundingRect.top < 0 || boundingRect.left < 0) {
                obj.top = Math.max(obj.top, obj.top - boundingRect.top);
                obj.left = Math.max(obj.left, obj.left - boundingRect.left);
            }
            // bot-right corner
            if (boundingRect.top + boundingRect.height > obj.canvas.height ||
                boundingRect.left + boundingRect.width > obj.canvas.width) {
                obj.top = Math.min(obj.top,
                    obj.canvas.height - boundingRect.height + obj.top - boundingRect.top);
                obj.left = Math.min(obj.left,
                    obj.canvas.width - boundingRect.width + obj.left - boundingRect.left);
            }
            // On moving, notify state panel that we made a change
            this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
        });

        this.refreshGrid();

        // Register listeners on some actions
        this.actionDispatcher.register('unselectPlant', actions.UNSELECT_PLANT,
            () => this.canvas.trigger('before:selection:cleared'));
        this.actionDispatcher.register('removePlant', actions.REMOVE_PLANT,
            () => this.removePlant());
        this.actionDispatcher.register('showAreas', actions.SHOW_AREAS,
            (areaType) => this.showAreas(areaType));
        this.actionDispatcher.register('hideAreas', actions.HIDE_AREAS,
            (areaType) => this.hideAreas(areaType));
        this.actionDispatcher.register('showMonthlyTask', actions.SHOW_TASK_MONTH,
            (data) => this.showMonthlyTask(data));
        this.actionDispatcher.register('prepareSave', actions.PREPARE_SAVE,
            (data) => this.prepareSave(data));
        this.actionDispatcher.register('prepareScore', actions.PREPARE_SCORE,
            (data) => this.prepareScoring(data));
        this.actionDispatcher.register('showScorePlants', actions.SHOW_SCORE_PLANTS,
            (data) => this.showScoreSelection(data));
        this.actionDispatcher.register('hideScorePlants', actions.HIDE_SCORE_PLANTS,
            (data) => this.hideScoreSelection(data));

        // Unregister listeners on garden creation / loading
        this.actionDispatcher.unregister('generateGarden');
        this.actionDispatcher.unregister('loadGarden');
    }

    /* Get some datas about plants in garden, for saving */
    prepareSave(data) {
        const plants = [];
        for (const id in this.imagesMapping) {
            plants.push(this.imagesMapping[id].toJSON());
        }
        // Call save process by dispatching save event with plants data
        this.actionDispatcher.dispatch({type: actions.SAVE, data: {
            id: data.id,
            garden: {
                plants: plants,
                userDimensions: this.grid.userDimensions
            }
        }});
    }

    /* Get some datas about plants in garden, to run scoring */
    prepareScoring() {
        const plants = [];
        const plantModels = {};
        for (const id in this.imagesMapping) {
            const plantView = this.imagesMapping[id];
            const plant = plantView.getPlant();
            plants.push(plantView.toJSON());
            // Keep a single model per plant species, keyed by plant id.
            if (!(plant.id in plantModels)) {
                plantModels[plant.id] = plant;
            }
        }
        const scoreInput = new ScoreInput(plants, plantModels, {
            sizeMeter: this.grid.sizeMeter
        });
        // Call score process by dispatching save event with plants data
        this.actionDispatcher.dispatch({type: actions.SCORE, data: {
            input: scoreInput,
        }});
    }

    /*
     * Add a plant on grid, by putting image in a fabricjs group
     * and instanciating a plantView object
     */
    addPlantOnGrid(img, idPlant, width, height, position) {
        img.set({
            width: width,
            height: height,
            left: position.x,
            top: position.y,
            hasRotatingPoint: false,
            lockRotation: true,
            lockScalingFlip: true,
            lockScalingX: true,
            lockScalingY: true
        });
        const plant = this.plantFactory.buildPlant(idPlant);
        const plantView = new PlantView(img, plant);
        this.imagesMapping[img.id] = plantView;
        this.canvas.add(plantView.getGroup());
    }

    /* Populate garden with plants from imported data */
    load(data) {
        // By default, if no user dimensions saved, we generate a 6mx4m garden
        const {width, length} = (typeof data.garden.userDimensions !== 'undefined')
            ? data.garden.userDimensions
            : {width: DEFAULT_USER_WIDTH, length: DEFAULT_USER_LENGTH};
        this.generate(width, length);
        data.garden.plants.forEach(jsonPlant => {
            const idImage = this.idGardenCounter;
            this.idGardenCounter = this.idGardenCounter + 1;
            const img = cst.PLANTS_IMAGES[jsonPlant.idPlant] || cst.DEFAULT_PLANT_IMAGE;
            // Image loading is asynchronous; the plant is added in the callback.
            fabric.Image.fromURL(`${cst.URL_IMAGES}/${img}`, oImg => {
                oImg.set({id: idImage});
                this.addPlantOnGrid(oImg, jsonPlant.idPlant,
                    DEFAULT_IMAGE_WIDTH, DEFAULT_IMAGE_HEIGHT, jsonPlant.position);
            });
        });
    }

    /* Put a plant into the garden, from dragged image */
    putPlant(img, idPlant, position) {
        const idImage = this.idGardenCounter;
        this.idGardenCounter = this.idGardenCounter + 1;
        img = new fabric.Image(img, {id: idImage});
        this.addPlantOnGrid(img, idPlant, img.width, img.height, position);
        // Apply the currently selected month's task overlay to the new plant too.
        this.showMonthlyTask(this.monthSelected);
        this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
    }

    /* Remove selected plant */
    removePlant() {
        if (this.idImageSelected === null) {
            return;
        }
        // We keep id in another variable to keep a reference for deleting from imagesMapping
        const id = this.idImageSelected;
        const imageGroupToRemove = this.imagesMapping[this.idImageSelected].getGroup();
        this.canvas.remove(imageGroupToRemove);
        delete this.imagesMapping[id];
        this.actionDispatcher.dispatch({type: actions.HIDE_CARD});
        this.actionDispatcher.dispatch({type: actions.NOTIFY_CHANGE});
    }

    /* Clear all plants from garden */
    clear() {
        this.canvas.clear();
        this.imagesMapping = {};
    }

    /* Select a plant, and display some informations about it */
    selectPlant(imageGroup) {
        if (this.idImageSelected !== null) {
            this.unselectPlant();
        }
        const imagePlant = imageGroup._objects.filter(o => o.isType('image'))[0];
        const plantView = this.imagesMapping[imagePlant.id];
        this.idImageSelected = imagePlant.id;
        this.actionDispatcher.dispatch({type: actions.SHOW_CARD, data: plantView.getPlant()});
    }

    /* Unselect plant, and hide some informations about it */
    unselectPlant() {
        if (this.idImageSelected === null) {
            return;
        }
        this.canvas.selection = false;
        this.idImageSelected = null;
        this.actionDispatcher.dispatch({type: actions.HIDE_CARD});
    }

    /* Show, for every plant, the colored area of the given type (seeding, size, ...) */
    showAreas(type) {
        for (const idImage in this.imagesMapping) {
            this.imagesMapping[idImage].showArea(type, this.grid.sizeMeter);
        }
        this.canvas.renderAll();
    }

    /* Hide, for every plant, the colored area of the given type */
    hideAreas(type) {
        for (const idImage in this.imagesMapping) {
            this.imagesMapping[idImage].hideArea(type);
        }
        this.canvas.renderAll();
    }

    /* Show for each plant in garden which task is associated to selected month, if any */
    showMonthlyTask(month) {
        this.monthSelected = month;
        // Month 0 is the "no month" sentinel: hide all task overlays instead.
        if (this.monthSelected === 0) {
            this.hideMonthlyTasks();
            return;
        }
        for (const idImage in this.imagesMapping) {
            // Because of asynchronous image loading, the render is executed in a callback
            this.imagesMapping[idImage].showMonthlyTasks(this.monthSelected, () => {
                this.canvas.renderAll();
            });
        }
    }

    /* Hide all tasks associated to selected month */
    hideMonthlyTasks() {
        for (const idImage in this.imagesMapping) {
            this.imagesMapping[idImage].hideMonthlyTasks(() => {
                this.canvas.renderAll();
            });
        }
    }

    /* Highlight the plants of a scoring selection with the selection's color */
    showScoreSelection(selection) {
        selection.plants.forEach(plant => {
            this.imagesMapping[plant.idImage].showScoreSelection(selection.circlePlantColor);
        });
        this.canvas.renderAll();
    }

    /* Remove the scoring highlight from the plants of a selection */
    hideScoreSelection(selection) {
        selection.plants.forEach(plant => {
            this.imagesMapping[plant.idImage].hideScoreSelection();
        });
        this.canvas.renderAll();
    }

    /* Refresh garden grid by redrawing lines, depending of the size in pixels of a meter */
    refreshGrid() {
        const canvasWidth = this.canvas.getWidth();
        const canvasHeight = this.canvas.getHeight();
        // Drop existing grid lines before redrawing them.
        this.grid.horizontalLines.forEach(line => this.canvas.remove(line));
        this.grid.verticalLines.forEach(line => this.canvas.remove(line));
        this.grid.horizontalLines = [];
        this.grid.verticalLines = [];
        // One dashed vertical line per meter.
        for (let xStart = this.grid.sizeMeter; xStart < canvasWidth; xStart = xStart + this.grid.sizeMeter) {
            const line = new fabric.Line([xStart, 0, xStart, canvasHeight], {
                stroke: '#222',
                strokeDashArray: [5, 5],
                selectable: false
            });
            this.canvas.add(line);
            this.grid.verticalLines.push(line);
        }
        // One dashed horizontal line per meter.
        for (let yStart = this.grid.sizeMeter; yStart < canvasHeight; yStart = yStart + this.grid.sizeMeter) {
            const line = new fabric.Line([0, yStart, canvasWidth, yStart], {
                stroke: '#222',
                strokeDashArray: [5, 5],
                selectable: false
            });
            this.canvas.add(line);
            this.grid.horizontalLines.push(line);
        }
    }
}
random_line_split